Compare commits
9 Commits
phase-04
...
5f147cae61
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
5f147cae61 | ||
|
|
c4cecbd8dc | ||
|
|
3c0d905e64 | ||
|
|
0c33de607f | ||
|
|
2d063c7db7 | ||
|
|
af6b7bd945 | ||
|
|
1bfd7fbd08 | ||
|
|
6db8ae4492 | ||
|
|
664bb6d275 |
10
.env.example
10
.env.example
@@ -2,6 +2,14 @@
|
||||
# Copy to .env and fill in your values.
|
||||
# .env is gitignored — never commit it.
|
||||
|
||||
# ─── API Keys ──────────────────────────────────────────────────────────────────
|
||||
HUGGING_FACE_API_KEY=
|
||||
OPENROUTER_API_KEY=
|
||||
OPENAI_API_KEY=
|
||||
DEEPSEEK_API_KEY=
|
||||
GEMINI_API_KEY=
|
||||
ELEVENLABS_API_KEY=
|
||||
|
||||
# ─── Data & Paths ──────────────────────────────────────────────────────────────
|
||||
DATA_DIR=${HOME}/homeai-data
|
||||
REPO_DIR=${HOME}/Projects/HomeAI
|
||||
@@ -35,6 +43,7 @@ OLLAMA_FAST_MODEL=qwen2.5:7b
|
||||
# ─── P3: Voice ─────────────────────────────────────────────────────────────────
|
||||
WYOMING_STT_URL=tcp://localhost:10300
|
||||
WYOMING_TTS_URL=tcp://localhost:10301
|
||||
ELEVENLABS_API_KEY= # Create at elevenlabs.io if using elevenlabs TTS engine
|
||||
|
||||
# ─── P4: Agent ─────────────────────────────────────────────────────────────────
|
||||
OPENCLAW_URL=http://localhost:8080
|
||||
@@ -44,3 +53,4 @@ VTUBE_WS_URL=ws://localhost:8001
|
||||
|
||||
# ─── P8: Images ────────────────────────────────────────────────────────────────
|
||||
COMFYUI_URL=http://localhost:8188
|
||||
|
||||
|
||||
83
TODO.md
83
TODO.md
@@ -25,9 +25,11 @@
|
||||
- [x] Write and load launchd plist (`com.homeai.ollama.plist`) — `/opt/homebrew/bin/ollama`
|
||||
- [x] Register local GGUF models via Modelfiles (no download): llama3.3:70b, qwen3:32b, codestral:22b, qwen2.5:7b
|
||||
- [x] Register additional models: EVA-LLaMA-3.33-70B, Midnight-Miqu-70B, QwQ-32B, Qwen3.5-35B, Qwen3-Coder-30B, Qwen3-VL-30B, GLM-4.6V-Flash, DeepSeek-R1-8B, gemma-3-27b
|
||||
- [x] Add qwen3.5:35b-a3b (MoE, Q8_0) — 26.7 tok/s, recommended for voice pipeline
|
||||
- [x] Write model preload script + launchd service (keeps voice model in VRAM permanently)
|
||||
- [x] Deploy Open WebUI via Docker compose (port 3030)
|
||||
- [x] Verify Open WebUI connected to Ollama, all models available
|
||||
- [ ] Run `scripts/benchmark.sh` — record results in `benchmark-results.md`
|
||||
- [x] Run pipeline benchmark (homeai-voice/scripts/benchmark_pipeline.py) — STT/LLM/TTS latency profiled
|
||||
- [ ] Add Ollama + Open WebUI to Uptime Kuma monitors
|
||||
|
||||
---
|
||||
@@ -37,6 +39,7 @@
|
||||
### P3 · homeai-voice
|
||||
|
||||
- [x] Install `wyoming-faster-whisper` — model: faster-whisper-large-v3 (auto-downloaded)
|
||||
- [x] Upgrade STT to wyoming-mlx-whisper (whisper-large-v3-turbo, MLX Metal GPU) — 20x faster (8s → 400ms)
|
||||
- [x] Install Kokoro ONNX TTS — models at `~/models/kokoro/`
|
||||
- [x] Write Wyoming-Kokoro adapter server (`homeai-voice/tts/wyoming_kokoro_server.py`)
|
||||
- [x] Write + load launchd plists for Wyoming STT (10300) and TTS (10301)
|
||||
@@ -44,10 +47,12 @@
|
||||
- [x] Write + load openWakeWord launchd plist (`com.homeai.wakeword`) — DISABLED, replaced by Wyoming satellite
|
||||
- [x] Write `wyoming/test-pipeline.sh` — smoke test (3/3 passing)
|
||||
- [x] Install Wyoming satellite — handles wake word via HA voice pipeline
|
||||
- [x] Connect Home Assistant Wyoming integration (STT + TTS + Satellite)
|
||||
- [x] Install Wyoming satellite for Mac Mini (port 10700)
|
||||
- [ ] Create HA Voice Assistant pipeline with OpenClaw conversation agent
|
||||
- [ ] Test HA Assist via browser: type query → hear spoken response
|
||||
- [x] Write OpenClaw conversation custom component for Home Assistant
|
||||
- [x] Connect Home Assistant Wyoming integration (STT + TTS + Satellite) — ready to configure in HA UI
|
||||
- [x] Create HA Voice Assistant pipeline with OpenClaw conversation agent — component ready, needs HA UI setup
|
||||
- [x] Test HA Assist via browser: type query → hear spoken response
|
||||
- [x] Test full voice loop: wake word → STT → OpenClaw → TTS → audio playback
|
||||
- [ ] Install Chatterbox TTS (MPS build), test with sample `.wav`
|
||||
- [ ] Install Qwen3-TTS via MLX (fallback)
|
||||
- [ ] Train custom wake word using character name
|
||||
@@ -65,31 +70,37 @@
|
||||
- [x] Fix context window: set `contextWindow=32768` for llama3.3:70b in `openclaw.json`
|
||||
- [x] Fix Llama 3.3 Modelfile: add tool-calling TEMPLATE block
|
||||
- [x] Verify `openclaw agent --message "..." --agent main` → completed
|
||||
- [x] Write `skills/home-assistant` SKILL.md — HA REST API control
|
||||
- [x] Write `skills/home-assistant` SKILL.md — HA REST API control via ha-ctl CLI
|
||||
- [x] Write `skills/voice-assistant` SKILL.md — voice response style guide
|
||||
- [x] Wire HASS_TOKEN — create `~/.homeai/hass_token` or set env in launchd plist
|
||||
- [x] Test home-assistant skill: "turn on/off the reading lamp"
|
||||
- [ ] Set up mem0 with Chroma backend, test semantic recall
|
||||
- [ ] Write memory backup launchd job
|
||||
- [ ] Build morning briefing n8n workflow
|
||||
- [ ] Build notification router n8n workflow
|
||||
- [ ] Verify full voice → agent → HA action flow
|
||||
- [ ] Add OpenClaw to Uptime Kuma monitors
|
||||
- [x] Fix HA tool calling: set commands.native=true, symlink ha-ctl to PATH, update TOOLS.md
|
||||
- [x] Test home-assistant skill: "turn on/off the reading lamp" — verified exec→ha-ctl→HA action
|
||||
- [x] Set up mem0 with Chroma backend, test semantic recall
|
||||
- [x] Write memory backup launchd job
|
||||
- [x] Build morning briefing n8n workflow
|
||||
- [x] Build notification router n8n workflow
|
||||
- [x] Verify full voice → agent → HA action flow
|
||||
- [x] Add OpenClaw to Uptime Kuma monitors (Manual user action required)
|
||||
|
||||
### P5 · homeai-character *(can start alongside P4)*
|
||||
|
||||
- [ ] Define and write `schema/character.schema.json` (v1)
|
||||
- [ ] Write `characters/aria.json` — default character
|
||||
- [ ] Set up Vite project in `src/`, install deps
|
||||
- [ ] Integrate existing `character-manager.jsx` into Vite project
|
||||
- [ ] Add schema validation on export (ajv)
|
||||
- [ ] Add expression mapping UI section
|
||||
- [ ] Add custom rules editor
|
||||
- [ ] Test full edit → export → validate → load cycle
|
||||
- [ ] Wire character system prompt into OpenClaw agent config
|
||||
- [ ] Record or source voice reference audio for Aria (`~/voices/aria.wav`)
|
||||
- [ ] Pre-process audio with ffmpeg, test with Chatterbox
|
||||
- [ ] Update `aria.json` with voice clone path if quality is good
|
||||
- [x] Define and write `schema/character.schema.json` (v1)
|
||||
- [x] Write `characters/aria.json` — default character
|
||||
- [x] Set up Vite project in `src/`, install deps
|
||||
- [x] Integrate existing `character-manager.jsx` into Vite project
|
||||
- [x] Add schema validation on export (ajv)
|
||||
- [x] Add expression mapping UI section
|
||||
- [x] Add custom rules editor
|
||||
- [x] Test full edit → export → validate → load cycle
|
||||
- [x] Wire character system prompt into OpenClaw agent config
|
||||
- [x] Record or source voice reference audio for Aria (`~/voices/aria.wav`)
|
||||
- [x] Pre-process audio with ffmpeg, test with Chatterbox
|
||||
- [x] Update `aria.json` with voice clone path if quality is good
|
||||
- [x] Build unified HomeAI dashboard — dark-themed frontend showing live service status + links to individual UIs
|
||||
- [x] Add character profile management to dashboard — store/switch character configs with attached profile images
|
||||
- [x] Add TTS voice preview in character editor — Kokoro preview via OpenClaw bridge with loading state, custom text, stop control
|
||||
- [x] Merge homeai-character + homeai-desktop into unified homeai-dashboard (services, chat, characters, editor)
|
||||
- [ ] Deploy dashboard as Docker container or static site on Mac Mini
|
||||
|
||||
---
|
||||
|
||||
@@ -97,17 +108,19 @@
|
||||
|
||||
### P6 · homeai-esp32
|
||||
|
||||
- [ ] Install ESPHome: `pip install esphome`
|
||||
- [ ] Write `esphome/secrets.yaml` (gitignored)
|
||||
- [ ] Write `base.yaml`, `voice.yaml`, `display.yaml`, `animations.yaml`
|
||||
- [ ] Write `s3-box-living-room.yaml` for first unit
|
||||
- [ ] Flash first unit via USB
|
||||
- [ ] Verify unit appears in HA device list
|
||||
- [ ] Assign Wyoming voice pipeline to unit in HA
|
||||
- [ ] Test full wake → STT → LLM → TTS → audio playback cycle
|
||||
- [ ] Test LVGL face: idle → listening → thinking → speaking → error
|
||||
- [ ] Verify OTA firmware update works wirelessly
|
||||
- [ ] Flash remaining units (bedroom, kitchen, etc.)
|
||||
- [x] Install ESPHome in `~/homeai-esphome-env` (Python 3.12 venv)
|
||||
- [x] Write `esphome/secrets.yaml` (gitignored)
|
||||
- [x] Write `homeai-living-room.yaml` (based on official S3-BOX-3 reference config)
|
||||
- [x] Generate placeholder face illustrations (7 PNGs, 320×240)
|
||||
- [x] Write `setup.sh` with flash/ota/logs/validate commands
|
||||
- [x] Write `deploy.sh` with OTA deploy, image management, multi-unit support
|
||||
- [x] Flash first unit via USB (living room)
|
||||
- [x] Verify unit appears in HA device list (requires HA 2026.x for ESPHome 2025.12+ compat)
|
||||
- [x] Assign Wyoming voice pipeline to unit in HA
|
||||
- [x] Test full wake → STT → LLM → TTS → audio playback cycle
|
||||
- [x] Test display states: idle → listening → thinking → replying → error
|
||||
- [x] Verify OTA firmware update works wirelessly (`deploy.sh --device OTA`)
|
||||
- [ ] Flash remaining units (bedroom, kitchen)
|
||||
- [ ] Document MAC address → room name mapping
|
||||
|
||||
---
|
||||
|
||||
349
VOICE_PIPELINE_STATUS.md
Normal file
349
VOICE_PIPELINE_STATUS.md
Normal file
@@ -0,0 +1,349 @@
|
||||
# Voice Pipeline Status Report
|
||||
|
||||
> Last Updated: 2026-03-08
|
||||
|
||||
---
|
||||
|
||||
## Executive Summary
|
||||
|
||||
The voice pipeline backend is **fully operational** on the Mac Mini. All services are running and tested:
|
||||
|
||||
- ✅ Wyoming STT (Whisper large-v3) - Port 10300
|
||||
- ✅ Wyoming TTS (Kokoro ONNX) - Port 10301
|
||||
- ✅ Wyoming Satellite (wake word + audio) - Port 10700
|
||||
- ✅ OpenClaw Agent (LLM + skills) - Port 8080
|
||||
- ✅ Ollama (local LLM runtime) - Port 11434
|
||||
|
||||
**Next Step**: Manual Home Assistant UI configuration to connect the pipeline.
|
||||
|
||||
---
|
||||
|
||||
## What's Working ✅
|
||||
|
||||
### 1. Speech-to-Text (STT)
|
||||
- **Service**: Wyoming Faster Whisper
|
||||
- **Model**: large-v3 (multilingual, high accuracy)
|
||||
- **Port**: 10300
|
||||
- **Status**: Running via launchd (`com.homeai.wyoming-stt`)
|
||||
- **Test**: `nc -z localhost 10300` ✓
|
||||
|
||||
### 2. Text-to-Speech (TTS)
|
||||
- **Service**: Wyoming Kokoro ONNX
|
||||
- **Voice**: af_heart (default, configurable)
|
||||
- **Port**: 10301
|
||||
- **Status**: Running via launchd (`com.homeai.wyoming-tts`)
|
||||
- **Test**: `nc -z localhost 10301` ✓
|
||||
|
||||
### 3. Wyoming Satellite
|
||||
- **Function**: Wake word detection + audio capture/playback
|
||||
- **Wake Word**: "hey_jarvis" (openWakeWord model)
|
||||
- **Port**: 10700
|
||||
- **Status**: Running via launchd (`com.homeai.wyoming-satellite`)
|
||||
- **Test**: `nc -z localhost 10700` ✓
|
||||
|
||||
### 4. OpenClaw Agent
|
||||
- **Function**: AI agent with tool calling (home automation, etc.)
|
||||
- **Gateway**: WebSocket + CLI
|
||||
- **Port**: 8080
|
||||
- **Status**: Running via launchd (`com.homeai.openclaw`)
|
||||
- **Skills**: home-assistant, voice-assistant
|
||||
- **Test**: `openclaw agent --message "Hello" --agent main` ✓
|
||||
|
||||
### 5. Ollama LLM
|
||||
- **Models**: llama3.3:70b, qwen2.5:7b, and others
|
||||
- **Port**: 11434
|
||||
- **Status**: Running natively
|
||||
- **Test**: `ollama list` ✓
|
||||
|
||||
### 6. Home Assistant Integration
|
||||
- **Custom Component**: OpenClaw Conversation agent created
|
||||
- **Location**: `homeai-agent/custom_components/openclaw_conversation/`
|
||||
- **Features**:
|
||||
- Full conversation agent implementation
|
||||
- Config flow for UI setup
|
||||
- CLI fallback if HTTP unavailable
|
||||
- Error handling and logging
|
||||
- **Status**: Ready for installation
|
||||
|
||||
---
|
||||
|
||||
## What's Pending 🔄
|
||||
|
||||
### Manual Steps Required (Home Assistant UI)
|
||||
|
||||
These steps require access to the Home Assistant web interface at http://10.0.0.199:8123:
|
||||
|
||||
1. **Install OpenClaw Conversation Component**
|
||||
- Copy component to HA server's `/config/custom_components/`
|
||||
- Restart Home Assistant
|
||||
- See: [`homeai-voice/VOICE_PIPELINE_SETUP.md`](homeai-voice/VOICE_PIPELINE_SETUP.md)
|
||||
|
||||
2. **Add Wyoming Integrations**
|
||||
- Settings → Devices & Services → Add Integration → Wyoming Protocol
|
||||
- Add STT (10.0.0.199:10300)
|
||||
- Add TTS (10.0.0.199:10301)
|
||||
- Add Satellite (10.0.0.199:10700)
|
||||
|
||||
3. **Add OpenClaw Conversation**
|
||||
- Settings → Devices & Services → Add Integration → OpenClaw Conversation
|
||||
- Configure: host=10.0.0.199, port=8080, agent=main
|
||||
|
||||
4. **Create Voice Assistant Pipeline**
|
||||
- Settings → Voice Assistants → Add Assistant
|
||||
- Name: "HomeAI with OpenClaw"
|
||||
- STT: Mac Mini STT
|
||||
- Conversation: OpenClaw Conversation
|
||||
- TTS: Mac Mini TTS
|
||||
- Set as preferred
|
||||
|
||||
5. **Test the Pipeline**
|
||||
- Type test: "What time is it?" in HA Assist
|
||||
- Voice test: "Hey Jarvis, turn on the reading lamp"
|
||||
|
||||
### Future Enhancements
|
||||
|
||||
6. **Chatterbox TTS** - Voice cloning for character personality
|
||||
7. **Qwen3-TTS** - Alternative voice synthesis via MLX
|
||||
8. **Custom Wake Word** - Train with character's name
|
||||
9. **Uptime Kuma** - Add monitoring for all services
|
||||
|
||||
---
|
||||
|
||||
## Architecture
|
||||
|
||||
```
|
||||
┌──────────────────────────────────────────────────────────────┐
|
||||
│ Mac Mini M4 Pro │
|
||||
│ (10.0.0.199) │
|
||||
├──────────────────────────────────────────────────────────────┤
|
||||
│ │
|
||||
│ ┌─────────────┐ ┌─────────────┐ ┌─────────────┐ │
|
||||
│ │ Wyoming │ │ Wyoming │ │ Wyoming │ │
|
||||
│ │ STT │ │ TTS │ │ Satellite │ │
|
||||
│ │ :10300 │ │ :10301 │ │ :10700 │ │
|
||||
│ └─────────────┘ └─────────────┘ └─────────────┘ │
|
||||
│ │
|
||||
│ ┌─────────────┐ ┌─────────────┐ │
|
||||
│ │ OpenClaw │ │ Ollama │ │
|
||||
│ │ Gateway │ │ LLM │ │
|
||||
│ │ :8080 │ │ :11434 │ │
|
||||
│ └─────────────┘ └─────────────┘ │
|
||||
│ │
|
||||
└──────────────────────────────────────────────────────────────┘
|
||||
▲
|
||||
│ Wyoming Protocol + HTTP API
|
||||
│
|
||||
┌──────────────────────────────────────────────────────────────┐
|
||||
│ Home Assistant Server │
|
||||
│ (10.0.0.199) │
|
||||
├──────────────────────────────────────────────────────────────┤
|
||||
│ │
|
||||
│ ┌─────────────────────────────────────────────────────┐ │
|
||||
│ │ Voice Assistant Pipeline │ │
|
||||
│ │ │ │
|
||||
│ │ Wyoming STT → OpenClaw Conversation → Wyoming TTS │ │
|
||||
│ └─────────────────────────────────────────────────────┘ │
|
||||
│ │
|
||||
│ ┌─────────────────────────────────────────────────────┐ │
|
||||
│ │ OpenClaw Conversation Custom Component │ │
|
||||
│ │ (Routes to OpenClaw Gateway on Mac Mini) │ │
|
||||
│ └─────────────────────────────────────────────────────┘ │
|
||||
│ │
|
||||
└──────────────────────────────────────────────────────────────┘
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Voice Flow Example
|
||||
|
||||
**User**: "Hey Jarvis, turn on the reading lamp"
|
||||
|
||||
1. **Wake Word Detection** (Wyoming Satellite)
|
||||
- Detects "Hey Jarvis"
|
||||
- Starts recording audio
|
||||
|
||||
2. **Speech-to-Text** (Wyoming STT)
|
||||
- Transcribes: "turn on the reading lamp"
|
||||
- Sends text to Home Assistant
|
||||
|
||||
3. **Conversation Processing** (HA → OpenClaw)
|
||||
- HA Voice Pipeline receives text
|
||||
- Routes to OpenClaw Conversation agent
|
||||
- OpenClaw Gateway processes request
|
||||
|
||||
4. **LLM Processing** (Ollama)
|
||||
- llama3.3:70b generates response
|
||||
- Identifies intent: control light
|
||||
- Calls home-assistant skill
|
||||
|
||||
5. **Action Execution** (Home Assistant API)
|
||||
- OpenClaw calls HA REST API
|
||||
- Turns on "reading lamp" entity
|
||||
- Returns confirmation
|
||||
|
||||
6. **Text-to-Speech** (Wyoming TTS)
|
||||
- Generates audio: "I've turned on the reading lamp"
|
||||
- Sends to Wyoming Satellite
|
||||
|
||||
7. **Audio Playback** (Mac Mini Speaker)
|
||||
- Plays confirmation audio
|
||||
- User hears response
|
||||
|
||||
**Total Latency**: Target < 5 seconds
|
||||
|
||||
---
|
||||
|
||||
## Service Management
|
||||
|
||||
### Check All Services
|
||||
|
||||
```bash
|
||||
# Quick health check
|
||||
./homeai-voice/scripts/test-services.sh
|
||||
|
||||
# Individual service status
|
||||
launchctl list | grep homeai
|
||||
```
|
||||
|
||||
### Restart a Service
|
||||
|
||||
```bash
|
||||
# Example: Restart STT
|
||||
launchctl unload ~/Library/LaunchAgents/com.homeai.wyoming-stt.plist
|
||||
launchctl load ~/Library/LaunchAgents/com.homeai.wyoming-stt.plist
|
||||
```
|
||||
|
||||
### View Logs
|
||||
|
||||
```bash
|
||||
# STT logs
|
||||
tail -f /tmp/homeai-wyoming-stt.log
|
||||
|
||||
# TTS logs
|
||||
tail -f /tmp/homeai-wyoming-tts.log
|
||||
|
||||
# Satellite logs
|
||||
tail -f /tmp/homeai-wyoming-satellite.log
|
||||
|
||||
# OpenClaw logs
|
||||
tail -f /tmp/homeai-openclaw.log
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Key Documentation
|
||||
|
||||
| Document | Purpose |
|
||||
|----------|---------|
|
||||
| [`homeai-voice/VOICE_PIPELINE_SETUP.md`](homeai-voice/VOICE_PIPELINE_SETUP.md) | Complete setup guide with step-by-step HA configuration |
|
||||
| [`homeai-voice/RESUME_WORK.md`](homeai-voice/RESUME_WORK.md) | Quick reference for resuming work |
|
||||
| [`homeai-agent/custom_components/openclaw_conversation/README.md`](homeai-agent/custom_components/openclaw_conversation/README.md) | Custom component documentation |
|
||||
| [`plans/ha-voice-pipeline-implementation.md`](plans/ha-voice-pipeline-implementation.md) | Detailed implementation plan |
|
||||
| [`plans/voice-loop-integration.md`](plans/voice-loop-integration.md) | Architecture options and decisions |
|
||||
|
||||
---
|
||||
|
||||
## Testing
|
||||
|
||||
### Automated Tests
|
||||
|
||||
```bash
|
||||
# Service health check
|
||||
./homeai-voice/scripts/test-services.sh
|
||||
|
||||
# OpenClaw test
|
||||
openclaw agent --message "What time is it?" --agent main
|
||||
|
||||
# Home Assistant skill test
|
||||
openclaw agent --message "Turn on the reading lamp" --agent main
|
||||
```
|
||||
|
||||
### Manual Tests
|
||||
|
||||
1. **Type Test** (HA Assist)
|
||||
- Open HA UI → Click Assist icon
|
||||
- Type: "What time is it?"
|
||||
- Expected: Hear spoken response
|
||||
|
||||
2. **Voice Test** (Wyoming Satellite)
|
||||
- Say: "Hey Jarvis"
|
||||
- Wait for beep
|
||||
- Say: "What time is it?"
|
||||
- Expected: Hear spoken response
|
||||
|
||||
3. **Home Control Test**
|
||||
- Say: "Hey Jarvis"
|
||||
- Say: "Turn on the reading lamp"
|
||||
- Expected: Light turns on + confirmation
|
||||
|
||||
---
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Services Not Running
|
||||
|
||||
```bash
|
||||
# Check launchd
|
||||
launchctl list | grep homeai
|
||||
|
||||
# Reload all services
|
||||
./homeai-voice/scripts/load-all-launchd.sh
|
||||
```
|
||||
|
||||
### Network Issues
|
||||
|
||||
```bash
|
||||
# Test from Mac Mini to HA
|
||||
curl http://10.0.0.199:8123/api/
|
||||
|
||||
# Test ports
|
||||
nc -z localhost 10300 # STT
|
||||
nc -z localhost 10301 # TTS
|
||||
nc -z localhost 10700 # Satellite
|
||||
nc -z localhost 8080 # OpenClaw
|
||||
```
|
||||
|
||||
### Audio Issues
|
||||
|
||||
```bash
|
||||
# Test microphone
|
||||
rec -r 16000 -c 1 test.wav trim 0 5
|
||||
|
||||
# Test speaker
|
||||
afplay /System/Library/Sounds/Glass.aiff
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Next Actions
|
||||
|
||||
1. **Access Home Assistant UI** at http://10.0.0.199:8123
|
||||
2. **Follow setup guide**: [`homeai-voice/VOICE_PIPELINE_SETUP.md`](homeai-voice/VOICE_PIPELINE_SETUP.md)
|
||||
3. **Install OpenClaw component** (see Step 1 in setup guide)
|
||||
4. **Configure Wyoming integrations** (see Step 2 in setup guide)
|
||||
5. **Create voice pipeline** (see Step 4 in setup guide)
|
||||
6. **Test end-to-end** (see Step 5 in setup guide)
|
||||
|
||||
---
|
||||
|
||||
## Success Metrics
|
||||
|
||||
- [ ] All services show green in health check
|
||||
- [ ] Wyoming integrations appear in HA
|
||||
- [ ] OpenClaw Conversation agent registered
|
||||
- [ ] Voice pipeline created and set as default
|
||||
- [ ] Typed query returns spoken response
|
||||
- [ ] Voice query via satellite works
|
||||
- [ ] Home control via voice works
|
||||
- [ ] End-to-end latency < 5 seconds
|
||||
- [ ] Services survive Mac Mini reboot
|
||||
|
||||
---
|
||||
|
||||
## Project Context
|
||||
|
||||
This is **Phase 2** of the HomeAI project. See [`TODO.md`](TODO.md) for the complete project roadmap.
|
||||
|
||||
**Previous Phase**: Phase 1 - Foundation (Infrastructure + LLM) ✅ Complete
|
||||
**Current Phase**: Phase 2 - Voice Pipeline 🔄 Backend Complete, HA Integration Pending
|
||||
**Next Phase**: Phase 3 - Agent & Character (mem0, character system, workflows)
|
||||
115
homeai-agent/custom_components/install-to-docker-ha.sh
Executable file
115
homeai-agent/custom_components/install-to-docker-ha.sh
Executable file
@@ -0,0 +1,115 @@
|
||||
#!/usr/bin/env bash
|
||||
# Install OpenClaw Conversation component to Docker Home Assistant on 10.0.0.199
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
COMPONENT_NAME="openclaw_conversation"
|
||||
HA_HOST="${HA_HOST:-10.0.0.199}"
|
||||
HA_CONTAINER="${HA_CONTAINER:-homeassistant}"
|
||||
|
||||
echo "Installing OpenClaw Conversation to Docker Home Assistant"
|
||||
echo "=========================================================="
|
||||
echo "Host: $HA_HOST"
|
||||
echo "Container: $HA_CONTAINER"
|
||||
echo ""
|
||||
|
||||
# Check if we can reach the host
|
||||
if ! ping -c 1 -W 2 "$HA_HOST" &>/dev/null; then
|
||||
echo "Error: Cannot reach $HA_HOST"
|
||||
echo "Please ensure the server is accessible"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Create temporary tarball
|
||||
TEMP_DIR=$(mktemp -d)
|
||||
TARBALL="$TEMP_DIR/openclaw_conversation.tar.gz"
|
||||
|
||||
echo "Creating component archive..."
|
||||
cd "$SCRIPT_DIR"
|
||||
tar -czf "$TARBALL" \
|
||||
--exclude='*.pyc' \
|
||||
--exclude='__pycache__' \
|
||||
--exclude='.DS_Store' \
|
||||
"$COMPONENT_NAME"
|
||||
|
||||
echo "✓ Archive created: $(du -h "$TARBALL" | cut -f1)"
|
||||
echo ""
|
||||
|
||||
# Copy to remote host
|
||||
echo "Copying to $HA_HOST:/tmp/..."
|
||||
if scp -q "$TARBALL" "$HA_HOST:/tmp/openclaw_conversation.tar.gz"; then
|
||||
echo "✓ File copied successfully"
|
||||
else
|
||||
echo "✗ Failed to copy file"
|
||||
echo ""
|
||||
echo "Troubleshooting:"
|
||||
echo " 1. Ensure SSH access is configured: ssh $HA_HOST"
|
||||
echo " 2. Check SSH keys are set up"
|
||||
echo " 3. Try manual copy: scp $TARBALL $HA_HOST:/tmp/"
|
||||
rm -rf "$TEMP_DIR"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Extract into container
|
||||
echo ""
|
||||
echo "Installing into Home Assistant container..."
|
||||
ssh "$HA_HOST" << 'EOF'
|
||||
# Find the Home Assistant container
|
||||
CONTAINER=$(docker ps --filter "name=homeassistant" --format "{{.Names}}" | head -n 1)
|
||||
|
||||
if [ -z "$CONTAINER" ]; then
|
||||
echo "Error: Home Assistant container not found"
|
||||
echo "Available containers:"
|
||||
docker ps --format "{{.Names}}"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "Found container: $CONTAINER"
|
||||
|
||||
# Copy tarball into container
|
||||
docker cp /tmp/openclaw_conversation.tar.gz "$CONTAINER:/tmp/"
|
||||
|
||||
# Extract into custom_components
|
||||
docker exec "$CONTAINER" sh -c '
|
||||
mkdir -p /config/custom_components
|
||||
cd /config/custom_components
|
||||
tar -xzf /tmp/openclaw_conversation.tar.gz
|
||||
rm /tmp/openclaw_conversation.tar.gz
|
||||
ls -la openclaw_conversation/
|
||||
'
|
||||
|
||||
# Cleanup
|
||||
rm /tmp/openclaw_conversation.tar.gz
|
||||
|
||||
echo ""
|
||||
echo "✓ Component installed successfully!"
|
||||
EOF
|
||||
|
||||
# Cleanup local temp
|
||||
rm -rf "$TEMP_DIR"
|
||||
|
||||
echo ""
|
||||
echo "=========================================================="
|
||||
echo "Installation complete!"
|
||||
echo ""
|
||||
echo "Next steps:"
|
||||
echo " 1. Restart Home Assistant:"
|
||||
echo " ssh $HA_HOST 'docker restart $HA_CONTAINER'"
|
||||
echo ""
|
||||
echo " 2. Open Home Assistant UI: http://$HA_HOST:8123"
|
||||
echo ""
|
||||
echo " 3. Go to Settings → Devices & Services → Add Integration"
|
||||
echo ""
|
||||
echo " 4. Search for 'OpenClaw Conversation'"
|
||||
echo ""
|
||||
echo " 5. Configure:"
|
||||
echo " - OpenClaw Host: 10.0.0.101 ⚠️ (Mac Mini IP, NOT $HA_HOST)"
|
||||
echo " - OpenClaw Port: 8081 (HTTP Bridge port)"
|
||||
echo " - Agent Name: main"
|
||||
echo " - Timeout: 120"
|
||||
echo ""
|
||||
echo " IMPORTANT: All services (OpenClaw, Wyoming STT/TTS/Satellite) run on"
|
||||
echo " 10.0.0.101 (Mac Mini), not $HA_HOST (HA server)"
|
||||
echo ""
|
||||
echo "See VOICE_PIPELINE_SETUP.md for complete configuration guide"
|
||||
@@ -52,12 +52,12 @@ if [[ -d "$TARGET_DIR" && -f "$TARGET_DIR/manifest.json" ]]; then
|
||||
echo " 1. Restart Home Assistant"
|
||||
echo " 2. Go to Settings → Devices & Services → Add Integration"
|
||||
echo " 3. Search for 'OpenClaw Conversation'"
|
||||
echo " 4. Configure the settings (host: localhost, port: 8080)"
|
||||
echo " 4. Configure the settings (host: localhost, port: 8081)"
|
||||
echo ""
|
||||
echo " Or add to configuration.yaml:"
|
||||
echo " openclaw_conversation:"
|
||||
echo " openclaw_host: localhost"
|
||||
echo " openclaw_port: 8080"
|
||||
echo " openclaw_port: 8081"
|
||||
echo " agent_name: main"
|
||||
echo " timeout: 30"
|
||||
else
|
||||
|
||||
@@ -26,7 +26,7 @@ A custom conversation agent for Home Assistant that routes all voice/text querie
|
||||
4. Search for "OpenClaw Conversation"
|
||||
5. Configure the settings:
|
||||
- **OpenClaw Host**: `localhost` (or IP of Mac Mini)
|
||||
- **OpenClaw Port**: `8080`
|
||||
- **OpenClaw Port**: `8081` (HTTP Bridge)
|
||||
- **Agent Name**: `main` (or your configured agent)
|
||||
- **Timeout**: `30` seconds
|
||||
|
||||
@@ -49,7 +49,7 @@ Add to your `configuration.yaml`:
|
||||
```yaml
|
||||
openclaw_conversation:
|
||||
openclaw_host: localhost
|
||||
openclaw_port: 8080
|
||||
openclaw_port: 8081
|
||||
agent_name: main
|
||||
timeout: 30
|
||||
```
|
||||
@@ -95,7 +95,7 @@ Once configured, the OpenClaw agent will be available as a conversation agent in
|
||||
|
||||
1. Verify OpenClaw host/port settings
|
||||
2. Ensure OpenClaw is accessible from HA container/host
|
||||
3. Check network connectivity: `curl http://localhost:8080/status`
|
||||
3. Check network connectivity: `curl http://localhost:8081/status`
|
||||
|
||||
## Files
|
||||
|
||||
|
||||
@@ -22,7 +22,7 @@ from .const import (
|
||||
DEFAULT_TIMEOUT,
|
||||
DOMAIN,
|
||||
)
|
||||
from .conversation import OpenClawCLIAgent
|
||||
from .conversation import OpenClawAgent
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -57,8 +57,8 @@ async def async_setup(hass: HomeAssistant, config: dict[str, Any]) -> bool:
|
||||
"config": conf,
|
||||
}
|
||||
|
||||
# Register the conversation agent
|
||||
agent = OpenClawCLIAgent(hass, conf)
|
||||
# Register the conversation agent (HTTP-based for cross-network access)
|
||||
agent = OpenClawAgent(hass, conf)
|
||||
|
||||
# Add to conversation agent registry
|
||||
from homeassistant.components import conversation
|
||||
@@ -76,11 +76,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
# Store entry data
|
||||
hass.data[DOMAIN][entry.entry_id] = entry.data
|
||||
|
||||
# Register the conversation agent
|
||||
agent = OpenClawCLIAgent(hass, entry.data)
|
||||
# Register the conversation agent (HTTP-based for cross-network access)
|
||||
agent = OpenClawAgent(hass, entry.data)
|
||||
|
||||
from homeassistant.components import conversation
|
||||
conversation.async_set_agent(hass, DOMAIN, agent)
|
||||
conversation.async_set_agent(hass, entry, agent)
|
||||
|
||||
_LOGGER.info("OpenClaw Conversation agent registered from config entry")
|
||||
|
||||
@@ -91,7 +91,7 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
# Unregister the conversation agent
|
||||
from homeassistant.components import conversation
|
||||
conversation.async_unset_agent(hass, DOMAIN)
|
||||
conversation.async_unset_agent(hass, entry)
|
||||
|
||||
hass.data[DOMAIN].pop(entry.entry_id, None)
|
||||
|
||||
|
||||
@@ -9,10 +9,10 @@ CONF_AGENT_NAME = "agent_name"
|
||||
CONF_TIMEOUT = "timeout"
|
||||
|
||||
# Defaults
|
||||
DEFAULT_HOST = "localhost"
|
||||
DEFAULT_PORT = 8080
|
||||
DEFAULT_HOST = "10.0.0.101"
|
||||
DEFAULT_PORT = 8081 # OpenClaw HTTP Bridge (not 8080 gateway)
|
||||
DEFAULT_AGENT = "main"
|
||||
DEFAULT_TIMEOUT = 30
|
||||
DEFAULT_TIMEOUT = 120
|
||||
|
||||
# API endpoints
|
||||
OPENCLAW_API_PATH = "/api/agent/message"
|
||||
|
||||
@@ -187,8 +187,6 @@ class OpenClawCLIAgent(AbstractConversationAgent):
|
||||
|
||||
async def _call_openclaw_cli(self, message: str) -> str:
|
||||
"""Call OpenClaw CLI and return the response."""
|
||||
import subprocess
|
||||
|
||||
cmd = [
|
||||
"openclaw",
|
||||
"agent",
|
||||
@@ -196,6 +194,7 @@ class OpenClawCLIAgent(AbstractConversationAgent):
|
||||
"--agent", self.agent_name,
|
||||
]
|
||||
|
||||
proc = None
|
||||
try:
|
||||
proc = await asyncio.create_subprocess_exec(
|
||||
*cmd,
|
||||
@@ -215,6 +214,9 @@ class OpenClawCLIAgent(AbstractConversationAgent):
|
||||
return stdout.decode().strip()
|
||||
|
||||
except asyncio.TimeoutError:
|
||||
if proc is not None:
|
||||
proc.kill()
|
||||
await proc.wait()
|
||||
_LOGGER.error("Timeout calling OpenClaw CLI")
|
||||
return "I'm sorry, the request timed out."
|
||||
except FileNotFoundError:
|
||||
|
||||
46
homeai-agent/custom_components/package-for-ha.sh
Executable file
46
homeai-agent/custom_components/package-for-ha.sh
Executable file
@@ -0,0 +1,46 @@
|
||||
#!/usr/bin/env bash
|
||||
# Package OpenClaw Conversation component for Home Assistant installation
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
COMPONENT_NAME="openclaw_conversation"
|
||||
OUTPUT_DIR="$SCRIPT_DIR/dist"
|
||||
TIMESTAMP=$(date +%Y%m%d_%H%M%S)
|
||||
ARCHIVE_NAME="openclaw_conversation_${TIMESTAMP}.tar.gz"
|
||||
|
||||
echo "Packaging OpenClaw Conversation component..."
|
||||
echo ""
|
||||
|
||||
# Create dist directory
|
||||
mkdir -p "$OUTPUT_DIR"
|
||||
|
||||
# Create tarball
|
||||
cd "$SCRIPT_DIR"
|
||||
tar -czf "$OUTPUT_DIR/$ARCHIVE_NAME" \
|
||||
--exclude='*.pyc' \
|
||||
--exclude='__pycache__' \
|
||||
--exclude='.DS_Store' \
|
||||
"$COMPONENT_NAME"
|
||||
|
||||
# Create latest symlink
|
||||
cd "$OUTPUT_DIR"
|
||||
ln -sf "$ARCHIVE_NAME" openclaw_conversation_latest.tar.gz
|
||||
|
||||
echo "✓ Package created: $OUTPUT_DIR/$ARCHIVE_NAME"
|
||||
echo ""
|
||||
echo "Installation instructions:"
|
||||
echo ""
|
||||
echo "1. Copy to Home Assistant server:"
|
||||
echo " scp $OUTPUT_DIR/$ARCHIVE_NAME user@10.0.0.199:/tmp/"
|
||||
echo ""
|
||||
echo "2. SSH into Home Assistant server:"
|
||||
echo " ssh user@10.0.0.199"
|
||||
echo ""
|
||||
echo "3. Extract to custom_components:"
|
||||
echo " cd /config/custom_components"
|
||||
echo " tar -xzf /tmp/$ARCHIVE_NAME"
|
||||
echo ""
|
||||
echo "4. Restart Home Assistant"
|
||||
echo ""
|
||||
echo "Or use the install.sh script for automated installation."
|
||||
40
homeai-agent/launchd/com.homeai.openclaw-bridge.plist
Normal file
40
homeai-agent/launchd/com.homeai.openclaw-bridge.plist
Normal file
@@ -0,0 +1,40 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN"
|
||||
"http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>Label</key>
|
||||
<string>com.homeai.openclaw-bridge</string>
|
||||
|
||||
<key>ProgramArguments</key>
|
||||
<array>
|
||||
<string>/Users/aodhan/homeai-voice-env/bin/python3</string>
|
||||
<string>/Users/aodhan/gitea/homeai/homeai-agent/openclaw-http-bridge.py</string>
|
||||
<string>--port</string>
|
||||
<string>8081</string>
|
||||
<string>--host</string>
|
||||
<string>0.0.0.0</string>
|
||||
</array>
|
||||
|
||||
<key>RunAtLoad</key>
|
||||
<true/>
|
||||
|
||||
<key>KeepAlive</key>
|
||||
<true/>
|
||||
|
||||
<key>StandardOutPath</key>
|
||||
<string>/tmp/homeai-openclaw-bridge.log</string>
|
||||
|
||||
<key>StandardErrorPath</key>
|
||||
<string>/tmp/homeai-openclaw-bridge-error.log</string>
|
||||
|
||||
<key>ThrottleInterval</key>
|
||||
<integer>10</integer>
|
||||
|
||||
<key>EnvironmentVariables</key>
|
||||
<dict>
|
||||
<key>PATH</key>
|
||||
<string>/opt/homebrew/bin:/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin</string>
|
||||
</dict>
|
||||
</dict>
|
||||
</plist>
|
||||
362
homeai-agent/openclaw-http-bridge.py
Normal file
362
homeai-agent/openclaw-http-bridge.py
Normal file
@@ -0,0 +1,362 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
OpenClaw HTTP Bridge
|
||||
|
||||
A simple HTTP server that translates HTTP POST requests to OpenClaw CLI calls.
|
||||
This allows Home Assistant (running in Docker on a different machine) to
|
||||
communicate with OpenClaw via HTTP.
|
||||
|
||||
Usage:
|
||||
python3 openclaw-http-bridge.py [--port 8081]
|
||||
|
||||
Endpoints:
|
||||
POST /api/agent/message
|
||||
{
|
||||
"message": "Your message here",
|
||||
"agent": "main"
|
||||
}
|
||||
|
||||
Returns:
|
||||
{
|
||||
"response": "OpenClaw response text"
|
||||
}
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import subprocess
|
||||
import sys
|
||||
import asyncio
|
||||
from http.server import HTTPServer, BaseHTTPRequestHandler
|
||||
from socketserver import ThreadingMixIn
|
||||
from urllib.parse import urlparse
|
||||
from pathlib import Path
|
||||
import wave
|
||||
import io
|
||||
import re
|
||||
from wyoming.client import AsyncTcpClient
|
||||
from wyoming.tts import Synthesize, SynthesizeVoice
|
||||
from wyoming.asr import Transcribe, Transcript
|
||||
from wyoming.audio import AudioStart, AudioChunk, AudioStop
|
||||
from wyoming.info import Info
|
||||
|
||||
|
||||
def load_character_prompt() -> str:
    """Return the active character's system prompt, or "" if unavailable."""
    config_file = Path.home() / ".openclaw" / "characters" / "aria.json"
    if not config_file.exists():
        return ""
    try:
        # Best effort: a corrupt or non-dict profile just means no prompt.
        profile = json.loads(config_file.read_text())
        return profile.get("system_prompt", "")
    except Exception:
        return ""
|
||||
|
||||
|
||||
class OpenClawBridgeHandler(BaseHTTPRequestHandler):
    """HTTP request handler for the OpenClaw bridge.

    Routes:
        POST /wake               -- wake word notification (log + ack)
        POST /api/tts            -- JSON {"text", "voice"} in, WAV audio out
        POST /api/stt            -- WAV audio in, JSON {"text"} out
        POST /api/agent/message  -- JSON {"message", "agent"} in, forwarded to
                                    the OpenClaw CLI, JSON {"response"} out
        GET  /status or /        -- health check
    """

    # Common emoji / pictograph / symbol codepoint ranges, stripped from TTS
    # input so the voice does not try to read them out.
    _EMOJI_RE = re.compile(
        r'[\U0001F600-\U0001F64F\U0001F300-\U0001F5FF\U0001F680-\U0001F6FF'
        r'\U0001F1E0-\U0001F1FF\U0001F900-\U0001F9FF\U0001FA00-\U0001FAFF'
        r'\U00002702-\U000027B0\U0000FE00-\U0000FE0F\U0000200D'
        r'\U00002600-\U000026FF\U00002300-\U000023FF]+'
    )

    def log_message(self, format, *args):
        """Log requests to stdout (launchd redirects this to the log file)."""
        print(f"[OpenClaw Bridge] {self.address_string()} - {format % args}")

    def _send_json_response(self, status_code: int, data: dict):
        """Send a JSON response.

        Fix: always include Access-Control-Allow-Origin. The TTS/STT success
        paths set it, but error responses previously did not, so browser
        clients could not read error bodies.
        """
        self.send_response(status_code)
        self.send_header("Content-Type", "application/json")
        self.send_header("Access-Control-Allow-Origin", "*")
        self.end_headers()
        self.wfile.write(json.dumps(data).encode())

    def _read_json_body(self):
        """Read and parse the request body as JSON.

        Returns the parsed object, or None after sending a 400 response when
        the body is missing or not valid JSON. Shared by the TTS and agent
        handlers, which previously duplicated this logic.
        """
        content_length = int(self.headers.get("Content-Length", 0))
        if content_length == 0:
            self._send_json_response(400, {"error": "Empty body"})
            return None
        try:
            body = self.rfile.read(content_length).decode()
            return json.loads(body)
        except json.JSONDecodeError:
            self._send_json_response(400, {"error": "Invalid JSON"})
            return None

    def do_POST(self):
        """Dispatch POST requests by path."""
        parsed_path = urlparse(self.path)

        if parsed_path.path == "/wake":
            self._handle_wake_word()
        elif parsed_path.path == "/api/tts":
            self._handle_tts_request()
        elif parsed_path.path == "/api/stt":
            self._handle_stt_request()
        elif parsed_path.path == "/api/agent/message":
            self._handle_agent_request()
        else:
            self._send_json_response(404, {"error": "Not found"})

    def _handle_tts_request(self):
        """Handle TTS request and return WAV audio."""
        data = self._read_json_body()
        if data is None:
            return

        text = data.get("text", "Hello, this is a test.")
        # Strip emojis so TTS doesn't try to read them out
        text = self._EMOJI_RE.sub('', text).strip()
        voice = data.get("voice", "af_heart")

        try:
            # Synthesize fully before sending headers so a failure can still
            # produce a clean JSON error response.
            audio_bytes = asyncio.run(self._synthesize_audio(text, voice))

            self.send_response(200)
            self.send_header("Content-Type", "audio/wav")
            # Allow CORS for local testing from Vite
            self.send_header("Access-Control-Allow-Origin", "*")
            self.end_headers()
            self.wfile.write(audio_bytes)
        except Exception as e:
            self._send_json_response(500, {"error": str(e)})

    def do_OPTIONS(self):
        """Handle CORS preflight requests."""
        self.send_response(204)
        self.send_header("Access-Control-Allow-Origin", "*")
        self.send_header("Access-Control-Allow-Methods", "POST, GET, OPTIONS")
        self.send_header("Access-Control-Allow-Headers", "Content-Type")
        self.end_headers()

    async def _synthesize_audio(self, text: str, voice: str) -> bytes:
        """Connect to the Wyoming TTS server and return WAV bytes.

        Fix: the client is disconnected in a finally block so a failure
        mid-stream no longer leaks the TCP connection.
        """
        client = AsyncTcpClient("127.0.0.1", 10301)
        await client.connect()

        audio_data = bytearray()
        # Defaults; overridden by the AudioStart event if one arrives.
        rate = 24000
        width = 2
        channels = 1

        try:
            # Consume the initial Info event the TTS server sends on connect.
            await client.read_event()

            await client.write_event(
                Synthesize(text=text, voice=SynthesizeVoice(name=voice)).event()
            )

            while True:
                event = await client.read_event()
                if event is None:
                    break

                if AudioStart.is_type(event.type):
                    start = AudioStart.from_event(event)
                    rate = start.rate
                    width = start.width
                    channels = start.channels
                elif AudioChunk.is_type(event.type):
                    audio_data.extend(AudioChunk.from_event(event).audio)
                elif AudioStop.is_type(event.type):
                    break
        finally:
            await client.disconnect()

        # Package raw PCM into a WAV container.
        wav_io = io.BytesIO()
        with wave.open(wav_io, 'wb') as wav_file:
            wav_file.setnchannels(channels)
            wav_file.setsampwidth(width)
            wav_file.setframerate(rate)
            wav_file.writeframes(audio_data)

        return wav_io.getvalue()

    def _handle_stt_request(self):
        """Handle STT request — accept WAV audio, return transcribed text."""
        content_length = int(self.headers.get("Content-Length", 0))
        if content_length == 0:
            self._send_json_response(400, {"error": "Empty body"})
            return

        try:
            audio_bytes = self.rfile.read(content_length)

            # Parse WAV to get PCM data and format
            wav_io = io.BytesIO(audio_bytes)
            with wave.open(wav_io, 'rb') as wav_file:
                rate = wav_file.getframerate()
                width = wav_file.getsampwidth()
                channels = wav_file.getnchannels()
                pcm_data = wav_file.readframes(wav_file.getnframes())

            text = asyncio.run(self._transcribe_audio(pcm_data, rate, width, channels))

            self.send_response(200)
            self.send_header("Content-Type", "application/json")
            self.send_header("Access-Control-Allow-Origin", "*")
            self.end_headers()
            self.wfile.write(json.dumps({"text": text}).encode())

        except wave.Error as e:
            self._send_json_response(400, {"error": f"Invalid WAV: {e}"})
        except Exception as e:
            self._send_json_response(500, {"error": str(e)})

    async def _transcribe_audio(self, pcm_data: bytes, rate: int, width: int, channels: int) -> str:
        """Connect to the Wyoming STT server and transcribe PCM audio.

        Fix: disconnect moved to a finally block; the original duplicated
        the disconnect call and leaked the connection on errors.
        """
        client = AsyncTcpClient("127.0.0.1", 10300)
        await client.connect()

        try:
            # Send Transcribe request (STT server does not send an initial Info event)
            await client.write_event(Transcribe(language="en").event())

            await client.write_event(AudioStart(rate=rate, width=width, channels=channels).event())

            # Stream the PCM in one-second chunks.
            bytes_per_second = rate * width * channels
            for offset in range(0, len(pcm_data), bytes_per_second):
                chunk = pcm_data[offset:offset + bytes_per_second]
                await client.write_event(AudioChunk(rate=rate, width=width, channels=channels, audio=chunk).event())

            await client.write_event(AudioStop().event())

            # Wait for the transcript event (or stream end).
            while True:
                event = await client.read_event()
                if event is None:
                    break
                if Transcript.is_type(event.type):
                    return Transcript.from_event(event).text
        finally:
            await client.disconnect()

        return ""

    def _handle_wake_word(self):
        """Handle wake word detection notification."""
        content_length = int(self.headers.get("Content-Length", 0))
        wake_word_data = {}
        if content_length > 0:
            try:
                body = self.rfile.read(content_length).decode()
                wake_word_data = json.loads(body)
            except (json.JSONDecodeError, ConnectionResetError, OSError):
                # Client may close connection early, that's ok
                pass

        print(f"[OpenClaw Bridge] Wake word detected: {wake_word_data.get('wake_word', 'unknown')}")
        self._send_json_response(200, {"status": "ok", "message": "Wake word received"})

    def _handle_agent_request(self):
        """Handle agent message request: forward it to the OpenClaw CLI."""
        data = self._read_json_body()
        if data is None:
            return

        message = data.get("message")
        agent = data.get("agent", "main")

        if not message:
            self._send_json_response(400, {"error": "Message is required"})
            return

        # Inject the active character's system prompt, if configured.
        system_prompt = load_character_prompt()
        if system_prompt:
            message = f"System Context: {system_prompt}\n\nUser Request: {message}"

        # Call OpenClaw CLI (use full path for launchd compatibility)
        try:
            result = subprocess.run(
                ["/opt/homebrew/bin/openclaw", "agent", "--message", message, "--agent", agent],
                capture_output=True,
                text=True,
                timeout=120,
                check=True
            )
            self._send_json_response(200, {"response": result.stdout.strip()})
        except subprocess.TimeoutExpired:
            self._send_json_response(504, {"error": "OpenClaw command timed out"})
        except subprocess.CalledProcessError as e:
            error_msg = e.stderr.strip() if e.stderr else "OpenClaw command failed"
            self._send_json_response(500, {"error": error_msg})
        except FileNotFoundError:
            self._send_json_response(500, {"error": "OpenClaw CLI not found"})
        except Exception as e:
            self._send_json_response(500, {"error": str(e)})

    def do_GET(self):
        """Handle GET requests (health check)."""
        parsed_path = urlparse(self.path)

        if parsed_path.path in ("/status", "/"):
            self._send_json_response(200, {
                "status": "ok",
                "service": "OpenClaw HTTP Bridge",
                "version": "1.0.0"
            })
        else:
            self._send_json_response(404, {"error": "Not found"})
|
||||
|
||||
|
||||
class ThreadingHTTPServer(ThreadingMixIn, HTTPServer):
    """HTTP server that serves each request on its own daemon thread."""
    # Daemon threads let the process shut down without waiting on
    # in-flight requests.
    daemon_threads = True
|
||||
|
||||
|
||||
def main():
    """Parse CLI options and run the HTTP bridge server until interrupted."""
    arg_parser = argparse.ArgumentParser(description="OpenClaw HTTP Bridge")
    arg_parser.add_argument(
        "--port",
        type=int,
        default=8081,
        help="Port to listen on (default: 8081)"
    )
    arg_parser.add_argument(
        "--host",
        default="0.0.0.0",
        help="Host to bind to (default: 0.0.0.0)"
    )
    opts = arg_parser.parse_args()

    # Allow quick restarts without waiting out TIME_WAIT on the socket.
    ThreadingHTTPServer.allow_reuse_address = True
    bridge = ThreadingHTTPServer((opts.host, opts.port), OpenClawBridgeHandler)

    print(f"OpenClaw HTTP Bridge running on http://{opts.host}:{opts.port}")
    print(f"Endpoint: POST http://{opts.host}:{opts.port}/api/agent/message")
    print("Press Ctrl+C to stop")

    try:
        bridge.serve_forever()
    except KeyboardInterrupt:
        print("\nShutting down...")
        bridge.shutdown()


if __name__ == "__main__":
    main()
|
||||
@@ -18,8 +18,26 @@ import sys
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
def load_character_prompt() -> str:
    """Load the active character system prompt; "" on any failure."""
    prompt_file = Path.home() / ".openclaw" / "characters" / "aria.json"
    if not prompt_file.exists():
        return ""
    try:
        # A missing key, corrupt JSON, or unreadable file all degrade to "".
        with prompt_file.open() as fh:
            return json.load(fh).get("system_prompt", "")
    except Exception:
        return ""
|
||||
|
||||
|
||||
def call_openclaw(message: str, agent: str = "main", timeout: int = 30) -> str:
|
||||
"""Call OpenClaw CLI and return the response."""
|
||||
# Inject system prompt
|
||||
system_prompt = load_character_prompt()
|
||||
if system_prompt:
|
||||
message = f"System Context: {system_prompt}\n\nUser Request: {message}"
|
||||
|
||||
try:
|
||||
result = subprocess.run(
|
||||
["openclaw", "agent", "--message", message, "--agent", agent],
|
||||
|
||||
24
homeai-character/.gitignore
vendored
Normal file
24
homeai-character/.gitignore
vendored
Normal file
@@ -0,0 +1,24 @@
|
||||
# Logs
|
||||
logs
|
||||
*.log
|
||||
npm-debug.log*
|
||||
yarn-debug.log*
|
||||
yarn-error.log*
|
||||
pnpm-debug.log*
|
||||
lerna-debug.log*
|
||||
|
||||
node_modules
|
||||
dist
|
||||
dist-ssr
|
||||
*.local
|
||||
|
||||
# Editor directories and files
|
||||
.vscode/*
|
||||
!.vscode/extensions.json
|
||||
.idea
|
||||
.DS_Store
|
||||
*.suo
|
||||
*.ntvs*
|
||||
*.njsproj
|
||||
*.sln
|
||||
*.sw?
|
||||
@@ -1,300 +0,0 @@
|
||||
# P5: homeai-character — Character System & Persona Config
|
||||
|
||||
> Phase 3 | No hard runtime dependencies | Consumed by: P3, P4, P7
|
||||
|
||||
---
|
||||
|
||||
## Goal
|
||||
|
||||
A single, authoritative character configuration that defines the AI assistant's personality, voice, visual expressions, and prompt rules. The Character Manager UI (already started as `character-manager.jsx`) provides a friendly editor. The exported JSON is the single source of truth for all pipeline components.
|
||||
|
||||
---
|
||||
|
||||
## Character JSON Schema v1
|
||||
|
||||
File: `schema/character.schema.json`
|
||||
|
||||
```json
|
||||
{
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"title": "HomeAI Character Config",
|
||||
"version": "1",
|
||||
"type": "object",
|
||||
"required": ["schema_version", "name", "system_prompt", "tts"],
|
||||
"properties": {
|
||||
"schema_version": { "type": "integer", "const": 1 },
|
||||
"name": { "type": "string" },
|
||||
"display_name": { "type": "string" },
|
||||
"description": { "type": "string" },
|
||||
|
||||
"system_prompt": { "type": "string" },
|
||||
|
||||
"model_overrides": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"primary": { "type": "string" },
|
||||
"fast": { "type": "string" }
|
||||
}
|
||||
},
|
||||
|
||||
"tts": {
|
||||
"type": "object",
|
||||
"required": ["engine"],
|
||||
"properties": {
|
||||
"engine": {
|
||||
"type": "string",
|
||||
"enum": ["kokoro", "chatterbox", "qwen3"]
|
||||
},
|
||||
"voice_ref_path": { "type": "string" },
|
||||
"kokoro_voice": { "type": "string" },
|
||||
"speed": { "type": "number", "default": 1.0 }
|
||||
}
|
||||
},
|
||||
|
||||
"live2d_expressions": {
|
||||
"type": "object",
|
||||
"description": "Maps semantic state to VTube Studio hotkey ID",
|
||||
"properties": {
|
||||
"idle": { "type": "string" },
|
||||
"listening": { "type": "string" },
|
||||
"thinking": { "type": "string" },
|
||||
"speaking": { "type": "string" },
|
||||
"happy": { "type": "string" },
|
||||
"sad": { "type": "string" },
|
||||
"surprised": { "type": "string" },
|
||||
"error": { "type": "string" }
|
||||
}
|
||||
},
|
||||
|
||||
"vtube_ws_triggers": {
|
||||
"type": "object",
|
||||
"description": "VTube Studio WebSocket actions keyed by event name",
|
||||
"additionalProperties": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"type": { "type": "string", "enum": ["hotkey", "parameter"] },
|
||||
"id": { "type": "string" },
|
||||
"value": { "type": "number" }
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
"custom_rules": {
|
||||
"type": "array",
|
||||
"description": "Trigger/response overrides for specific contexts",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"trigger": { "type": "string" },
|
||||
"response": { "type": "string" },
|
||||
"condition": { "type": "string" }
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
"notes": { "type": "string" }
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Default Character: `aria.json`
|
||||
|
||||
File: `characters/aria.json`
|
||||
|
||||
```json
|
||||
{
|
||||
"schema_version": 1,
|
||||
"name": "aria",
|
||||
"display_name": "Aria",
|
||||
"description": "Default HomeAI assistant persona",
|
||||
|
||||
"system_prompt": "You are Aria, a warm, curious, and helpful AI assistant living in the home. You speak naturally and conversationally — never robotic. You are knowledgeable but never condescending. You remember the people you live with and build on those memories over time. Keep responses concise when controlling smart home devices; be more expressive in casual conversation. Never break character.",
|
||||
|
||||
"model_overrides": {
|
||||
"primary": "llama3.3:70b",
|
||||
"fast": "qwen2.5:7b"
|
||||
},
|
||||
|
||||
"tts": {
|
||||
"engine": "kokoro",
|
||||
"kokoro_voice": "af_heart",
|
||||
"voice_ref_path": null,
|
||||
"speed": 1.0
|
||||
},
|
||||
|
||||
"live2d_expressions": {
|
||||
"idle": "expr_idle",
|
||||
"listening": "expr_listening",
|
||||
"thinking": "expr_thinking",
|
||||
"speaking": "expr_speaking",
|
||||
"happy": "expr_happy",
|
||||
"sad": "expr_sad",
|
||||
"surprised": "expr_surprised",
|
||||
"error": "expr_error"
|
||||
},
|
||||
|
||||
"vtube_ws_triggers": {
|
||||
"thinking": { "type": "hotkey", "id": "expr_thinking" },
|
||||
"speaking": { "type": "hotkey", "id": "expr_speaking" },
|
||||
"idle": { "type": "hotkey", "id": "expr_idle" }
|
||||
},
|
||||
|
||||
"custom_rules": [
|
||||
{
|
||||
"trigger": "good morning",
|
||||
"response": "Good morning! How did you sleep?",
|
||||
"condition": "time_of_day == morning"
|
||||
}
|
||||
],
|
||||
|
||||
"notes": "Default persona. Voice clone to be added once reference audio recorded."
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Character Manager UI
|
||||
|
||||
### Status
|
||||
|
||||
`character-manager.jsx` already exists — needs:
|
||||
1. Schema validation before export (reject malformed JSONs)
|
||||
2. File system integration: save/load from `characters/` directory
|
||||
3. Live preview of system prompt
|
||||
4. Expression mapping UI for Live2D states
|
||||
|
||||
### Tech Stack
|
||||
|
||||
- React + Vite (local dev server, not deployed)
|
||||
- Tailwind CSS (or minimal CSS)
|
||||
- Runs at `http://localhost:5173` during editing
|
||||
|
||||
### File Structure
|
||||
|
||||
```
|
||||
homeai-character/
|
||||
├── src/
|
||||
│ ├── character-manager.jsx ← existing, extend here
|
||||
│ ├── SchemaValidator.js ← validate against character.schema.json
|
||||
│ ├── ExpressionMapper.jsx ← UI for Live2D expression mapping
|
||||
│ └── main.jsx
|
||||
├── schema/
|
||||
│ └── character.schema.json
|
||||
├── characters/
|
||||
│ ├── aria.json ← default character
|
||||
│ └── .gitkeep
|
||||
├── package.json
|
||||
└── vite.config.js
|
||||
```
|
||||
|
||||
### Character Manager Features
|
||||
|
||||
| Feature | Description |
|
||||
|---|---|
|
||||
| Basic info | name, display name, description |
|
||||
| System prompt | Multi-line editor with char count |
|
||||
| Model overrides | Dropdown: primary + fast model |
|
||||
| TTS config | Engine picker, voice selector, speed slider, voice ref path |
|
||||
| Expression mapping | Table: state → VTube hotkey ID |
|
||||
| VTube WS triggers | JSON editor for advanced triggers |
|
||||
| Custom rules | Add/edit/delete trigger-response pairs |
|
||||
| Notes | Free-text notes field |
|
||||
| Export | Validates schema, writes to `characters/<name>.json` |
|
||||
| Import | Load existing character JSON for editing |
|
||||
|
||||
### Schema Validation
|
||||
|
||||
```javascript
|
||||
import Ajv from 'ajv'
|
||||
import schema from '../schema/character.schema.json'
|
||||
|
||||
const ajv = new Ajv()
|
||||
const validate = ajv.compile(schema)
|
||||
|
||||
export function validateCharacter(config) {
|
||||
const valid = validate(config)
|
||||
if (!valid) throw new Error(ajv.errorsText(validate.errors))
|
||||
return true
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Voice Clone Workflow
|
||||
|
||||
1. Record 30–60 seconds of clean speech at `~/voices/<name>-raw.wav`
|
||||
- Quiet room, consistent mic distance, natural conversational tone
|
||||
2. Pre-process: `ffmpeg -i raw.wav -ar 22050 -ac 1 aria.wav`
|
||||
3. Place at `~/voices/aria.wav`
|
||||
4. Update character JSON: `"voice_ref_path": "~/voices/aria.wav"`, `"engine": "chatterbox"`
|
||||
5. Test: run Chatterbox with the reference, verify voice quality
|
||||
6. If unsatisfactory, try Qwen3-TTS as alternative
|
||||
|
||||
---
|
||||
|
||||
## Pipeline Integration
|
||||
|
||||
### How P4 (OpenClaw) loads the character
|
||||
|
||||
```python
|
||||
import json
|
||||
from pathlib import Path
|
||||
|
||||
def load_character(name: str) -> dict:
|
||||
path = Path.home() / ".openclaw" / "characters" / f"{name}.json"
|
||||
config = json.loads(path.read_text())
|
||||
assert config["schema_version"] == 1, "Unsupported schema version"
|
||||
return config
|
||||
|
||||
# System prompt injection
|
||||
character = load_character("aria")
|
||||
system_prompt = character["system_prompt"]
|
||||
# Pass to Ollama as system message
|
||||
```
|
||||
|
||||
OpenClaw hot-reloads the character JSON on file change — no restart required.
|
||||
|
||||
### How P3 selects TTS engine
|
||||
|
||||
```python
|
||||
character = load_character(active_name)
|
||||
tts_cfg = character["tts"]
|
||||
|
||||
if tts_cfg["engine"] == "chatterbox":
|
||||
tts = ChatterboxTTS(voice_ref=tts_cfg["voice_ref_path"])
|
||||
elif tts_cfg["engine"] == "qwen3":
|
||||
tts = Qwen3TTS()
|
||||
else: # kokoro (default)
|
||||
tts = KokoroWyomingClient(voice=tts_cfg.get("kokoro_voice", "af_heart"))
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Implementation Steps
|
||||
|
||||
- [ ] Define and write `schema/character.schema.json` (v1)
|
||||
- [ ] Write `characters/aria.json` — default character with placeholder expression IDs
|
||||
- [ ] Set up Vite project in `src/` (install deps: `npm install`)
|
||||
- [ ] Integrate existing `character-manager.jsx` into new Vite project
|
||||
- [ ] Add schema validation on export (`ajv`)
|
||||
- [ ] Add expression mapping UI section
|
||||
- [ ] Add custom rules editor
|
||||
- [ ] Test full edit → export → validate → load cycle
|
||||
- [ ] Record or source voice reference audio for Aria
|
||||
- [ ] Pre-process audio and test with Chatterbox
|
||||
- [ ] Update `aria.json` with voice clone path if quality is good
|
||||
- [ ] Write `SchemaValidator.js` as standalone utility (used by P4 at runtime too)
|
||||
- [ ] Document schema in `schema/README.md`
|
||||
|
||||
---
|
||||
|
||||
## Success Criteria
|
||||
|
||||
- [ ] `aria.json` validates against `character.schema.json` without errors
|
||||
- [ ] Character Manager UI can load, edit, and export `aria.json`
|
||||
- [ ] OpenClaw loads `aria.json` system prompt and applies it to Ollama requests
|
||||
- [ ] P3 TTS engine selection correctly follows `tts.engine` field
|
||||
- [ ] Schema version check in P4 fails gracefully with a clear error message
|
||||
- [ ] Voice clone sounds natural (if Chatterbox path taken)
|
||||
16
homeai-character/README.md
Normal file
16
homeai-character/README.md
Normal file
@@ -0,0 +1,16 @@
|
||||
# React + Vite
|
||||
|
||||
This template provides a minimal setup to get React working in Vite with HMR and some ESLint rules.
|
||||
|
||||
Currently, two official plugins are available:
|
||||
|
||||
- [@vitejs/plugin-react](https://github.com/vitejs/vite-plugin-react/blob/main/packages/plugin-react) uses [Babel](https://babeljs.io/) (or [oxc](https://oxc.rs) when used in [rolldown-vite](https://vite.dev/guide/rolldown)) for Fast Refresh
|
||||
- [@vitejs/plugin-react-swc](https://github.com/vitejs/vite-plugin-react/blob/main/packages/plugin-react-swc) uses [SWC](https://swc.rs/) for Fast Refresh
|
||||
|
||||
## React Compiler
|
||||
|
||||
The React Compiler is not enabled on this template because of its impact on dev & build performances. To add it, see [this documentation](https://react.dev/learn/react-compiler/installation).
|
||||
|
||||
## Expanding the ESLint configuration
|
||||
|
||||
If you are developing a production application, we recommend using TypeScript with type-aware lint rules enabled. Check out the [TS template](https://github.com/vitejs/vite/tree/main/packages/create-vite/template-react-ts) for information on how to integrate TypeScript and [`typescript-eslint`](https://typescript-eslint.io) in your project.
|
||||
@@ -1,686 +0,0 @@
|
||||
import { useState, useEffect, useCallback } from "react";
|
||||
|
||||
const STORAGE_KEY = "ai-character-profiles";
|
||||
|
||||
const DEFAULT_MODELS = [
|
||||
"llama3.3:70b", "qwen2.5:72b", "mistral-large", "llama3.1:8b",
|
||||
"qwen2.5:14b", "gemma3:27b", "deepseek-r1:14b", "phi4:14b"
|
||||
];
|
||||
|
||||
const TTS_MODELS = ["Kokoro", "Chatterbox", "F5-TTS", "Qwen3-TTS", "Piper"];
|
||||
const STT_MODELS = ["Whisper Large-v3", "Whisper Medium", "Whisper Small", "Whisper Turbo"];
|
||||
const IMAGE_MODELS = ["SDXL", "Flux.1-dev", "Flux.1-schnell", "SD 1.5", "Pony Diffusion"];
|
||||
|
||||
const PERSONALITY_TRAITS = [
|
||||
"Warm", "Witty", "Calm", "Energetic", "Sarcastic", "Nurturing",
|
||||
"Curious", "Playful", "Formal", "Casual", "Empathetic", "Direct",
|
||||
"Creative", "Analytical", "Protective", "Mischievous"
|
||||
];
|
||||
|
||||
const SPEAKING_STYLES = [
|
||||
"Conversational", "Poetic", "Concise", "Verbose", "Academic",
|
||||
"Informal", "Dramatic", "Deadpan", "Enthusiastic", "Measured"
|
||||
];
|
||||
|
||||
const EMPTY_CHARACTER = {
|
||||
id: null,
|
||||
name: "",
|
||||
tagline: "",
|
||||
avatar: "",
|
||||
accentColor: "#7c6fff",
|
||||
personality: {
|
||||
traits: [],
|
||||
speakingStyle: "",
|
||||
coreValues: "",
|
||||
quirks: "",
|
||||
backstory: "",
|
||||
motivation: "",
|
||||
},
|
||||
prompts: {
|
||||
systemPrompt: "",
|
||||
wakeWordResponse: "",
|
||||
fallbackResponse: "",
|
||||
errorResponse: "",
|
||||
customPrompts: [],
|
||||
},
|
||||
models: {
|
||||
llm: "",
|
||||
tts: "",
|
||||
stt: "",
|
||||
imageGen: "",
|
||||
voiceCloneRef: "",
|
||||
ttsSpeed: 1.0,
|
||||
temperature: 0.7,
|
||||
},
|
||||
liveRepresentation: {
|
||||
live2dModel: "",
|
||||
idleExpression: "",
|
||||
speakingExpression: "",
|
||||
thinkingExpression: "",
|
||||
happyExpression: "",
|
||||
vtsTriggers: "",
|
||||
},
|
||||
userNotes: "",
|
||||
createdAt: null,
|
||||
updatedAt: null,
|
||||
};
|
||||
|
||||
const TABS = ["Identity", "Personality", "Prompts", "Models", "Live2D", "Notes"];
|
||||
|
||||
const TAB_ICONS = {
|
||||
Identity: "◈",
|
||||
Personality: "◉",
|
||||
Prompts: "◎",
|
||||
Models: "⬡",
|
||||
Live2D: "◇",
|
||||
Notes: "▣",
|
||||
};
|
||||
|
||||
// Opaque unique-ish id: base-36 timestamp plus a base-36 random suffix.
function generateId() {
  const timePart = Date.now().toString(36);
  const randomPart = Math.random().toString(36).slice(2);
  return timePart + randomPart;
}
|
||||
|
||||
function ColorPicker({ value, onChange }) {
|
||||
const presets = [
|
||||
"#7c6fff","#ff6b9d","#00d4aa","#ff9f43","#48dbfb",
|
||||
"#ff6348","#a29bfe","#fd79a8","#55efc4","#fdcb6e"
|
||||
];
|
||||
return (
|
||||
<div style={{ display: "flex", gap: 8, alignItems: "center", flexWrap: "wrap" }}>
|
||||
{presets.map(c => (
|
||||
<button key={c} onClick={() => onChange(c)} style={{
|
||||
width: 28, height: 28, borderRadius: "50%", background: c, border: value === c ? "3px solid #fff" : "3px solid transparent",
|
||||
cursor: "pointer", outline: "none", boxShadow: value === c ? `0 0 0 2px ${c}` : "none", transition: "all 0.2s"
|
||||
}} />
|
||||
))}
|
||||
<input type="color" value={value} onChange={e => onChange(e.target.value)}
|
||||
style={{ width: 28, height: 28, borderRadius: "50%", border: "none", cursor: "pointer", background: "none", padding: 0 }} />
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
function TagSelector({ options, selected, onChange, max = 6 }) {
|
||||
return (
|
||||
<div style={{ display: "flex", flexWrap: "wrap", gap: 8 }}>
|
||||
{options.map(opt => {
|
||||
const active = selected.includes(opt);
|
||||
return (
|
||||
<button key={opt} onClick={() => {
|
||||
if (active) onChange(selected.filter(s => s !== opt));
|
||||
else if (selected.length < max) onChange([...selected, opt]);
|
||||
}} style={{
|
||||
padding: "5px 14px", borderRadius: 20, fontSize: 13, fontFamily: "inherit",
|
||||
background: active ? "var(--accent)" : "rgba(255,255,255,0.06)",
|
||||
color: active ? "#fff" : "rgba(255,255,255,0.55)",
|
||||
border: active ? "1px solid var(--accent)" : "1px solid rgba(255,255,255,0.1)",
|
||||
cursor: "pointer", transition: "all 0.18s", fontWeight: active ? 600 : 400,
|
||||
}}>
|
||||
{opt}
|
||||
</button>
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
function Field({ label, hint, children }) {
|
||||
return (
|
||||
<div style={{ marginBottom: 22 }}>
|
||||
<label style={{ display: "block", fontSize: 12, fontWeight: 700, letterSpacing: "0.08em", textTransform: "uppercase", color: "rgba(255,255,255,0.45)", marginBottom: 6 }}>
|
||||
{label}
|
||||
</label>
|
||||
{hint && <p style={{ fontSize: 12, color: "rgba(255,255,255,0.3)", marginBottom: 8, marginTop: -2 }}>{hint}</p>}
|
||||
{children}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
function Input({ value, onChange, placeholder, type = "text" }) {
|
||||
return (
|
||||
<input type={type} value={value} onChange={e => onChange(e.target.value)} placeholder={placeholder}
|
||||
style={{
|
||||
width: "100%", background: "rgba(255,255,255,0.05)", border: "1px solid rgba(255,255,255,0.1)",
|
||||
borderRadius: 8, padding: "10px 14px", color: "#fff", fontSize: 14, fontFamily: "inherit",
|
||||
outline: "none", boxSizing: "border-box", transition: "border-color 0.2s",
|
||||
}}
|
||||
onFocus={e => e.target.style.borderColor = "var(--accent)"}
|
||||
onBlur={e => e.target.style.borderColor = "rgba(255,255,255,0.1)"}
|
||||
/>
|
||||
);
|
||||
}
|
||||
|
||||
function Textarea({ value, onChange, placeholder, rows = 4 }) {
|
||||
return (
|
||||
<textarea value={value} onChange={e => onChange(e.target.value)} placeholder={placeholder} rows={rows}
|
||||
style={{
|
||||
width: "100%", background: "rgba(255,255,255,0.05)", border: "1px solid rgba(255,255,255,0.1)",
|
||||
borderRadius: 8, padding: "10px 14px", color: "#fff", fontSize: 14, fontFamily: "inherit",
|
||||
outline: "none", boxSizing: "border-box", resize: "vertical", lineHeight: 1.6,
|
||||
transition: "border-color 0.2s",
|
||||
}}
|
||||
onFocus={e => e.target.style.borderColor = "var(--accent)"}
|
||||
onBlur={e => e.target.style.borderColor = "rgba(255,255,255,0.1)"}
|
||||
/>
|
||||
);
|
||||
}
|
||||
|
||||
function Select({ value, onChange, options, placeholder }) {
|
||||
return (
|
||||
<select value={value} onChange={e => onChange(e.target.value)}
|
||||
style={{
|
||||
width: "100%", background: "rgba(20,20,35,0.95)", border: "1px solid rgba(255,255,255,0.1)",
|
||||
borderRadius: 8, padding: "10px 14px", color: value ? "#fff" : "rgba(255,255,255,0.35)",
|
||||
fontSize: 14, fontFamily: "inherit", outline: "none", cursor: "pointer",
|
||||
appearance: "none", backgroundImage: `url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' width='12' height='8' viewBox='0 0 12 8'%3E%3Cpath d='M1 1l5 5 5-5' stroke='rgba(255,255,255,0.3)' stroke-width='2' fill='none'/%3E%3C/svg%3E")`,
|
||||
backgroundRepeat: "no-repeat", backgroundPosition: "right 14px center",
|
||||
}}>
|
||||
<option value="">{placeholder || "Select..."}</option>
|
||||
{options.map(o => <option key={o} value={o}>{o}</option>)}
|
||||
</select>
|
||||
);
|
||||
}
|
||||
|
||||
function Slider({ value, onChange, min, max, step, label }) {
|
||||
return (
|
||||
<div style={{ display: "flex", alignItems: "center", gap: 14 }}>
|
||||
<input type="range" min={min} max={max} step={step} value={value}
|
||||
onChange={e => onChange(parseFloat(e.target.value))}
|
||||
style={{ flex: 1, accentColor: "var(--accent)", cursor: "pointer" }} />
|
||||
<span style={{ fontSize: 14, color: "rgba(255,255,255,0.7)", minWidth: 38, textAlign: "right", fontVariantNumeric: "tabular-nums" }}>
|
||||
{value.toFixed(1)}
|
||||
</span>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
function CustomPromptsEditor({ prompts, onChange }) {
|
||||
const add = () => onChange([...prompts, { trigger: "", response: "" }]);
|
||||
const remove = i => onChange(prompts.filter((_, idx) => idx !== i));
|
||||
const update = (i, field, val) => {
|
||||
const next = [...prompts];
|
||||
next[i] = { ...next[i], [field]: val };
|
||||
onChange(next);
|
||||
};
|
||||
return (
|
||||
<div>
|
||||
{prompts.map((p, i) => (
|
||||
<div key={i} style={{ background: "rgba(255,255,255,0.04)", borderRadius: 10, padding: 14, marginBottom: 10, position: "relative" }}>
|
||||
<button onClick={() => remove(i)} style={{
|
||||
position: "absolute", top: 10, right: 10, background: "rgba(255,80,80,0.15)",
|
||||
border: "none", color: "#ff6b6b", borderRadius: 6, cursor: "pointer", padding: "2px 8px", fontSize: 12
|
||||
}}>✕</button>
|
||||
<div style={{ marginBottom: 8 }}>
|
||||
<Input value={p.trigger} onChange={v => update(i, "trigger", v)} placeholder="Trigger keyword or context..." />
|
||||
</div>
|
||||
<Textarea value={p.response} onChange={v => update(i, "response", v)} placeholder="Custom response or behaviour..." rows={2} />
|
||||
</div>
|
||||
))}
|
||||
<button onClick={add} style={{
|
||||
width: "100%", padding: "10px", background: "rgba(255,255,255,0.04)",
|
||||
border: "1px dashed rgba(255,255,255,0.15)", borderRadius: 8, color: "rgba(255,255,255,0.45)",
|
||||
cursor: "pointer", fontSize: 13, fontFamily: "inherit", transition: "all 0.2s"
|
||||
}}
|
||||
onMouseEnter={e => e.target.style.borderColor = "var(--accent)"}
|
||||
onMouseLeave={e => e.target.style.borderColor = "rgba(255,255,255,0.15)"}
|
||||
>+ Add Custom Prompt</button>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
function CharacterCard({ character, active, onSelect, onDelete }) {
|
||||
const initials = character.name ? character.name.slice(0, 2).toUpperCase() : "??";
|
||||
return (
|
||||
<div onClick={() => onSelect(character.id)} style={{
|
||||
padding: "14px 16px", borderRadius: 12, cursor: "pointer", marginBottom: 8,
|
||||
background: active ? `linear-gradient(135deg, ${character.accentColor}22, ${character.accentColor}11)` : "rgba(255,255,255,0.04)",
|
||||
border: active ? `1px solid ${character.accentColor}66` : "1px solid rgba(255,255,255,0.07)",
|
||||
transition: "all 0.2s", position: "relative",
|
||||
}}>
|
||||
<div style={{ display: "flex", alignItems: "center", gap: 12 }}>
|
||||
<div style={{
|
||||
width: 40, height: 40, borderRadius: "50%", background: `linear-gradient(135deg, ${character.accentColor}, ${character.accentColor}88)`,
|
||||
display: "flex", alignItems: "center", justifyContent: "center", fontSize: 14, fontWeight: 800,
|
||||
color: "#fff", flexShrink: 0, boxShadow: `0 4px 12px ${character.accentColor}44`
|
||||
}}>{initials}</div>
|
||||
<div style={{ flex: 1, minWidth: 0 }}>
|
||||
<div style={{ fontWeight: 700, fontSize: 15, color: "#fff", whiteSpace: "nowrap", overflow: "hidden", textOverflow: "ellipsis" }}>
|
||||
{character.name || "Unnamed"}
|
||||
</div>
|
||||
{character.tagline && (
|
||||
<div style={{ fontSize: 12, color: "rgba(255,255,255,0.4)", whiteSpace: "nowrap", overflow: "hidden", textOverflow: "ellipsis" }}>
|
||||
{character.tagline}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
<button onClick={e => { e.stopPropagation(); onDelete(character.id); }} style={{
|
||||
background: "none", border: "none", color: "rgba(255,255,255,0.2)", cursor: "pointer",
|
||||
fontSize: 16, padding: "2px 6px", borderRadius: 4, transition: "color 0.15s", flexShrink: 0
|
||||
}}
|
||||
onMouseEnter={e => e.target.style.color = "#ff6b6b"}
|
||||
onMouseLeave={e => e.target.style.color = "rgba(255,255,255,0.2)"}
|
||||
>×</button>
|
||||
</div>
|
||||
{character.personality.traits.length > 0 && (
|
||||
<div style={{ display: "flex", gap: 4, flexWrap: "wrap", marginTop: 10 }}>
|
||||
{character.personality.traits.slice(0, 3).map(t => (
|
||||
<span key={t} style={{
|
||||
fontSize: 10, padding: "2px 8px", borderRadius: 10, fontWeight: 600, letterSpacing: "0.04em",
|
||||
background: `${character.accentColor}22`, color: character.accentColor, border: `1px solid ${character.accentColor}44`
|
||||
}}>{t}</span>
|
||||
))}
|
||||
{character.personality.traits.length > 3 && (
|
||||
<span style={{ fontSize: 10, color: "rgba(255,255,255,0.3)", padding: "2px 4px" }}>+{character.personality.traits.length - 3}</span>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
function ExportModal({ character, onClose }) {
|
||||
const json = JSON.stringify(character, null, 2);
|
||||
const [copied, setCopied] = useState(false);
|
||||
const copy = () => {
|
||||
navigator.clipboard.writeText(json);
|
||||
setCopied(true);
|
||||
setTimeout(() => setCopied(false), 2000);
|
||||
};
|
||||
return (
|
||||
<div style={{
|
||||
position: "fixed", inset: 0, background: "rgba(0,0,0,0.7)", zIndex: 100,
|
||||
display: "flex", alignItems: "center", justifyContent: "center", padding: 24
|
||||
}} onClick={onClose}>
|
||||
<div onClick={e => e.stopPropagation()} style={{
|
||||
background: "#13131f", border: "1px solid rgba(255,255,255,0.1)", borderRadius: 16,
|
||||
padding: 28, width: "100%", maxWidth: 640, maxHeight: "80vh", display: "flex", flexDirection: "column"
|
||||
}}>
|
||||
<div style={{ display: "flex", justifyContent: "space-between", alignItems: "center", marginBottom: 16 }}>
|
||||
<h3 style={{ margin: 0, fontSize: 18, color: "#fff" }}>Export Character</h3>
|
||||
<button onClick={onClose} style={{ background: "none", border: "none", color: "rgba(255,255,255,0.4)", fontSize: 22, cursor: "pointer" }}>×</button>
|
||||
</div>
|
||||
<pre style={{
|
||||
flex: 1, overflow: "auto", background: "rgba(0,0,0,0.3)", borderRadius: 10,
|
||||
padding: 16, fontSize: 12, color: "rgba(255,255,255,0.7)", lineHeight: 1.6, margin: 0
|
||||
}}>{json}</pre>
|
||||
<button onClick={copy} style={{
|
||||
marginTop: 16, padding: "12px", background: "var(--accent)", border: "none",
|
||||
borderRadius: 10, color: "#fff", fontWeight: 700, fontSize: 14, cursor: "pointer",
|
||||
fontFamily: "inherit", transition: "opacity 0.2s"
|
||||
}}>{copied ? "✓ Copied!" : "Copy to Clipboard"}</button>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
export default function CharacterManager() {
|
||||
const [characters, setCharacters] = useState([]);
|
||||
const [activeId, setActiveId] = useState(null);
|
||||
const [activeTab, setActiveTab] = useState("Identity");
|
||||
const [exportModal, setExportModal] = useState(false);
|
||||
const [saved, setSaved] = useState(false);
|
||||
|
||||
// Load from storage
|
||||
useEffect(() => {
|
||||
try {
|
||||
const stored = localStorage.getItem(STORAGE_KEY);
|
||||
if (stored) {
|
||||
const parsed = JSON.parse(stored);
|
||||
setCharacters(parsed);
|
||||
if (parsed.length > 0) setActiveId(parsed[0].id);
|
||||
}
|
||||
} catch (e) {}
|
||||
}, []);
|
||||
|
||||
// Save to storage
|
||||
const saveToStorage = useCallback((chars) => {
|
||||
try {
|
||||
localStorage.setItem(STORAGE_KEY, JSON.stringify(chars));
|
||||
} catch (e) {}
|
||||
}, []);
|
||||
|
||||
const activeCharacter = characters.find(c => c.id === activeId) || null;
|
||||
|
||||
const updateCharacter = (updater) => {
|
||||
setCharacters(prev => {
|
||||
const next = prev.map(c => c.id === activeId ? { ...updater(c), updatedAt: new Date().toISOString() } : c);
|
||||
saveToStorage(next);
|
||||
return next;
|
||||
});
|
||||
setSaved(true);
|
||||
setTimeout(() => setSaved(false), 1500);
|
||||
};
|
||||
|
||||
const createCharacter = () => {
|
||||
const newChar = {
|
||||
...JSON.parse(JSON.stringify(EMPTY_CHARACTER)),
|
||||
id: generateId(),
|
||||
accentColor: ["#7c6fff","#ff6b9d","#00d4aa","#ff9f43","#48dbfb"][Math.floor(Math.random() * 5)],
|
||||
createdAt: new Date().toISOString(),
|
||||
updatedAt: new Date().toISOString(),
|
||||
};
|
||||
const next = [newChar, ...characters];
|
||||
setCharacters(next);
|
||||
setActiveId(newChar.id);
|
||||
setActiveTab("Identity");
|
||||
saveToStorage(next);
|
||||
};
|
||||
|
||||
const deleteCharacter = (id) => {
|
||||
const next = characters.filter(c => c.id !== id);
|
||||
setCharacters(next);
|
||||
saveToStorage(next);
|
||||
if (activeId === id) setActiveId(next.length > 0 ? next[0].id : null);
|
||||
};
|
||||
|
||||
const accentColor = activeCharacter?.accentColor || "#7c6fff";
|
||||
|
||||
const set = (path, value) => {
|
||||
updateCharacter(c => {
|
||||
const parts = path.split(".");
|
||||
const next = JSON.parse(JSON.stringify(c));
|
||||
let obj = next;
|
||||
for (let i = 0; i < parts.length - 1; i++) obj = obj[parts[i]];
|
||||
obj[parts[parts.length - 1]] = value;
|
||||
return next;
|
||||
});
|
||||
};
|
||||
|
||||
const renderTab = () => {
|
||||
if (!activeCharacter) return null;
|
||||
const c = activeCharacter;
|
||||
|
||||
switch (activeTab) {
|
||||
case "Identity":
|
||||
return (
|
||||
<div>
|
||||
<Field label="Character Name">
|
||||
<Input value={c.name} onChange={v => set("name", v)} placeholder="e.g. Aria, Nova, Echo..." />
|
||||
</Field>
|
||||
<Field label="Tagline" hint="A short phrase that captures their essence">
|
||||
<Input value={c.tagline} onChange={v => set("tagline", v)} placeholder="e.g. Your curious, warm-hearted companion" />
|
||||
</Field>
|
||||
<Field label="Accent Color" hint="Used for UI theming and visual identity">
|
||||
<ColorPicker value={c.accentColor} onChange={v => set("accentColor", v)} />
|
||||
</Field>
|
||||
<Field label="Live2D / Avatar Reference" hint="Filename or URL of the character's visual model">
|
||||
<Input value={c.avatar} onChange={v => set("avatar", v)} placeholder="e.g. aria_v2.model3.json" />
|
||||
</Field>
|
||||
<Field label="Backstory" hint="Who are they? Where do they come from? Keep it rich.">
|
||||
<Textarea value={c.personality.backstory} onChange={v => set("personality.backstory", v)}
|
||||
placeholder="Write a detailed origin story, background, and personal history for this character..." rows={5} />
|
||||
</Field>
|
||||
<Field label="Core Motivation" hint="What drives them? What do they care about most?">
|
||||
<Textarea value={c.personality.motivation} onChange={v => set("personality.motivation", v)}
|
||||
placeholder="e.g. A deep desire to help and grow alongside their human companion..." rows={3} />
|
||||
</Field>
|
||||
</div>
|
||||
);
|
||||
|
||||
case "Personality":
|
||||
return (
|
||||
<div>
|
||||
<Field label="Personality Traits" hint={`Select up to 6 traits (${c.personality.traits.length}/6)`}>
|
||||
<TagSelector options={PERSONALITY_TRAITS} selected={c.personality.traits}
|
||||
onChange={v => set("personality.traits", v)} max={6} />
|
||||
</Field>
|
||||
<Field label="Speaking Style">
|
||||
<TagSelector options={SPEAKING_STYLES} selected={c.personality.speakingStyle ? [c.personality.speakingStyle] : []}
|
||||
onChange={v => set("personality.speakingStyle", v[v.length - 1] || "")} max={1} />
|
||||
</Field>
|
||||
<Field label="Core Values" hint="What principles guide their responses and behaviour?">
|
||||
<Textarea value={c.personality.coreValues} onChange={v => set("personality.coreValues", v)}
|
||||
placeholder="e.g. Honesty, kindness, intellectual curiosity, loyalty to their user..." rows={3} />
|
||||
</Field>
|
||||
<Field label="Quirks & Mannerisms" hint="Unique behavioural patterns, phrases, habits that make them feel real">
|
||||
<Textarea value={c.personality.quirks} onChange={v => set("personality.quirks", v)}
|
||||
placeholder="e.g. Tends to use nautical metaphors. Hums softly when thinking. Has strong opinions about tea..." rows={3} />
|
||||
</Field>
|
||||
</div>
|
||||
);
|
||||
|
||||
case "Prompts":
|
||||
return (
|
||||
<div>
|
||||
<Field label="System Prompt" hint="The core instruction set defining who this character is to the LLM">
|
||||
<Textarea value={c.prompts.systemPrompt} onChange={v => set("prompts.systemPrompt", v)}
|
||||
placeholder="You are [name], a [description]. Your personality is [traits]. You speak in a [style] manner. You care deeply about [values]..." rows={8} />
|
||||
</Field>
|
||||
<Field label="Wake Word Response" hint="First response when activated by wake word">
|
||||
<Textarea value={c.prompts.wakeWordResponse} onChange={v => set("prompts.wakeWordResponse", v)}
|
||||
placeholder="e.g. 'Yes? I'm here.' or 'Hmm? What do you need?'" rows={2} />
|
||||
</Field>
|
||||
<Field label="Fallback Response" hint="When the character doesn't understand or can't help">
|
||||
<Textarea value={c.prompts.fallbackResponse} onChange={v => set("prompts.fallbackResponse", v)}
|
||||
placeholder="e.g. 'I'm not sure I follow — could you say that differently?'" rows={2} />
|
||||
</Field>
|
||||
<Field label="Error Response" hint="When something goes wrong technically">
|
||||
<Textarea value={c.prompts.errorResponse} onChange={v => set("prompts.errorResponse", v)}
|
||||
placeholder="e.g. 'Something went wrong on my end. Give me a moment.'" rows={2} />
|
||||
</Field>
|
||||
<Field label="Custom Prompt Rules" hint="Context-specific overrides and triggers">
|
||||
<CustomPromptsEditor prompts={c.prompts.customPrompts}
|
||||
onChange={v => set("prompts.customPrompts", v)} />
|
||||
</Field>
|
||||
</div>
|
||||
);
|
||||
|
||||
case "Models":
|
||||
return (
|
||||
<div>
|
||||
<Field label="LLM (Language Model)" hint="Primary reasoning and conversation model via Ollama">
|
||||
<Select value={c.models.llm} onChange={v => set("models.llm", v)} options={DEFAULT_MODELS} placeholder="Select LLM..." />
|
||||
</Field>
|
||||
<Field label="LLM Temperature" hint="Higher = more creative, lower = more focused">
|
||||
<Slider value={c.models.temperature} onChange={v => set("models.temperature", v)} min={0} max={2} step={0.1} />
|
||||
</Field>
|
||||
<Field label="Text-to-Speech Engine">
|
||||
<Select value={c.models.tts} onChange={v => set("models.tts", v)} options={TTS_MODELS} placeholder="Select TTS..." />
|
||||
</Field>
|
||||
<Field label="TTS Speed">
|
||||
<Slider value={c.models.ttsSpeed} onChange={v => set("models.ttsSpeed", v)} min={0.5} max={2.0} step={0.1} />
|
||||
</Field>
|
||||
<Field label="Voice Clone Reference" hint="Path or filename of reference audio for voice cloning">
|
||||
<Input value={c.models.voiceCloneRef} onChange={v => set("models.voiceCloneRef", v)} placeholder="e.g. /voices/aria_reference.wav" />
|
||||
</Field>
|
||||
<Field label="Speech-to-Text Engine">
|
||||
<Select value={c.models.stt} onChange={v => set("models.stt", v)} options={STT_MODELS} placeholder="Select STT..." />
|
||||
</Field>
|
||||
<Field label="Image Generation Model" hint="Used when character generates images or self-portraits">
|
||||
<Select value={c.models.imageGen} onChange={v => set("models.imageGen", v)} options={IMAGE_MODELS} placeholder="Select image model..." />
|
||||
</Field>
|
||||
</div>
|
||||
);
|
||||
|
||||
case "Live2D":
|
||||
return (
|
||||
<div>
|
||||
<Field label="Live2D Model File" hint="Path to .model3.json file, relative to VTube Studio models folder">
|
||||
<Input value={c.liveRepresentation.live2dModel} onChange={v => set("liveRepresentation.live2dModel", v)} placeholder="e.g. Aria/aria.model3.json" />
|
||||
</Field>
|
||||
<Field label="Idle Expression" hint="VTube Studio expression name when listening/waiting">
|
||||
<Input value={c.liveRepresentation.idleExpression} onChange={v => set("liveRepresentation.idleExpression", v)} placeholder="e.g. idle_blink" />
|
||||
</Field>
|
||||
<Field label="Speaking Expression" hint="Expression triggered when TTS audio is playing">
|
||||
<Input value={c.liveRepresentation.speakingExpression} onChange={v => set("liveRepresentation.speakingExpression", v)} placeholder="e.g. talking_smile" />
|
||||
</Field>
|
||||
<Field label="Thinking Expression" hint="Triggered while LLM is processing a response">
|
||||
<Input value={c.liveRepresentation.thinkingExpression} onChange={v => set("liveRepresentation.thinkingExpression", v)} placeholder="e.g. thinking_tilt" />
|
||||
</Field>
|
||||
<Field label="Happy / Positive Expression" hint="Triggered on positive sentiment responses">
|
||||
<Input value={c.liveRepresentation.happyExpression} onChange={v => set("liveRepresentation.happyExpression", v)} placeholder="e.g. happy_bright" />
|
||||
</Field>
|
||||
<Field label="VTube Studio Custom Triggers" hint="Additional WebSocket API trigger mappings (JSON)">
|
||||
<Textarea value={c.liveRepresentation.vtsTriggers} onChange={v => set("liveRepresentation.vtsTriggers", v)}
|
||||
placeholder={'{\n "on_error": "expression_concerned",\n "on_wake": "expression_alert"\n}'} rows={5} />
|
||||
</Field>
|
||||
</div>
|
||||
);
|
||||
|
||||
case "Notes":
|
||||
return (
|
||||
<div>
|
||||
<Field label="Developer Notes" hint="Freeform notes, ideas, todos, and observations about this character">
|
||||
<Textarea value={c.userNotes} onChange={v => set("userNotes", v)}
|
||||
placeholder={"Ideas, observations, things to try...\n\n- Voice reference sounds slightly too formal, adjust Chatterbox guidance scale\n- Try adding more nautical metaphors to system prompt\n- Need to map 'confused' expression in VTS\n- Consider adding weather awareness skill"}
|
||||
rows={16} />
|
||||
</Field>
|
||||
<div style={{ background: "rgba(255,255,255,0.03)", borderRadius: 10, padding: 16, fontSize: 12, color: "rgba(255,255,255,0.35)", lineHeight: 1.7 }}>
|
||||
<div style={{ marginBottom: 4, fontWeight: 700, color: "rgba(255,255,255,0.45)", letterSpacing: "0.06em", textTransform: "uppercase", fontSize: 11 }}>Character Info</div>
|
||||
<div>ID: <span style={{ color: "rgba(255,255,255,0.5)", fontFamily: "monospace" }}>{c.id}</span></div>
|
||||
{c.createdAt && <div>Created: {new Date(c.createdAt).toLocaleString()}</div>}
|
||||
{c.updatedAt && <div>Updated: {new Date(c.updatedAt).toLocaleString()}</div>}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
|
||||
default:
|
||||
return null;
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<div style={{
|
||||
"--accent": accentColor,
|
||||
minHeight: "100vh",
|
||||
background: "#0d0d18",
|
||||
color: "#fff",
|
||||
fontFamily: "'DM Sans', 'Segoe UI', system-ui, sans-serif",
|
||||
display: "flex",
|
||||
flexDirection: "column",
|
||||
}}>
|
||||
<style>{`
|
||||
@import url('https://fonts.googleapis.com/css2?family=DM+Sans:wght@400;500;600;700;800&family=DM+Mono:wght@400;500&display=swap');
|
||||
* { box-sizing: border-box; }
|
||||
::-webkit-scrollbar { width: 6px; }
|
||||
::-webkit-scrollbar-track { background: transparent; }
|
||||
::-webkit-scrollbar-thumb { background: rgba(255,255,255,0.1); border-radius: 3px; }
|
||||
input::placeholder, textarea::placeholder { color: rgba(255,255,255,0.2); }
|
||||
select option { background: #13131f; }
|
||||
`}</style>
|
||||
|
||||
{/* Header */}
|
||||
<div style={{
|
||||
padding: "18px 28px", borderBottom: "1px solid rgba(255,255,255,0.06)",
|
||||
display: "flex", alignItems: "center", justifyContent: "space-between",
|
||||
background: "rgba(0,0,0,0.2)", backdropFilter: "blur(10px)",
|
||||
position: "sticky", top: 0, zIndex: 10,
|
||||
}}>
|
||||
<div style={{ display: "flex", alignItems: "center", gap: 14 }}>
|
||||
<div style={{
|
||||
width: 36, height: 36, borderRadius: 10,
|
||||
background: `linear-gradient(135deg, ${accentColor}, ${accentColor}88)`,
|
||||
display: "flex", alignItems: "center", justifyContent: "center", fontSize: 18,
|
||||
boxShadow: `0 4px 16px ${accentColor}44`
|
||||
}}>◈</div>
|
||||
<div>
|
||||
<div style={{ fontWeight: 800, fontSize: 17, letterSpacing: "-0.01em" }}>Character Manager</div>
|
||||
<div style={{ fontSize: 12, color: "rgba(255,255,255,0.35)" }}>AI Personality Configuration</div>
|
||||
</div>
|
||||
</div>
|
||||
<div style={{ display: "flex", gap: 10, alignItems: "center" }}>
|
||||
{saved && <span style={{ fontSize: 12, color: accentColor, fontWeight: 600 }}>✓ Saved</span>}
|
||||
{activeCharacter && (
|
||||
<button onClick={() => setExportModal(true)} style={{
|
||||
padding: "8px 16px", background: "rgba(255,255,255,0.07)", border: "1px solid rgba(255,255,255,0.12)",
|
||||
borderRadius: 8, color: "rgba(255,255,255,0.7)", fontSize: 13, cursor: "pointer",
|
||||
fontFamily: "inherit", fontWeight: 600, transition: "all 0.2s"
|
||||
}}>Export JSON</button>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div style={{ display: "flex", flex: 1, overflow: "hidden" }}>
|
||||
{/* Sidebar */}
|
||||
<div style={{
|
||||
width: 260, borderRight: "1px solid rgba(255,255,255,0.06)",
|
||||
display: "flex", flexDirection: "column", background: "rgba(0,0,0,0.15)",
|
||||
flexShrink: 0,
|
||||
}}>
|
||||
<div style={{ padding: "16px 16px 8px" }}>
|
||||
<button onClick={createCharacter} style={{
|
||||
width: "100%", padding: "11px", background: `linear-gradient(135deg, ${accentColor}cc, ${accentColor}88)`,
|
||||
border: "none", borderRadius: 10, color: "#fff", fontWeight: 700, fontSize: 14,
|
||||
cursor: "pointer", fontFamily: "inherit", transition: "opacity 0.2s",
|
||||
boxShadow: `0 4px 16px ${accentColor}33`
|
||||
}}>+ New Character</button>
|
||||
</div>
|
||||
<div style={{ flex: 1, overflowY: "auto", padding: "4px 16px 16px" }}>
|
||||
{characters.length === 0 ? (
|
||||
<div style={{ textAlign: "center", padding: "40px 16px", color: "rgba(255,255,255,0.2)", fontSize: 13, lineHeight: 1.6 }}>
|
||||
No characters yet.<br />Create your first one above.
|
||||
</div>
|
||||
) : (
|
||||
characters.map(c => (
|
||||
<CharacterCard key={c.id} character={c} active={c.id === activeId}
|
||||
onSelect={setActiveId} onDelete={deleteCharacter} />
|
||||
))
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Main editor */}
|
||||
{activeCharacter ? (
|
||||
<div style={{ flex: 1, display: "flex", flexDirection: "column", overflow: "hidden" }}>
|
||||
{/* Character header */}
|
||||
<div style={{
|
||||
padding: "20px 28px 0", borderBottom: "1px solid rgba(255,255,255,0.06)",
|
||||
background: `linear-gradient(180deg, ${accentColor}0a 0%, transparent 100%)`,
|
||||
}}>
|
||||
<div style={{ display: "flex", alignItems: "center", gap: 16, marginBottom: 18 }}>
|
||||
<div style={{
|
||||
width: 52, height: 52, borderRadius: 16, flexShrink: 0,
|
||||
background: `linear-gradient(135deg, ${accentColor}, ${accentColor}66)`,
|
||||
display: "flex", alignItems: "center", justifyContent: "center",
|
||||
fontSize: 20, fontWeight: 800, boxShadow: `0 6px 20px ${accentColor}44`
|
||||
}}>
|
||||
{activeCharacter.name ? activeCharacter.name.slice(0, 2).toUpperCase() : "??"}
|
||||
</div>
|
||||
<div>
|
||||
<div style={{ fontSize: 22, fontWeight: 800, letterSpacing: "-0.02em", lineHeight: 1.2 }}>
|
||||
{activeCharacter.name || <span style={{ color: "rgba(255,255,255,0.25)" }}>Unnamed Character</span>}
|
||||
</div>
|
||||
{activeCharacter.tagline && (
|
||||
<div style={{ fontSize: 14, color: "rgba(255,255,255,0.45)", marginTop: 2 }}>{activeCharacter.tagline}</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
{/* Tabs */}
|
||||
<div style={{ display: "flex", gap: 2 }}>
|
||||
{TABS.map(tab => (
|
||||
<button key={tab} onClick={() => setActiveTab(tab)} style={{
|
||||
padding: "9px 16px", background: "none", border: "none",
|
||||
borderBottom: activeTab === tab ? `2px solid ${accentColor}` : "2px solid transparent",
|
||||
color: activeTab === tab ? "#fff" : "rgba(255,255,255,0.4)",
|
||||
fontSize: 13, fontWeight: activeTab === tab ? 700 : 500,
|
||||
cursor: "pointer", fontFamily: "inherit", transition: "all 0.18s",
|
||||
display: "flex", alignItems: "center", gap: 6,
|
||||
}}>
|
||||
<span style={{ fontSize: 11 }}>{TAB_ICONS[tab]}</span>{tab}
|
||||
</button>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Tab content */}
|
||||
<div style={{ flex: 1, overflowY: "auto", padding: "24px 28px" }}>
|
||||
{renderTab()}
|
||||
</div>
|
||||
</div>
|
||||
) : (
|
||||
<div style={{
|
||||
flex: 1, display: "flex", alignItems: "center", justifyContent: "center",
|
||||
flexDirection: "column", gap: 16, color: "rgba(255,255,255,0.2)"
|
||||
}}>
|
||||
<div style={{ fontSize: 64, opacity: 0.3 }}>◈</div>
|
||||
<div style={{ fontSize: 16, fontWeight: 600 }}>No character selected</div>
|
||||
<div style={{ fontSize: 13 }}>Create a new character to get started</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{exportModal && activeCharacter && (
|
||||
<ExportModal character={activeCharacter} onClose={() => setExportModal(false)} />
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
29
homeai-character/eslint.config.js
Normal file
29
homeai-character/eslint.config.js
Normal file
@@ -0,0 +1,29 @@
|
||||
// Flat-config ESLint setup (ESLint 9 style) for the Vite + React dashboard.
import js from '@eslint/js'
import globals from 'globals'
import reactHooks from 'eslint-plugin-react-hooks'
import reactRefresh from 'eslint-plugin-react-refresh'
import { defineConfig, globalIgnores } from 'eslint/config'

export default defineConfig([
  // Never lint build output.
  globalIgnores(['dist']),
  {
    files: ['**/*.{js,jsx}'],
    extends: [
      js.configs.recommended,
      reactHooks.configs.flat.recommended,
      reactRefresh.configs.vite,
    ],
    languageOptions: {
      ecmaVersion: 2020,
      globals: globals.browser,
      parserOptions: {
        ecmaVersion: 'latest',
        ecmaFeatures: { jsx: true },
        sourceType: 'module',
      },
    },
    rules: {
      // Allow intentionally-unused bindings whose names start with an
      // uppercase letter or underscore (constants, components kept around).
      'no-unused-vars': ['error', { varsIgnorePattern: '^[A-Z_]' }],
    },
  },
])
|
||||
13
homeai-character/index.html
Normal file
13
homeai-character/index.html
Normal file
@@ -0,0 +1,13 @@
|
||||
<!doctype html>
<html lang="en">
  <head>
    <meta charset="UTF-8" />
    <link rel="icon" type="image/svg+xml" href="/vite.svg" />
    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
    <title>HomeAI Dashboard</title>
  </head>
  <!-- Tailwind utility classes on <body>; presumably the Tailwind layer is
       loaded via src/main.jsx (@tailwindcss/vite) — confirm against the entry. -->
  <body class="bg-gray-950 text-gray-100">
    <!-- React mount point -->
    <div id="root"></div>
    <script type="module" src="/src/main.jsx"></script>
  </body>
</html>
|
||||
@@ -0,0 +1,38 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<!-- launchd job: keeps the character-dashboard Vite server running at login.
     NOTE(review): this runs `npx vite` (the dev server) under KeepAlive; for
     an always-on deployment consider `vite build` + a static file server. -->
<plist version="1.0">
<dict>
  <key>Label</key>
  <string>com.homeai.character-dashboard</string>

  <!-- Equivalent command line: npx vite --host --port 5173 -->
  <key>ProgramArguments</key>
  <array>
    <string>/opt/homebrew/bin/npx</string>
    <string>vite</string>
    <string>--host</string>
    <string>--port</string>
    <string>5173</string>
  </array>

  <!-- Project checkout the dev server serves from. -->
  <key>WorkingDirectory</key>
  <string>/Users/aodhan/gitea/homeai/homeai-character</string>

  <!-- launchd provides a minimal environment; PATH/HOME are set explicitly
       so npx can find node and its cache. -->
  <key>EnvironmentVariables</key>
  <dict>
    <key>PATH</key>
    <string>/opt/homebrew/bin:/usr/local/bin:/usr/bin:/bin</string>
    <key>HOME</key>
    <string>/Users/aodhan</string>
  </dict>

  <!-- Start at load and restart the process whenever it exits. -->
  <key>RunAtLoad</key>
  <true/>
  <key>KeepAlive</key>
  <true/>

  <!-- stdout/stderr captured to /tmp for debugging. -->
  <key>StandardOutPath</key>
  <string>/tmp/homeai-character-dashboard.log</string>
  <key>StandardErrorPath</key>
  <string>/tmp/homeai-character-dashboard-error.log</string>
</dict>
</plist>
|
||||
3397
homeai-character/package-lock.json
generated
Normal file
3397
homeai-character/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
34
homeai-character/package.json
Normal file
34
homeai-character/package.json
Normal file
@@ -0,0 +1,34 @@
|
||||
{
|
||||
"name": "homeai-character",
|
||||
"private": true,
|
||||
"version": "0.0.0",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "vite",
|
||||
"build": "vite build",
|
||||
"lint": "eslint .",
|
||||
"preview": "vite preview"
|
||||
},
|
||||
"dependencies": {
|
||||
"@tailwindcss/vite": "^4.2.1",
|
||||
"ajv": "^8.18.0",
|
||||
"react": "^19.2.0",
|
||||
"react-dom": "^19.2.0",
|
||||
"react-router-dom": "^7.13.1",
|
||||
"tailwindcss": "^4.2.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@eslint/js": "^9.39.1",
|
||||
"@types/react": "^19.2.7",
|
||||
"@types/react-dom": "^19.2.3",
|
||||
"@vitejs/plugin-react": "^5.1.1",
|
||||
"eslint": "^9.39.1",
|
||||
"eslint-plugin-react-hooks": "^7.0.1",
|
||||
"eslint-plugin-react-refresh": "^0.4.24",
|
||||
"globals": "^16.5.0",
|
||||
"vite": "^8.0.0-beta.13"
|
||||
},
|
||||
"overrides": {
|
||||
"vite": "^8.0.0-beta.13"
|
||||
}
|
||||
}
|
||||
1
homeai-character/public/vite.svg
Normal file
1
homeai-character/public/vite.svg
Normal file
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" role="img" class="iconify iconify--logos" width="31.88" height="32" preserveAspectRatio="xMidYMid meet" viewBox="0 0 256 257"><defs><linearGradient id="IconifyId1813088fe1fbc01fb466" x1="-.828%" x2="57.636%" y1="7.652%" y2="78.411%"><stop offset="0%" stop-color="#41D1FF"></stop><stop offset="100%" stop-color="#BD34FE"></stop></linearGradient><linearGradient id="IconifyId1813088fe1fbc01fb467" x1="43.376%" x2="50.316%" y1="2.242%" y2="89.03%"><stop offset="0%" stop-color="#FFEA83"></stop><stop offset="8.333%" stop-color="#FFDD35"></stop><stop offset="100%" stop-color="#FFA800"></stop></linearGradient></defs><path fill="url(#IconifyId1813088fe1fbc01fb466)" d="M255.153 37.938L134.897 252.976c-2.483 4.44-8.862 4.466-11.382.048L.875 37.958c-2.746-4.814 1.371-10.646 6.827-9.67l120.385 21.517a6.537 6.537 0 0 0 2.322-.004l117.867-21.483c5.438-.991 9.574 4.796 6.877 9.62Z"></path><path fill="url(#IconifyId1813088fe1fbc01fb467)" d="M185.432.063L96.44 17.501a3.268 3.268 0 0 0-2.634 3.014l-5.474 92.456a3.268 3.268 0 0 0 3.997 3.378l24.777-5.718c2.318-.535 4.413 1.507 3.936 3.838l-7.361 36.047c-.495 2.426 1.782 4.5 4.151 3.78l15.304-4.649c2.372-.72 4.652 1.36 4.15 3.788l-11.698 56.621c-.732 3.542 3.979 5.473 5.943 2.437l1.313-2.028l72.516-144.72c1.215-2.423-.88-5.186-3.54-4.672l-25.505 4.922c-2.396.462-4.435-1.77-3.759-4.114l16.646-57.705c.677-2.35-1.37-4.583-3.769-4.113Z"></path></svg>
|
||||
|
After Width: | Height: | Size: 1.5 KiB |
82
homeai-character/schema/character.schema.json
Normal file
82
homeai-character/schema/character.schema.json
Normal file
@@ -0,0 +1,82 @@
|
||||
{
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"title": "HomeAI Character Config",
|
||||
"version": "1",
|
||||
"type": "object",
|
||||
"required": ["schema_version", "name", "system_prompt", "tts"],
|
||||
"properties": {
|
||||
"schema_version": { "type": "integer", "const": 1 },
|
||||
"name": { "type": "string" },
|
||||
"display_name": { "type": "string" },
|
||||
"description": { "type": "string" },
|
||||
|
||||
"system_prompt": { "type": "string" },
|
||||
|
||||
"model_overrides": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"primary": { "type": "string" },
|
||||
"fast": { "type": "string" }
|
||||
}
|
||||
},
|
||||
|
||||
"tts": {
|
||||
"type": "object",
|
||||
"required": ["engine"],
|
||||
"properties": {
|
||||
"engine": {
|
||||
"type": "string",
|
||||
"enum": ["kokoro", "chatterbox", "qwen3", "elevenlabs"]
|
||||
},
|
||||
"voice_ref_path": { "type": "string" },
|
||||
"kokoro_voice": { "type": "string" },
|
||||
"elevenlabs_voice_id": { "type": "string" },
|
||||
"elevenlabs_model": { "type": "string", "default": "eleven_monolingual_v1" },
|
||||
"speed": { "type": "number", "default": 1.0 }
|
||||
}
|
||||
},
|
||||
|
||||
"live2d_expressions": {
|
||||
"type": "object",
|
||||
"description": "Maps semantic state to VTube Studio hotkey ID",
|
||||
"properties": {
|
||||
"idle": { "type": "string" },
|
||||
"listening": { "type": "string" },
|
||||
"thinking": { "type": "string" },
|
||||
"speaking": { "type": "string" },
|
||||
"happy": { "type": "string" },
|
||||
"sad": { "type": "string" },
|
||||
"surprised": { "type": "string" },
|
||||
"error": { "type": "string" }
|
||||
}
|
||||
},
|
||||
|
||||
"vtube_ws_triggers": {
|
||||
"type": "object",
|
||||
"description": "VTube Studio WebSocket actions keyed by event name",
|
||||
"additionalProperties": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"type": { "type": "string", "enum": ["hotkey", "parameter"] },
|
||||
"id": { "type": "string" },
|
||||
"value": { "type": "number" }
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
"custom_rules": {
|
||||
"type": "array",
|
||||
"description": "Trigger/response overrides for specific contexts",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"trigger": { "type": "string" },
|
||||
"response": { "type": "string" },
|
||||
"condition": { "type": "string" }
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
"notes": { "type": "string" }
|
||||
}
|
||||
}
|
||||
@@ -1,55 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
# homeai-character/setup.sh — P5: Character Manager + persona JSON
|
||||
#
|
||||
# Components:
|
||||
# - character.schema.json — v1 character config schema
|
||||
# - aria.json — default character config
|
||||
# - Character Manager UI — Vite/React app for editing (dev server :5173)
|
||||
#
|
||||
# No hard runtime dependencies (can be developed standalone).
|
||||
# Output (aria.json) is consumed by P3, P4, P7.
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
REPO_DIR="$(cd "${SCRIPT_DIR}/.." && pwd)"
|
||||
source "${REPO_DIR}/scripts/common.sh"
|
||||
|
||||
log_section "P5: Character Manager"
|
||||
detect_platform
|
||||
|
||||
# ─── Prerequisite check ────────────────────────────────────────────────────────
|
||||
log_info "Checking prerequisites..."
|
||||
|
||||
if ! command_exists node; then
|
||||
log_warn "Node.js not found — required for Character Manager UI"
|
||||
log_warn "Install: https://nodejs.org (v18+ recommended)"
|
||||
fi
|
||||
|
||||
# ─── TODO: Implementation ──────────────────────────────────────────────────────
|
||||
cat <<'EOF'
|
||||
|
||||
┌─────────────────────────────────────────────────────────────────┐
|
||||
│ P5: homeai-character — NOT YET IMPLEMENTED │
|
||||
│ │
|
||||
│ Implementation steps: │
|
||||
│ 1. Create schema/character.schema.json (v1) │
|
||||
│ 2. Create characters/aria.json (default persona) │
|
||||
│ 3. Set up Vite/React project in src/ │
|
||||
│ 4. Extend character-manager.jsx with full UI │
|
||||
│ 5. Add schema validation (ajv) │
|
||||
│ 6. Add expression mapper UI for Live2D │
|
||||
│ 7. Wire export to ~/.openclaw/characters/ │
|
||||
│ │
|
||||
│ Dev server: │
|
||||
│ cd homeai-character && npm run dev → http://localhost:5173 │
|
||||
│ │
|
||||
│ Interface contracts: │
|
||||
│ Output: ~/.openclaw/characters/<name>.json │
|
||||
│ Schema: homeai-character/schema/character.schema.json │
|
||||
└─────────────────────────────────────────────────────────────────┘
|
||||
|
||||
EOF
|
||||
|
||||
log_info "P5 is not yet implemented. See homeai-character/PLAN.md for details."
|
||||
exit 0
|
||||
22
homeai-character/src/App.css
Normal file
22
homeai-character/src/App.css
Normal file
@@ -0,0 +1,22 @@
|
||||
/* Scrollbar styling for dark theme */
|
||||
::-webkit-scrollbar {
|
||||
width: 8px;
|
||||
}
|
||||
|
||||
::-webkit-scrollbar-track {
|
||||
background: #0a0a0f;
|
||||
}
|
||||
|
||||
::-webkit-scrollbar-thumb {
|
||||
background: #374151;
|
||||
border-radius: 4px;
|
||||
}
|
||||
|
||||
::-webkit-scrollbar-thumb:hover {
|
||||
background: #4b5563;
|
||||
}
|
||||
|
||||
/* Selection color */
|
||||
::selection {
|
||||
background: rgba(99, 102, 241, 0.3);
|
||||
}
|
||||
112
homeai-character/src/App.jsx
Normal file
112
homeai-character/src/App.jsx
Normal file
@@ -0,0 +1,112 @@
|
||||
import { BrowserRouter, Routes, Route, NavLink } from 'react-router-dom';
|
||||
import ServiceStatus from './ServiceStatus';
|
||||
import CharacterProfiles from './CharacterProfiles';
|
||||
import CharacterManager from './CharacterManager';
|
||||
|
||||
function NavItem({ to, children, icon }) {
|
||||
return (
|
||||
<NavLink
|
||||
to={to}
|
||||
className={({ isActive }) =>
|
||||
`flex items-center gap-3 px-4 py-2.5 rounded-lg text-sm font-medium transition-colors ${
|
||||
isActive
|
||||
? 'bg-gray-800 text-white'
|
||||
: 'text-gray-400 hover:text-gray-200 hover:bg-gray-800/50'
|
||||
}`
|
||||
}
|
||||
>
|
||||
{icon}
|
||||
<span>{children}</span>
|
||||
</NavLink>
|
||||
);
|
||||
}
|
||||
|
||||
function Layout({ children }) {
|
||||
return (
|
||||
<div className="min-h-screen bg-gray-950 flex">
|
||||
{/* Sidebar */}
|
||||
<aside className="w-64 bg-gray-900 border-r border-gray-800 flex flex-col fixed h-full">
|
||||
{/* Logo */}
|
||||
<div className="px-6 py-5 border-b border-gray-800">
|
||||
<div className="flex items-center gap-3">
|
||||
<div className="w-9 h-9 rounded-lg bg-gradient-to-br from-indigo-500 to-purple-600 flex items-center justify-center">
|
||||
<svg className="w-5 h-5 text-white" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={2}>
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M2.25 12l8.954-8.955c.44-.439 1.152-.439 1.591 0L21.75 12M4.5 9.75v10.125c0 .621.504 1.125 1.125 1.125H9.75v-4.875c0-.621.504-1.125 1.125-1.125h2.25c.621 0 1.125.504 1.125 1.125V21h4.125c.621 0 1.125-.504 1.125-1.125V9.75M8.25 21h8.25" />
|
||||
</svg>
|
||||
</div>
|
||||
<div>
|
||||
<h1 className="text-lg font-bold text-white tracking-tight">HomeAI</h1>
|
||||
<p className="text-xs text-gray-500">LINDBLUM</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Nav */}
|
||||
<nav className="flex-1 px-3 py-4 space-y-1">
|
||||
<NavItem
|
||||
to="/"
|
||||
icon={
|
||||
<svg className="w-5 h-5" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={1.5}>
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M3.75 6A2.25 2.25 0 016 3.75h2.25A2.25 2.25 0 0110.5 6v2.25a2.25 2.25 0 01-2.25 2.25H6a2.25 2.25 0 01-2.25-2.25V6zM3.75 15.75A2.25 2.25 0 016 13.5h2.25a2.25 2.25 0 012.25 2.25V18a2.25 2.25 0 01-2.25 2.25H6A2.25 2.25 0 013.75 18v-2.25zM13.5 6a2.25 2.25 0 012.25-2.25H18A2.25 2.25 0 0120.25 6v2.25A2.25 2.25 0 0118 10.5h-2.25a2.25 2.25 0 01-2.25-2.25V6zM13.5 15.75a2.25 2.25 0 012.25-2.25H18a2.25 2.25 0 012.25 2.25V18A2.25 2.25 0 0118 20.25h-2.25A2.25 2.25 0 0113.5 18v-2.25z" />
|
||||
</svg>
|
||||
}
|
||||
>
|
||||
Dashboard
|
||||
</NavItem>
|
||||
|
||||
<NavItem
|
||||
to="/characters"
|
||||
icon={
|
||||
<svg className="w-5 h-5" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={1.5}>
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M15.75 6a3.75 3.75 0 11-7.5 0 3.75 3.75 0 017.5 0zM4.501 20.118a7.5 7.5 0 0114.998 0A17.933 17.933 0 0112 21.75c-2.676 0-5.216-.584-7.499-1.632z" />
|
||||
</svg>
|
||||
}
|
||||
>
|
||||
Characters
|
||||
</NavItem>
|
||||
|
||||
<NavItem
|
||||
to="/editor"
|
||||
icon={
|
||||
<svg className="w-5 h-5" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={1.5}>
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M9.594 3.94c.09-.542.56-.94 1.11-.94h2.593c.55 0 1.02.398 1.11.94l.213 1.281c.063.374.313.686.645.87.074.04.147.083.22.127.324.196.72.257 1.075.124l1.217-.456a1.125 1.125 0 011.37.49l1.296 2.247a1.125 1.125 0 01-.26 1.431l-1.003.827c-.293.24-.438.613-.431.992a6.759 6.759 0 010 .255c-.007.378.138.75.43.99l1.005.828c.424.35.534.954.26 1.43l-1.298 2.247a1.125 1.125 0 01-1.369.491l-1.217-.456c-.355-.133-.75-.072-1.076.124a6.57 6.57 0 01-.22.128c-.331.183-.581.495-.644.869l-.213 1.28c-.09.543-.56.941-1.11.941h-2.594c-.55 0-1.02-.398-1.11-.94l-.213-1.281c-.062-.374-.312-.686-.644-.87a6.52 6.52 0 01-.22-.127c-.325-.196-.72-.257-1.076-.124l-1.217.456a1.125 1.125 0 01-1.369-.49l-1.297-2.247a1.125 1.125 0 01.26-1.431l1.004-.827c.292-.24.437-.613.43-.992a6.932 6.932 0 010-.255c.007-.378-.138-.75-.43-.99l-1.004-.828a1.125 1.125 0 01-.26-1.43l1.297-2.247a1.125 1.125 0 011.37-.491l1.216.456c.356.133.751.072 1.076-.124.072-.044.146-.087.22-.128.332-.183.582-.495.644-.869l.214-1.281z" />
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M15 12a3 3 0 11-6 0 3 3 0 016 0z" />
|
||||
</svg>
|
||||
}
|
||||
>
|
||||
Editor
|
||||
</NavItem>
|
||||
</nav>
|
||||
|
||||
{/* Footer */}
|
||||
<div className="px-6 py-4 border-t border-gray-800">
|
||||
<p className="text-xs text-gray-600">HomeAI v0.1.0</p>
|
||||
<p className="text-xs text-gray-700">Mac Mini M4 Pro</p>
|
||||
</div>
|
||||
</aside>
|
||||
|
||||
{/* Main content */}
|
||||
<main className="flex-1 ml-64 p-8">
|
||||
<div className="max-w-6xl mx-auto">
|
||||
{children}
|
||||
</div>
|
||||
</main>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
function App() {
|
||||
return (
|
||||
<BrowserRouter>
|
||||
<Layout>
|
||||
<Routes>
|
||||
<Route path="/" element={<ServiceStatus />} />
|
||||
<Route path="/characters" element={<CharacterProfiles />} />
|
||||
<Route path="/editor" element={<CharacterManager />} />
|
||||
</Routes>
|
||||
</Layout>
|
||||
</BrowserRouter>
|
||||
);
|
||||
}
|
||||
|
||||
export default App;
|
||||
585
homeai-character/src/CharacterManager.jsx
Normal file
585
homeai-character/src/CharacterManager.jsx
Normal file
@@ -0,0 +1,585 @@
|
||||
import React, { useState, useEffect, useRef } from 'react';
|
||||
import { validateCharacter } from './SchemaValidator';
|
||||
|
||||
const DEFAULT_CHARACTER = {
|
||||
schema_version: 1,
|
||||
name: "aria",
|
||||
display_name: "Aria",
|
||||
description: "Default HomeAI assistant persona",
|
||||
system_prompt: "You are Aria, a warm, curious, and helpful AI assistant living in the home. You speak naturally and conversationally — never robotic. You are knowledgeable but never condescending. You remember the people you live with and build on those memories over time. Keep responses concise when controlling smart home devices; be more expressive in casual conversation. Never break character.",
|
||||
model_overrides: {
|
||||
primary: "llama3.3:70b",
|
||||
fast: "qwen2.5:7b"
|
||||
},
|
||||
tts: {
|
||||
engine: "kokoro",
|
||||
kokoro_voice: "af_heart",
|
||||
speed: 1.0
|
||||
},
|
||||
live2d_expressions: {
|
||||
idle: "expr_idle",
|
||||
listening: "expr_listening",
|
||||
thinking: "expr_thinking",
|
||||
speaking: "expr_speaking",
|
||||
happy: "expr_happy",
|
||||
sad: "expr_sad",
|
||||
surprised: "expr_surprised",
|
||||
error: "expr_error"
|
||||
},
|
||||
vtube_ws_triggers: {
|
||||
thinking: { type: "hotkey", id: "expr_thinking" },
|
||||
speaking: { type: "hotkey", id: "expr_speaking" },
|
||||
idle: { type: "hotkey", id: "expr_idle" }
|
||||
},
|
||||
custom_rules: [
|
||||
{ trigger: "good morning", response: "Good morning! How did you sleep?", condition: "time_of_day == morning" }
|
||||
],
|
||||
notes: ""
|
||||
};
|
||||
|
||||
export default function CharacterManager() {
|
||||
const [character, setCharacter] = useState(() => {
|
||||
// Check if we're editing from profiles page
|
||||
const editData = sessionStorage.getItem('edit_character');
|
||||
if (editData) {
|
||||
sessionStorage.removeItem('edit_character');
|
||||
try {
|
||||
return JSON.parse(editData);
|
||||
} catch {
|
||||
return DEFAULT_CHARACTER;
|
||||
}
|
||||
}
|
||||
return DEFAULT_CHARACTER;
|
||||
});
|
||||
const [error, setError] = useState(null);
|
||||
const [saved, setSaved] = useState(false);
|
||||
|
||||
// TTS preview state
|
||||
const [ttsState, setTtsState] = useState('idle'); // idle | loading | playing
|
||||
const [previewText, setPreviewText] = useState('');
|
||||
const audioRef = useRef(null);
|
||||
const objectUrlRef = useRef(null);
|
||||
|
||||
// ElevenLabs state
|
||||
const [elevenLabsApiKey, setElevenLabsApiKey] = useState(localStorage.getItem('elevenlabs_api_key') || '');
|
||||
const [elevenLabsVoices, setElevenLabsVoices] = useState([]);
|
||||
const [elevenLabsModels, setElevenLabsModels] = useState([]);
|
||||
const [isLoadingElevenLabs, setIsLoadingElevenLabs] = useState(false);
|
||||
|
||||
const fetchElevenLabsData = async (key) => {
|
||||
if (!key) return;
|
||||
setIsLoadingElevenLabs(true);
|
||||
try {
|
||||
const headers = { 'xi-api-key': key };
|
||||
const [voicesRes, modelsRes] = await Promise.all([
|
||||
fetch('https://api.elevenlabs.io/v1/voices', { headers }),
|
||||
fetch('https://api.elevenlabs.io/v1/models', { headers })
|
||||
]);
|
||||
if (!voicesRes.ok || !modelsRes.ok) {
|
||||
throw new Error('Failed to fetch from ElevenLabs API (check API key)');
|
||||
}
|
||||
const voicesData = await voicesRes.json();
|
||||
const modelsData = await modelsRes.json();
|
||||
setElevenLabsVoices(voicesData.voices || []);
|
||||
setElevenLabsModels(modelsData.filter(m => m.can_do_text_to_speech) || []);
|
||||
localStorage.setItem('elevenlabs_api_key', key);
|
||||
} catch (err) {
|
||||
setError(err.message);
|
||||
} finally {
|
||||
setIsLoadingElevenLabs(false);
|
||||
}
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
if (elevenLabsApiKey && character.tts.engine === 'elevenlabs') {
|
||||
fetchElevenLabsData(elevenLabsApiKey);
|
||||
}
|
||||
}, [character.tts.engine]);
|
||||
|
||||
// Cleanup audio on unmount
|
||||
useEffect(() => {
|
||||
return () => {
|
||||
if (audioRef.current) { audioRef.current.pause(); audioRef.current = null; }
|
||||
if (objectUrlRef.current) { URL.revokeObjectURL(objectUrlRef.current); }
|
||||
window.speechSynthesis.cancel();
|
||||
};
|
||||
}, []);
|
||||
|
||||
const handleExport = () => {
|
||||
try {
|
||||
validateCharacter(character);
|
||||
setError(null);
|
||||
const dataStr = "data:text/json;charset=utf-8," + encodeURIComponent(JSON.stringify(character, null, 2));
|
||||
const a = document.createElement('a');
|
||||
a.href = dataStr;
|
||||
a.download = `${character.name || 'character'}.json`;
|
||||
document.body.appendChild(a);
|
||||
a.click();
|
||||
a.remove();
|
||||
} catch (err) {
|
||||
setError(err.message);
|
||||
}
|
||||
};
|
||||
|
||||
const handleSaveToProfiles = () => {
|
||||
try {
|
||||
validateCharacter(character);
|
||||
setError(null);
|
||||
|
||||
const profileId = sessionStorage.getItem('edit_character_profile_id');
|
||||
const storageKey = 'homeai_characters';
|
||||
const raw = localStorage.getItem(storageKey);
|
||||
let profiles = raw ? JSON.parse(raw) : [];
|
||||
|
||||
if (profileId) {
|
||||
profiles = profiles.map(p =>
|
||||
p.id === profileId ? { ...p, data: character } : p
|
||||
);
|
||||
sessionStorage.removeItem('edit_character_profile_id');
|
||||
} else {
|
||||
const id = character.name + '_' + Date.now();
|
||||
profiles.push({ id, data: character, image: null, addedAt: new Date().toISOString() });
|
||||
}
|
||||
|
||||
localStorage.setItem(storageKey, JSON.stringify(profiles));
|
||||
setSaved(true);
|
||||
setTimeout(() => setSaved(false), 2000);
|
||||
} catch (err) {
|
||||
setError(err.message);
|
||||
}
|
||||
};
|
||||
|
||||
const handleImport = (e) => {
|
||||
const file = e.target.files[0];
|
||||
if (!file) return;
|
||||
const reader = new FileReader();
|
||||
reader.onload = (e) => {
|
||||
try {
|
||||
const importedChar = JSON.parse(e.target.result);
|
||||
validateCharacter(importedChar);
|
||||
setCharacter(importedChar);
|
||||
setError(null);
|
||||
} catch (err) {
|
||||
setError(`Import failed: ${err.message}`);
|
||||
}
|
||||
};
|
||||
reader.readAsText(file);
|
||||
};
|
||||
|
||||
const handleChange = (field, value) => {
|
||||
setCharacter(prev => ({ ...prev, [field]: value }));
|
||||
};
|
||||
|
||||
const handleNestedChange = (parent, field, value) => {
|
||||
setCharacter(prev => ({
|
||||
...prev,
|
||||
[parent]: { ...prev[parent], [field]: value }
|
||||
}));
|
||||
};
|
||||
|
||||
const handleRuleChange = (index, field, value) => {
|
||||
setCharacter(prev => {
|
||||
const newRules = [...(prev.custom_rules || [])];
|
||||
newRules[index] = { ...newRules[index], [field]: value };
|
||||
return { ...prev, custom_rules: newRules };
|
||||
});
|
||||
};
|
||||
|
||||
const addRule = () => {
|
||||
setCharacter(prev => ({
|
||||
...prev,
|
||||
custom_rules: [...(prev.custom_rules || []), { trigger: "", response: "", condition: "" }]
|
||||
}));
|
||||
};
|
||||
|
||||
const removeRule = (index) => {
|
||||
setCharacter(prev => {
|
||||
const newRules = [...(prev.custom_rules || [])];
|
||||
newRules.splice(index, 1);
|
||||
return { ...prev, custom_rules: newRules };
|
||||
});
|
||||
};
|
||||
|
||||
const stopPreview = () => {
|
||||
if (audioRef.current) {
|
||||
audioRef.current.pause();
|
||||
audioRef.current = null;
|
||||
}
|
||||
if (objectUrlRef.current) {
|
||||
URL.revokeObjectURL(objectUrlRef.current);
|
||||
objectUrlRef.current = null;
|
||||
}
|
||||
window.speechSynthesis.cancel();
|
||||
setTtsState('idle');
|
||||
};
|
||||
|
||||
const previewTTS = async () => {
|
||||
stopPreview();
|
||||
const text = previewText || `Hi, I am ${character.display_name}. This is a preview of my voice.`;
|
||||
|
||||
if (character.tts.engine === 'kokoro') {
|
||||
setTtsState('loading');
|
||||
let blob;
|
||||
try {
|
||||
const response = await fetch('/api/tts', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ text, voice: character.tts.kokoro_voice })
|
||||
});
|
||||
if (!response.ok) throw new Error('TTS bridge returned ' + response.status);
|
||||
blob = await response.blob();
|
||||
} catch (err) {
|
||||
setTtsState('idle');
|
||||
setError(`Kokoro preview failed: ${err.message}. Falling back to browser TTS.`);
|
||||
runBrowserTTS(text);
|
||||
return;
|
||||
}
|
||||
const url = URL.createObjectURL(blob);
|
||||
objectUrlRef.current = url;
|
||||
const audio = new Audio(url);
|
||||
audio.playbackRate = character.tts.speed;
|
||||
audio.onended = () => { stopPreview(); };
|
||||
audio.onerror = () => { stopPreview(); };
|
||||
audioRef.current = audio;
|
||||
setTtsState('playing');
|
||||
audio.play().catch(() => { /* interrupted — stopPreview already handles cleanup */ });
|
||||
} else {
|
||||
runBrowserTTS(text);
|
||||
}
|
||||
};
|
||||
|
||||
const runBrowserTTS = (text) => {
|
||||
const utterance = new SpeechSynthesisUtterance(text);
|
||||
utterance.rate = character.tts.speed;
|
||||
const voices = window.speechSynthesis.getVoices();
|
||||
const preferredVoice = voices.find(v => v.lang.startsWith('en') && v.name.includes('Female')) || voices.find(v => v.lang.startsWith('en'));
|
||||
if (preferredVoice) utterance.voice = preferredVoice;
|
||||
setTtsState('playing');
|
||||
utterance.onend = () => setTtsState('idle');
|
||||
window.speechSynthesis.cancel();
|
||||
window.speechSynthesis.speak(utterance);
|
||||
};
|
||||
|
||||
const inputClass = "w-full bg-gray-800 border border-gray-700 text-gray-200 p-2 rounded-lg focus:border-indigo-500 focus:ring-1 focus:ring-indigo-500 outline-none transition-colors";
|
||||
const selectClass = "w-full bg-gray-800 border border-gray-700 text-gray-200 p-2 rounded-lg focus:border-indigo-500 focus:ring-1 focus:ring-indigo-500 outline-none transition-colors";
|
||||
const labelClass = "block text-sm font-medium text-gray-400 mb-1";
|
||||
const cardClass = "bg-gray-900 border border-gray-800 p-5 rounded-xl space-y-4";
|
||||
|
||||
return (
|
||||
<div className="space-y-6">
|
||||
<div className="flex justify-between items-center">
|
||||
<div>
|
||||
<h1 className="text-3xl font-bold text-gray-100">Character Editor</h1>
|
||||
<p className="text-sm text-gray-500 mt-1">
|
||||
Editing: {character.display_name || character.name}
|
||||
</p>
|
||||
</div>
|
||||
<div className="flex gap-3">
|
||||
<label className="cursor-pointer flex items-center gap-2 px-4 py-2 bg-gray-800 hover:bg-gray-700 text-gray-300 rounded-lg border border-gray-700 transition-colors">
|
||||
<svg className="w-4 h-4" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={2}>
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M3 16.5v2.25A2.25 2.25 0 005.25 21h13.5A2.25 2.25 0 0021 18.75V16.5m-13.5-9L12 3m0 0l4.5 4.5M12 3v13.5" />
|
||||
</svg>
|
||||
Import
|
||||
<input type="file" accept=".json" className="hidden" onChange={handleImport} />
|
||||
</label>
|
||||
<button
|
||||
onClick={handleSaveToProfiles}
|
||||
className={`flex items-center gap-2 px-4 py-2 rounded-lg transition-colors ${
|
||||
saved
|
||||
? 'bg-emerald-600 text-white'
|
||||
: 'bg-indigo-600 hover:bg-indigo-500 text-white'
|
||||
}`}
|
||||
>
|
||||
<svg className="w-4 h-4" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={2}>
|
||||
{saved
|
||||
? <path strokeLinecap="round" strokeLinejoin="round" d="M4.5 12.75l6 6 9-13.5" />
|
||||
: <path strokeLinecap="round" strokeLinejoin="round" d="M17.593 3.322c1.1.128 1.907 1.077 1.907 2.185V21L12 17.25 4.5 21V5.507c0-1.108.806-2.057 1.907-2.185a48.507 48.507 0 0111.186 0z" />
|
||||
}
|
||||
</svg>
|
||||
{saved ? 'Saved' : 'Save to Profiles'}
|
||||
</button>
|
||||
<button
|
||||
onClick={handleExport}
|
||||
className="flex items-center gap-2 px-4 py-2 bg-gray-800 hover:bg-gray-700 text-gray-300 rounded-lg border border-gray-700 transition-colors"
|
||||
>
|
||||
<svg className="w-4 h-4" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={2}>
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M3 16.5v2.25A2.25 2.25 0 005.25 21h13.5A2.25 2.25 0 0021 18.75V16.5M16.5 12L12 16.5m0 0L7.5 12m4.5 4.5V3" />
|
||||
</svg>
|
||||
Export JSON
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{error && (
|
||||
<div className="bg-red-900/30 border border-red-500/50 text-red-300 px-4 py-3 rounded-lg text-sm">
|
||||
{error}
|
||||
</div>
|
||||
)}
|
||||
|
||||
<div className="grid grid-cols-1 md:grid-cols-2 gap-6">
|
||||
{/* Basic Info */}
|
||||
<div className={cardClass}>
|
||||
<h2 className="text-lg font-semibold text-gray-200">Basic Info</h2>
|
||||
<div>
|
||||
<label className={labelClass}>Name (ID)</label>
|
||||
<input type="text" className={inputClass} value={character.name} onChange={(e) => handleChange('name', e.target.value)} />
|
||||
</div>
|
||||
<div>
|
||||
<label className={labelClass}>Display Name</label>
|
||||
<input type="text" className={inputClass} value={character.display_name} onChange={(e) => handleChange('display_name', e.target.value)} />
|
||||
</div>
|
||||
<div>
|
||||
<label className={labelClass}>Description</label>
|
||||
<input type="text" className={inputClass} value={character.description} onChange={(e) => handleChange('description', e.target.value)} />
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* TTS Configuration */}
|
||||
<div className={cardClass}>
|
||||
<h2 className="text-lg font-semibold text-gray-200">TTS Configuration</h2>
|
||||
<div>
|
||||
<label className={labelClass}>Engine</label>
|
||||
<select className={selectClass} value={character.tts.engine} onChange={(e) => handleNestedChange('tts', 'engine', e.target.value)}>
|
||||
<option value="kokoro">Kokoro</option>
|
||||
<option value="chatterbox">Chatterbox</option>
|
||||
<option value="qwen3">Qwen3</option>
|
||||
<option value="elevenlabs">ElevenLabs</option>
|
||||
</select>
|
||||
</div>
|
||||
|
||||
{character.tts.engine === 'elevenlabs' && (
|
||||
<div className="space-y-4 border border-gray-700 p-4 rounded-lg bg-gray-800/50">
|
||||
<div>
|
||||
<label className="block text-xs font-medium mb-1 text-gray-500">ElevenLabs API Key (Local Use Only)</label>
|
||||
<div className="flex gap-2">
|
||||
<input type="password" placeholder="sk_..." className={inputClass + " text-sm"} value={elevenLabsApiKey} onChange={(e) => setElevenLabsApiKey(e.target.value)} />
|
||||
<button onClick={() => fetchElevenLabsData(elevenLabsApiKey)} disabled={isLoadingElevenLabs} className="bg-indigo-600 text-white px-3 py-1 rounded-lg text-sm whitespace-nowrap hover:bg-indigo-500 disabled:opacity-50 transition-colors">
|
||||
{isLoadingElevenLabs ? 'Loading...' : 'Fetch'}
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
<div>
|
||||
<label className={labelClass}>Voice ID</label>
|
||||
{elevenLabsVoices.length > 0 ? (
|
||||
<select className={selectClass} value={character.tts.elevenlabs_voice_id || ''} onChange={(e) => handleNestedChange('tts', 'elevenlabs_voice_id', e.target.value)}>
|
||||
<option value="">-- Select Voice --</option>
|
||||
{elevenLabsVoices.map(v => (
|
||||
<option key={v.voice_id} value={v.voice_id}>{v.name} ({v.category})</option>
|
||||
))}
|
||||
</select>
|
||||
) : (
|
||||
<input type="text" className={inputClass} value={character.tts.elevenlabs_voice_id || ''} onChange={(e) => handleNestedChange('tts', 'elevenlabs_voice_id', e.target.value)} placeholder="e.g. 21m00Tcm4TlvDq8ikWAM" />
|
||||
)}
|
||||
</div>
|
||||
<div>
|
||||
<label className={labelClass}>Model</label>
|
||||
{elevenLabsModels.length > 0 ? (
|
||||
<select className={selectClass} value={character.tts.elevenlabs_model || 'eleven_monolingual_v1'} onChange={(e) => handleNestedChange('tts', 'elevenlabs_model', e.target.value)}>
|
||||
<option value="">-- Select Model --</option>
|
||||
{elevenLabsModels.map(m => (
|
||||
<option key={m.model_id} value={m.model_id}>{m.name} ({m.model_id})</option>
|
||||
))}
|
||||
</select>
|
||||
) : (
|
||||
<input type="text" className={inputClass} value={character.tts.elevenlabs_model || 'eleven_monolingual_v1'} onChange={(e) => handleNestedChange('tts', 'elevenlabs_model', e.target.value)} placeholder="e.g. eleven_monolingual_v1" />
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{character.tts.engine === 'kokoro' && (
|
||||
<div>
|
||||
<label className={labelClass}>Kokoro Voice</label>
|
||||
<select className={selectClass} value={character.tts.kokoro_voice || 'af_heart'} onChange={(e) => handleNestedChange('tts', 'kokoro_voice', e.target.value)}>
|
||||
<option value="af_heart">af_heart (American Female)</option>
|
||||
<option value="af_alloy">af_alloy (American Female)</option>
|
||||
<option value="af_aoede">af_aoede (American Female)</option>
|
||||
<option value="af_bella">af_bella (American Female)</option>
|
||||
<option value="af_jessica">af_jessica (American Female)</option>
|
||||
<option value="af_kore">af_kore (American Female)</option>
|
||||
<option value="af_nicole">af_nicole (American Female)</option>
|
||||
<option value="af_nova">af_nova (American Female)</option>
|
||||
<option value="af_river">af_river (American Female)</option>
|
||||
<option value="af_sarah">af_sarah (American Female)</option>
|
||||
<option value="af_sky">af_sky (American Female)</option>
|
||||
<option value="am_adam">am_adam (American Male)</option>
|
||||
<option value="am_echo">am_echo (American Male)</option>
|
||||
<option value="am_eric">am_eric (American Male)</option>
|
||||
<option value="am_fenrir">am_fenrir (American Male)</option>
|
||||
<option value="am_liam">am_liam (American Male)</option>
|
||||
<option value="am_michael">am_michael (American Male)</option>
|
||||
<option value="am_onyx">am_onyx (American Male)</option>
|
||||
<option value="am_puck">am_puck (American Male)</option>
|
||||
<option value="am_santa">am_santa (American Male)</option>
|
||||
<option value="bf_alice">bf_alice (British Female)</option>
|
||||
<option value="bf_emma">bf_emma (British Female)</option>
|
||||
<option value="bf_isabella">bf_isabella (British Female)</option>
|
||||
<option value="bf_lily">bf_lily (British Female)</option>
|
||||
<option value="bm_daniel">bm_daniel (British Male)</option>
|
||||
<option value="bm_fable">bm_fable (British Male)</option>
|
||||
<option value="bm_george">bm_george (British Male)</option>
|
||||
<option value="bm_lewis">bm_lewis (British Male)</option>
|
||||
</select>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{character.tts.engine === 'chatterbox' && (
|
||||
<div>
|
||||
<label className={labelClass}>Voice Reference Path</label>
|
||||
<input type="text" className={inputClass} value={character.tts.voice_ref_path || ''} onChange={(e) => handleNestedChange('tts', 'voice_ref_path', e.target.value)} />
|
||||
</div>
|
||||
)}
|
||||
|
||||
<div>
|
||||
<label className={labelClass}>Speed: {character.tts.speed}</label>
|
||||
<input type="range" min="0.5" max="2.0" step="0.1" className="w-full accent-indigo-500" value={character.tts.speed} onChange={(e) => handleNestedChange('tts', 'speed', parseFloat(e.target.value))} />
|
||||
</div>
|
||||
<div>
|
||||
<label className={labelClass}>Preview Text</label>
|
||||
<input
|
||||
type="text"
|
||||
className={inputClass}
|
||||
value={previewText}
|
||||
onChange={(e) => setPreviewText(e.target.value)}
|
||||
placeholder={`Hi, I am ${character.display_name}. This is a preview of my voice.`}
|
||||
/>
|
||||
</div>
|
||||
<div className="flex gap-2">
|
||||
<button
|
||||
onClick={previewTTS}
|
||||
disabled={ttsState === 'loading'}
|
||||
className={`flex-1 flex items-center justify-center gap-2 px-4 py-2 rounded-lg transition-colors ${
|
||||
ttsState === 'loading'
|
||||
? 'bg-indigo-800 text-indigo-300 cursor-wait'
|
||||
: ttsState === 'playing'
|
||||
? 'bg-emerald-600 hover:bg-emerald-500 text-white'
|
||||
: 'bg-indigo-600 hover:bg-indigo-500 text-white'
|
||||
}`}
|
||||
>
|
||||
{ttsState === 'loading' && (
|
||||
<svg className="w-4 h-4 animate-spin" viewBox="0 0 24 24" fill="none">
|
||||
<circle className="opacity-25" cx="12" cy="12" r="10" stroke="currentColor" strokeWidth="4" />
|
||||
<path className="opacity-75" fill="currentColor" d="M4 12a8 8 0 018-8V0C5.373 0 0 5.373 0 12h4z" />
|
||||
</svg>
|
||||
)}
|
||||
{ttsState === 'loading' ? 'Synthesizing...' : ttsState === 'playing' ? 'Playing...' : 'Preview Voice'}
|
||||
</button>
|
||||
{ttsState !== 'idle' && (
|
||||
<button
|
||||
onClick={stopPreview}
|
||||
className="px-4 py-2 bg-red-600 hover:bg-red-500 text-white rounded-lg transition-colors"
|
||||
>
|
||||
Stop
|
||||
</button>
|
||||
)}
|
||||
</div>
|
||||
<p className="text-xs text-gray-600">
|
||||
{character.tts.engine === 'kokoro'
|
||||
? 'Previews via local Kokoro TTS bridge (port 8081 → Wyoming 10301).'
|
||||
: 'Uses browser TTS for preview. Local TTS available with Kokoro engine.'}
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* System Prompt */}
|
||||
<div className={cardClass}>
|
||||
<div className="flex justify-between items-center">
|
||||
<h2 className="text-lg font-semibold text-gray-200">System Prompt</h2>
|
||||
<span className="text-xs text-gray-600">{character.system_prompt.length} chars</span>
|
||||
</div>
|
||||
<textarea
|
||||
className={inputClass + " h-32 resize-y"}
|
||||
value={character.system_prompt}
|
||||
onChange={(e) => handleChange('system_prompt', e.target.value)}
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div className="grid grid-cols-1 md:grid-cols-2 gap-6">
|
||||
{/* Live2D Expressions */}
|
||||
<div className={cardClass}>
|
||||
<h2 className="text-lg font-semibold text-gray-200">Live2D Expressions</h2>
|
||||
{Object.entries(character.live2d_expressions).map(([key, val]) => (
|
||||
<div key={key} className="flex justify-between items-center gap-4">
|
||||
<label className="text-sm font-medium text-gray-400 w-1/3 capitalize">{key}</label>
|
||||
<input type="text" className={inputClass + " w-2/3"} value={val} onChange={(e) => handleNestedChange('live2d_expressions', key, e.target.value)} />
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
|
||||
{/* Model Overrides */}
|
||||
<div className={cardClass}>
|
||||
<h2 className="text-lg font-semibold text-gray-200">Model Overrides</h2>
|
||||
<div>
|
||||
<label className={labelClass}>Primary Model</label>
|
||||
<select className={selectClass} value={character.model_overrides?.primary || 'llama3.3:70b'} onChange={(e) => handleNestedChange('model_overrides', 'primary', e.target.value)}>
|
||||
<option value="llama3.3:70b">llama3.3:70b</option>
|
||||
<option value="qwen2.5:7b">qwen2.5:7b</option>
|
||||
<option value="qwen3:32b">qwen3:32b</option>
|
||||
<option value="codestral:22b">codestral:22b</option>
|
||||
<option value="gemma-3-27b">gemma-3-27b</option>
|
||||
<option value="DeepSeek-R1-8B">DeepSeek-R1-8B</option>
|
||||
</select>
|
||||
</div>
|
||||
<div>
|
||||
<label className={labelClass}>Fast Model</label>
|
||||
<select className={selectClass} value={character.model_overrides?.fast || 'qwen2.5:7b'} onChange={(e) => handleNestedChange('model_overrides', 'fast', e.target.value)}>
|
||||
<option value="qwen2.5:7b">qwen2.5:7b</option>
|
||||
<option value="llama3.3:70b">llama3.3:70b</option>
|
||||
<option value="qwen3:32b">qwen3:32b</option>
|
||||
<option value="codestral:22b">codestral:22b</option>
|
||||
<option value="gemma-3-27b">gemma-3-27b</option>
|
||||
<option value="DeepSeek-R1-8B">DeepSeek-R1-8B</option>
|
||||
</select>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Custom Rules */}
|
||||
<div className={cardClass}>
|
||||
<div className="flex justify-between items-center">
|
||||
<h2 className="text-lg font-semibold text-gray-200">Custom Rules</h2>
|
||||
<button onClick={addRule} className="flex items-center gap-1 bg-indigo-600 hover:bg-indigo-500 text-white px-3 py-1.5 rounded-lg text-sm transition-colors">
|
||||
<svg className="w-4 h-4" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={2}>
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M12 4.5v15m7.5-7.5h-15" />
|
||||
</svg>
|
||||
Add Rule
|
||||
</button>
|
||||
</div>
|
||||
|
||||
{(!character.custom_rules || character.custom_rules.length === 0) ? (
|
||||
<p className="text-sm text-gray-600 italic">No custom rules defined.</p>
|
||||
) : (
|
||||
<div className="space-y-4">
|
||||
{character.custom_rules.map((rule, idx) => (
|
||||
<div key={idx} className="border border-gray-700 p-4 rounded-lg relative bg-gray-800/50">
|
||||
<button
|
||||
onClick={() => removeRule(idx)}
|
||||
className="absolute top-3 right-3 text-gray-500 hover:text-red-400 transition-colors"
|
||||
title="Remove Rule"
|
||||
>
|
||||
<svg className="w-4 h-4" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={2}>
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M6 18L18 6M6 6l12 12" />
|
||||
</svg>
|
||||
</button>
|
||||
<div className="grid grid-cols-1 md:grid-cols-2 gap-4 mt-1">
|
||||
<div>
|
||||
<label className="block text-xs font-medium mb-1 text-gray-500">Trigger</label>
|
||||
<input type="text" className={inputClass + " text-sm"} value={rule.trigger || ''} onChange={(e) => handleRuleChange(idx, 'trigger', e.target.value)} />
|
||||
</div>
|
||||
<div>
|
||||
<label className="block text-xs font-medium mb-1 text-gray-500">Condition (Optional)</label>
|
||||
<input type="text" className={inputClass + " text-sm"} value={rule.condition || ''} onChange={(e) => handleRuleChange(idx, 'condition', e.target.value)} placeholder="e.g. time_of_day == morning" />
|
||||
</div>
|
||||
<div className="md:col-span-2">
|
||||
<label className="block text-xs font-medium mb-1 text-gray-500">Response</label>
|
||||
<textarea className={inputClass + " text-sm h-16 resize-y"} value={rule.response || ''} onChange={(e) => handleRuleChange(idx, 'response', e.target.value)} />
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
297
homeai-character/src/CharacterProfiles.jsx
Normal file
297
homeai-character/src/CharacterProfiles.jsx
Normal file
@@ -0,0 +1,297 @@
|
||||
import { useState, useEffect } from 'react';
|
||||
import { useNavigate } from 'react-router-dom';
|
||||
import { validateCharacter } from './SchemaValidator';
|
||||
|
||||
// localStorage key holding the JSON-serialized array of imported character profiles.
const STORAGE_KEY = 'homeai_characters';
// localStorage key holding the id of the currently active profile (a single string).
const ACTIVE_KEY = 'homeai_active_character';
|
||||
|
||||
/**
 * Read the stored character profile list from localStorage.
 * Returns an empty array when nothing is stored or the payload
 * cannot be parsed (corrupt/legacy data).
 */
function loadProfiles() {
  try {
    const stored = localStorage.getItem(STORAGE_KEY);
    if (!stored) return [];
    return JSON.parse(stored);
  } catch {
    // Malformed JSON — treat as "no profiles" rather than crashing the UI.
    return [];
  }
}
|
||||
|
||||
/** Persist the full profile list to localStorage (overwrites any previous value). */
function saveProfiles(profiles) {
  const serialized = JSON.stringify(profiles);
  localStorage.setItem(STORAGE_KEY, serialized);
}
|
||||
|
||||
/** Id of the currently active profile, or null when none has been chosen. */
function getActiveId() {
  const stored = localStorage.getItem(ACTIVE_KEY);
  return stored || null;
}
|
||||
|
||||
/** Record the given profile id as the active one. */
function setActiveId(id) {
  localStorage.setItem(ACTIVE_KEY, id);
}
|
||||
|
||||
export default function CharacterProfiles() {
|
||||
const [profiles, setProfiles] = useState(loadProfiles);
|
||||
const [activeId, setActive] = useState(getActiveId);
|
||||
const [error, setError] = useState(null);
|
||||
const [dragOver, setDragOver] = useState(false);
|
||||
const navigate = useNavigate();
|
||||
|
||||
useEffect(() => {
|
||||
saveProfiles(profiles);
|
||||
}, [profiles]);
|
||||
|
||||
const handleImport = (e) => {
|
||||
const files = Array.from(e.target?.files || []);
|
||||
importFiles(files);
|
||||
if (e.target) e.target.value = '';
|
||||
};
|
||||
|
||||
const importFiles = (files) => {
|
||||
files.forEach(file => {
|
||||
if (!file.name.endsWith('.json')) return;
|
||||
const reader = new FileReader();
|
||||
reader.onload = (ev) => {
|
||||
try {
|
||||
const data = JSON.parse(ev.target.result);
|
||||
validateCharacter(data);
|
||||
const id = data.name + '_' + Date.now();
|
||||
setProfiles(prev => [...prev, { id, data, image: null, addedAt: new Date().toISOString() }]);
|
||||
setError(null);
|
||||
} catch (err) {
|
||||
setError(`Import failed for ${file.name}: ${err.message}`);
|
||||
}
|
||||
};
|
||||
reader.readAsText(file);
|
||||
});
|
||||
};
|
||||
|
||||
const handleDrop = (e) => {
|
||||
e.preventDefault();
|
||||
setDragOver(false);
|
||||
const files = Array.from(e.dataTransfer.files);
|
||||
importFiles(files);
|
||||
};
|
||||
|
||||
const handleImageUpload = (profileId, e) => {
|
||||
const file = e.target.files[0];
|
||||
if (!file) return;
|
||||
const reader = new FileReader();
|
||||
reader.onload = (ev) => {
|
||||
setProfiles(prev =>
|
||||
prev.map(p => p.id === profileId ? { ...p, image: ev.target.result } : p)
|
||||
);
|
||||
};
|
||||
reader.readAsDataURL(file);
|
||||
};
|
||||
|
||||
const removeProfile = (id) => {
|
||||
setProfiles(prev => prev.filter(p => p.id !== id));
|
||||
if (activeId === id) {
|
||||
setActive(null);
|
||||
localStorage.removeItem(ACTIVE_KEY);
|
||||
}
|
||||
};
|
||||
|
||||
const activateProfile = (id) => {
|
||||
setActive(id);
|
||||
setActiveId(id);
|
||||
};
|
||||
|
||||
const exportProfile = (profile) => {
|
||||
const dataStr = "data:text/json;charset=utf-8," + encodeURIComponent(JSON.stringify(profile.data, null, 2));
|
||||
const a = document.createElement('a');
|
||||
a.href = dataStr;
|
||||
a.download = `${profile.data.name || 'character'}.json`;
|
||||
a.click();
|
||||
};
|
||||
|
||||
const editProfile = (profile) => {
|
||||
// Store the profile data for the editor to pick up
|
||||
sessionStorage.setItem('edit_character', JSON.stringify(profile.data));
|
||||
sessionStorage.setItem('edit_character_profile_id', profile.id);
|
||||
navigate('/editor');
|
||||
};
|
||||
|
||||
const activeProfile = profiles.find(p => p.id === activeId);
|
||||
|
||||
return (
|
||||
<div className="space-y-8">
|
||||
{/* Header */}
|
||||
<div className="flex items-center justify-between">
|
||||
<div>
|
||||
<h1 className="text-3xl font-bold text-gray-100">Characters</h1>
|
||||
<p className="text-sm text-gray-500 mt-1">
|
||||
{profiles.length} profile{profiles.length !== 1 ? 's' : ''} stored
|
||||
{activeProfile && (
|
||||
<span className="ml-2 text-emerald-400">
|
||||
Active: {activeProfile.data.display_name || activeProfile.data.name}
|
||||
</span>
|
||||
)}
|
||||
</p>
|
||||
</div>
|
||||
<label className="flex items-center gap-2 px-4 py-2 bg-indigo-600 hover:bg-indigo-500 text-white rounded-lg cursor-pointer transition-colors">
|
||||
<svg className="w-4 h-4" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={2}>
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M12 4.5v15m7.5-7.5h-15" />
|
||||
</svg>
|
||||
Import JSON
|
||||
<input type="file" accept=".json" multiple className="hidden" onChange={handleImport} />
|
||||
</label>
|
||||
</div>
|
||||
|
||||
{error && (
|
||||
<div className="bg-red-900/30 border border-red-500/50 text-red-300 px-4 py-3 rounded-lg text-sm">
|
||||
{error}
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Drop zone */}
|
||||
<div
|
||||
onDragOver={(e) => { e.preventDefault(); setDragOver(true); }}
|
||||
onDragLeave={() => setDragOver(false)}
|
||||
onDrop={handleDrop}
|
||||
className={`border-2 border-dashed rounded-xl p-8 text-center transition-colors ${
|
||||
dragOver
|
||||
? 'border-indigo-500 bg-indigo-500/10'
|
||||
: 'border-gray-700 hover:border-gray-600'
|
||||
}`}
|
||||
>
|
||||
<svg className="w-10 h-10 mx-auto text-gray-600 mb-3" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={1}>
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M3 16.5v2.25A2.25 2.25 0 005.25 21h13.5A2.25 2.25 0 0021 18.75V16.5m-13.5-9L12 3m0 0l4.5 4.5M12 3v13.5" />
|
||||
</svg>
|
||||
<p className="text-gray-500 text-sm">Drop character JSON files here to import</p>
|
||||
</div>
|
||||
|
||||
{/* Profile grid */}
|
||||
{profiles.length === 0 ? (
|
||||
<div className="text-center py-16">
|
||||
<svg className="w-16 h-16 mx-auto text-gray-700 mb-4" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={1}>
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M15.75 6a3.75 3.75 0 11-7.5 0 3.75 3.75 0 017.5 0zM4.501 20.118a7.5 7.5 0 0114.998 0A17.933 17.933 0 0112 21.75c-2.676 0-5.216-.584-7.499-1.632z" />
|
||||
</svg>
|
||||
<p className="text-gray-500">No character profiles yet. Import a JSON file to get started.</p>
|
||||
</div>
|
||||
) : (
|
||||
<div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-6">
|
||||
{profiles.map(profile => {
|
||||
const isActive = profile.id === activeId;
|
||||
const char = profile.data;
|
||||
return (
|
||||
<div
|
||||
key={profile.id}
|
||||
className={`relative rounded-xl border overflow-hidden transition-all duration-200 ${
|
||||
isActive
|
||||
? 'border-emerald-500/60 bg-emerald-500/5 ring-1 ring-emerald-500/30'
|
||||
: 'border-gray-700 bg-gray-800/50 hover:border-gray-600'
|
||||
}`}
|
||||
>
|
||||
{/* Image area */}
|
||||
<div className="relative h-48 bg-gray-900 flex items-center justify-center overflow-hidden group">
|
||||
{profile.image ? (
|
||||
<img
|
||||
src={profile.image}
|
||||
alt={char.display_name || char.name}
|
||||
className="w-full h-full object-cover"
|
||||
/>
|
||||
) : (
|
||||
<div className="text-6xl font-bold text-gray-700 select-none">
|
||||
{(char.display_name || char.name || '?')[0].toUpperCase()}
|
||||
</div>
|
||||
)}
|
||||
{/* Image upload overlay */}
|
||||
<label className="absolute inset-0 flex items-center justify-center bg-black/50 opacity-0 group-hover:opacity-100 transition-opacity cursor-pointer">
|
||||
<div className="text-center">
|
||||
<svg className="w-8 h-8 mx-auto text-white/80 mb-1" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={1.5}>
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M6.827 6.175A2.31 2.31 0 015.186 7.23c-.38.054-.757.112-1.134.175C2.999 7.58 2.25 8.507 2.25 9.574V18a2.25 2.25 0 002.25 2.25h15A2.25 2.25 0 0021.75 18V9.574c0-1.067-.75-1.994-1.802-2.169a47.865 47.865 0 00-1.134-.175 2.31 2.31 0 01-1.64-1.055l-.822-1.316a2.192 2.192 0 00-1.736-1.039 48.774 48.774 0 00-5.232 0 2.192 2.192 0 00-1.736 1.039l-.821 1.316z" />
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M16.5 12.75a4.5 4.5 0 11-9 0 4.5 4.5 0 019 0z" />
|
||||
</svg>
|
||||
<span className="text-xs text-white/70">Change image</span>
|
||||
</div>
|
||||
<input
|
||||
type="file"
|
||||
accept="image/*"
|
||||
className="hidden"
|
||||
onChange={(e) => handleImageUpload(profile.id, e)}
|
||||
/>
|
||||
</label>
|
||||
{/* Active badge */}
|
||||
{isActive && (
|
||||
<span className="absolute top-2 right-2 px-2 py-0.5 bg-emerald-500 text-white text-xs font-medium rounded-full">
|
||||
Active
|
||||
</span>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Info */}
|
||||
<div className="p-4 space-y-3">
|
||||
<div>
|
||||
<h3 className="text-lg font-semibold text-gray-200">
|
||||
{char.display_name || char.name}
|
||||
</h3>
|
||||
<p className="text-xs text-gray-500 mt-0.5">{char.description}</p>
|
||||
</div>
|
||||
|
||||
{/* Meta chips */}
|
||||
<div className="flex flex-wrap gap-1.5">
|
||||
<span className="px-2 py-0.5 bg-gray-700/70 text-gray-400 text-xs rounded-full">
|
||||
{char.tts?.engine || 'kokoro'}
|
||||
</span>
|
||||
<span className="px-2 py-0.5 bg-gray-700/70 text-gray-400 text-xs rounded-full">
|
||||
{char.model_overrides?.primary || 'default'}
|
||||
</span>
|
||||
{char.tts?.kokoro_voice && (
|
||||
<span className="px-2 py-0.5 bg-gray-700/70 text-gray-400 text-xs rounded-full">
|
||||
{char.tts.kokoro_voice}
|
||||
</span>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Actions */}
|
||||
<div className="flex gap-2 pt-1">
|
||||
{!isActive ? (
|
||||
<button
|
||||
onClick={() => activateProfile(profile.id)}
|
||||
className="flex-1 px-3 py-1.5 bg-emerald-600 hover:bg-emerald-500 text-white text-sm rounded-lg transition-colors"
|
||||
>
|
||||
Activate
|
||||
</button>
|
||||
) : (
|
||||
<button
|
||||
disabled
|
||||
className="flex-1 px-3 py-1.5 bg-gray-700 text-gray-500 text-sm rounded-lg cursor-not-allowed"
|
||||
>
|
||||
Active
|
||||
</button>
|
||||
)}
|
||||
<button
|
||||
onClick={() => editProfile(profile)}
|
||||
className="px-3 py-1.5 bg-gray-700 hover:bg-gray-600 text-gray-300 text-sm rounded-lg transition-colors"
|
||||
title="Edit"
|
||||
>
|
||||
<svg className="w-4 h-4" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={2}>
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M16.862 4.487l1.687-1.688a1.875 1.875 0 112.652 2.652L10.582 16.07a4.5 4.5 0 01-1.897 1.13L6 18l.8-2.685a4.5 4.5 0 011.13-1.897l8.932-8.931zm0 0L19.5 7.125M18 14v4.75A2.25 2.25 0 0115.75 21H5.25A2.25 2.25 0 013 18.75V8.25A2.25 2.25 0 015.25 6H10" />
|
||||
</svg>
|
||||
</button>
|
||||
<button
|
||||
onClick={() => exportProfile(profile)}
|
||||
className="px-3 py-1.5 bg-gray-700 hover:bg-gray-600 text-gray-300 text-sm rounded-lg transition-colors"
|
||||
title="Export"
|
||||
>
|
||||
<svg className="w-4 h-4" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={2}>
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M3 16.5v2.25A2.25 2.25 0 005.25 21h13.5A2.25 2.25 0 0021 18.75V16.5M16.5 12L12 16.5m0 0L7.5 12m4.5 4.5V3" />
|
||||
</svg>
|
||||
</button>
|
||||
<button
|
||||
onClick={() => removeProfile(profile.id)}
|
||||
className="px-3 py-1.5 bg-gray-700 hover:bg-red-600 text-gray-300 hover:text-white text-sm rounded-lg transition-colors"
|
||||
title="Delete"
|
||||
>
|
||||
<svg className="w-4 h-4" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={2}>
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M14.74 9l-.346 9m-4.788 0L9.26 9m9.968-3.21c.342.052.682.107 1.022.166m-1.022-.165L18.16 19.673a2.25 2.25 0 01-2.244 2.077H8.084a2.25 2.25 0 01-2.244-2.077L4.772 5.79m14.456 0a48.108 48.108 0 00-3.478-.397m-12 .562c.34-.059.68-.114 1.022-.165m0 0a48.11 48.11 0 013.478-.397m7.5 0v-.916c0-1.18-.91-2.164-2.09-2.201a51.964 51.964 0 00-3.32 0c-1.18.037-2.09 1.022-2.09 2.201v.916m7.5 0a48.667 48.667 0 00-7.5 0" />
|
||||
</svg>
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
13
homeai-character/src/SchemaValidator.js
Normal file
13
homeai-character/src/SchemaValidator.js
Normal file
@@ -0,0 +1,13 @@
|
||||
import Ajv from 'ajv'
|
||||
import schema from '../schema/character.schema.json'
|
||||
|
||||
// Compile the schema once at module load; repeated validations reuse the
// same compiled validator.
const ajv = new Ajv({ strict: false, allErrors: true })
const validate = ajv.compile(schema)

/**
 * Validate a character config object against the character JSON schema.
 * @throws {Error} with a human-readable summary of every schema violation.
 * @returns {true} when the config is valid.
 */
export function validateCharacter(config) {
  if (validate(config)) {
    return true
  }
  throw new Error(ajv.errorsText(validate.errors))
}
|
||||
389
homeai-character/src/ServiceStatus.jsx
Normal file
389
homeai-character/src/ServiceStatus.jsx
Normal file
@@ -0,0 +1,389 @@
|
||||
import { useState, useEffect, useCallback } from 'react';
|
||||
|
||||
// Registry of every HomeAI service the status dashboard monitors.
//
// Per-entry fields:
//   url / healthPath — endpoint probed for liveness; requests are routed
//                      through the server-side /api/health proxy (see
//                      checkService below) to avoid CORS and self-signed
//                      certificate issues.
//   uiUrl            — browser-facing UI to link to, or null for headless
//                      services.
//   description      — one-line summary shown on the service card.
//   category         — dashboard section grouping (see CATEGORY_ICONS).
//   tcp              — when true, the health check sends mode=tcp so the
//                      proxy does a raw TCP connect instead of an HTTP GET
//                      (the Wyoming services are not plain HTTP).
//   restart          — optional handle for POST /api/service/restart:
//                      type 'launchd' with a service target, or type
//                      'docker' with a container name. Entries without it
//                      (Home Assistant, Portainer, Gitea) cannot be
//                      restarted from the dashboard.
const SERVICES = [
  {
    name: 'Ollama',
    url: 'http://localhost:11434',
    healthPath: '/api/tags',
    uiUrl: null,
    description: 'Local LLM runtime',
    category: 'AI & LLM',
    restart: { type: 'launchd', id: 'gui/501/com.homeai.ollama' },
  },
  {
    name: 'Open WebUI',
    url: 'http://localhost:3030',
    healthPath: '/',
    uiUrl: 'http://localhost:3030',
    description: 'Chat interface',
    category: 'AI & LLM',
    restart: { type: 'docker', id: 'homeai-open-webui' },
  },
  {
    name: 'OpenClaw Gateway',
    url: 'http://localhost:8080',
    healthPath: '/',
    uiUrl: null,
    description: 'Agent gateway',
    category: 'Agent',
    restart: { type: 'launchd', id: 'gui/501/com.homeai.openclaw' },
  },
  {
    name: 'OpenClaw Bridge',
    url: 'http://localhost:8081',
    healthPath: '/',
    uiUrl: null,
    description: 'HTTP-to-CLI bridge',
    category: 'Agent',
    restart: { type: 'launchd', id: 'gui/501/com.homeai.openclaw-bridge' },
  },
  {
    name: 'Wyoming STT',
    url: 'http://localhost:10300',
    healthPath: '/',
    uiUrl: null,
    description: 'Whisper speech-to-text',
    category: 'Voice',
    tcp: true,
    restart: { type: 'launchd', id: 'gui/501/com.homeai.wyoming-stt' },
  },
  {
    name: 'Wyoming TTS',
    url: 'http://localhost:10301',
    healthPath: '/',
    uiUrl: null,
    description: 'Kokoro text-to-speech',
    category: 'Voice',
    tcp: true,
    restart: { type: 'launchd', id: 'gui/501/com.homeai.wyoming-tts' },
  },
  {
    name: 'Wyoming Satellite',
    url: 'http://localhost:10700',
    healthPath: '/',
    uiUrl: null,
    description: 'Mac Mini mic/speaker satellite',
    category: 'Voice',
    tcp: true,
    restart: { type: 'launchd', id: 'gui/501/com.homeai.wyoming-satellite' },
  },
  {
    name: 'Character Dashboard',
    url: 'http://localhost:5173',
    healthPath: '/',
    uiUrl: 'http://localhost:5173',
    description: 'Character manager & service status',
    category: 'Agent',
    restart: { type: 'launchd', id: 'gui/501/com.homeai.character-dashboard' },
  },
  {
    name: 'Home Assistant',
    url: 'https://10.0.0.199:8123',
    healthPath: '/api/',
    uiUrl: 'https://10.0.0.199:8123',
    description: 'Smart home platform',
    category: 'Smart Home',
  },
  {
    name: 'Uptime Kuma',
    url: 'http://localhost:3001',
    healthPath: '/',
    uiUrl: 'http://localhost:3001',
    description: 'Service health monitoring',
    category: 'Infrastructure',
    restart: { type: 'docker', id: 'homeai-uptime-kuma' },
  },
  {
    name: 'n8n',
    url: 'http://localhost:5678',
    healthPath: '/',
    uiUrl: 'http://localhost:5678',
    description: 'Workflow automation',
    category: 'Infrastructure',
    restart: { type: 'docker', id: 'homeai-n8n' },
  },
  {
    name: 'code-server',
    url: 'http://localhost:8090',
    healthPath: '/',
    uiUrl: 'http://localhost:8090',
    description: 'Browser-based VS Code',
    category: 'Infrastructure',
    restart: { type: 'docker', id: 'homeai-code-server' },
  },
  {
    name: 'Portainer',
    url: 'https://10.0.0.199:9443',
    healthPath: '/',
    uiUrl: 'https://10.0.0.199:9443',
    description: 'Docker management',
    category: 'Infrastructure',
  },
  {
    name: 'Gitea',
    url: 'http://10.0.0.199:3000',
    healthPath: '/',
    uiUrl: 'http://10.0.0.199:3000',
    description: 'Self-hosted Git',
    category: 'Infrastructure',
  },
];
|
||||
|
||||
// SVG icon per service category, rendered next to each section header in the
// dashboard. Keys must match the `category` values used in SERVICES.
const CATEGORY_ICONS = {
  'AI & LLM': (
    <svg className="w-5 h-5" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={1.5}>
      <path strokeLinecap="round" strokeLinejoin="round" d="M9.813 15.904L9 18.75l-.813-2.846a4.5 4.5 0 00-3.09-3.09L2.25 12l2.846-.813a4.5 4.5 0 003.09-3.09L9 5.25l.813 2.846a4.5 4.5 0 003.09 3.09L15.75 12l-2.846.813a4.5 4.5 0 00-3.09 3.09zM18.259 8.715L18 9.75l-.259-1.035a3.375 3.375 0 00-2.455-2.456L14.25 6l1.036-.259a3.375 3.375 0 002.455-2.456L18 2.25l.259 1.035a3.375 3.375 0 002.455 2.456L21.75 6l-1.036.259a3.375 3.375 0 00-2.455 2.456zM16.894 20.567L16.5 21.75l-.394-1.183a2.25 2.25 0 00-1.423-1.423L13.5 18.75l1.183-.394a2.25 2.25 0 001.423-1.423l.394-1.183.394 1.183a2.25 2.25 0 001.423 1.423l1.183.394-1.183.394a2.25 2.25 0 00-1.423 1.423z" />
    </svg>
  ),
  'Agent': (
    <svg className="w-5 h-5" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={1.5}>
      <path strokeLinecap="round" strokeLinejoin="round" d="M8.25 3v1.5M4.5 8.25H3m18 0h-1.5M4.5 12H3m18 0h-1.5m-15 3.75H3m18 0h-1.5M8.25 19.5V21M12 3v1.5m0 15V21m3.75-18v1.5m0 15V21m-9-1.5h10.5a2.25 2.25 0 002.25-2.25V6.75a2.25 2.25 0 00-2.25-2.25H6.75A2.25 2.25 0 004.5 6.75v10.5a2.25 2.25 0 002.25 2.25zm.75-12h9v9h-9v-9z" />
    </svg>
  ),
  'Voice': (
    <svg className="w-5 h-5" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={1.5}>
      <path strokeLinecap="round" strokeLinejoin="round" d="M12 18.75a6 6 0 006-6v-1.5m-6 7.5a6 6 0 01-6-6v-1.5m6 7.5v3.75m-3.75 0h7.5M12 15.75a3 3 0 01-3-3V4.5a3 3 0 116 0v8.25a3 3 0 01-3 3z" />
    </svg>
  ),
  'Smart Home': (
    <svg className="w-5 h-5" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={1.5}>
      <path strokeLinecap="round" strokeLinejoin="round" d="M2.25 12l8.954-8.955c.44-.439 1.152-.439 1.591 0L21.75 12M4.5 9.75v10.125c0 .621.504 1.125 1.125 1.125H9.75v-4.875c0-.621.504-1.125 1.125-1.125h2.25c.621 0 1.125.504 1.125 1.125V21h4.125c.621 0 1.125-.504 1.125-1.125V9.75M8.25 21h8.25" />
    </svg>
  ),
  'Infrastructure': (
    <svg className="w-5 h-5" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={1.5}>
      <path strokeLinecap="round" strokeLinejoin="round" d="M5.25 14.25h13.5m-13.5 0a3 3 0 01-3-3m3 3a3 3 0 100 6h13.5a3 3 0 100-6m-16.5-3a3 3 0 013-3h13.5a3 3 0 013 3m-19.5 0a4.5 4.5 0 01.9-2.7L5.737 5.1a3.375 3.375 0 012.7-1.35h7.126c1.062 0 2.062.5 2.7 1.35l2.587 3.45a4.5 4.5 0 01.9 2.7m0 0a3 3 0 01-3 3m0 3h.008v.008h-.008v-.008zm0-6h.008v.008h-.008v-.008zm-3 6h.008v.008h-.008v-.008zm0-6h.008v.008h-.008v-.008z" />
    </svg>
  ),
};
|
||||
|
||||
function StatusDot({ status }) {
|
||||
const colors = {
|
||||
online: 'bg-emerald-400 shadow-emerald-400/50',
|
||||
offline: 'bg-red-400 shadow-red-400/50',
|
||||
checking: 'bg-amber-400 shadow-amber-400/50 animate-pulse',
|
||||
unknown: 'bg-gray-500',
|
||||
};
|
||||
return (
|
||||
<span className={`inline-block w-2.5 h-2.5 rounded-full shadow-lg ${colors[status] || colors.unknown}`} />
|
||||
);
|
||||
}
|
||||
|
||||
export default function ServiceStatus() {
|
||||
const [statuses, setStatuses] = useState(() =>
|
||||
Object.fromEntries(SERVICES.map(s => [s.name, { status: 'checking', lastCheck: null, responseTime: null }]))
|
||||
);
|
||||
const [lastRefresh, setLastRefresh] = useState(null);
|
||||
const [restarting, setRestarting] = useState({});
|
||||
|
||||
const checkService = useCallback(async (service) => {
|
||||
try {
|
||||
// Route all checks through the server-side proxy to avoid CORS and
|
||||
// self-signed SSL cert issues in the browser.
|
||||
const target = encodeURIComponent(service.url + service.healthPath);
|
||||
const modeParam = service.tcp ? '&mode=tcp' : '';
|
||||
const controller = new AbortController();
|
||||
const timeout = setTimeout(() => controller.abort(), 8000);
|
||||
|
||||
const res = await fetch(`/api/health?url=${target}${modeParam}`, { signal: controller.signal });
|
||||
clearTimeout(timeout);
|
||||
|
||||
const data = await res.json();
|
||||
return { status: data.status, lastCheck: new Date(), responseTime: data.responseTime };
|
||||
} catch {
|
||||
return { status: 'offline', lastCheck: new Date(), responseTime: null };
|
||||
}
|
||||
}, []);
|
||||
|
||||
const refreshAll = useCallback(async () => {
|
||||
// Mark all as checking
|
||||
setStatuses(prev =>
|
||||
Object.fromEntries(Object.entries(prev).map(([k, v]) => [k, { ...v, status: 'checking' }]))
|
||||
);
|
||||
|
||||
const results = await Promise.allSettled(
|
||||
SERVICES.map(async (service) => {
|
||||
const result = await checkService(service);
|
||||
return { name: service.name, ...result };
|
||||
})
|
||||
);
|
||||
|
||||
const newStatuses = {};
|
||||
for (const r of results) {
|
||||
if (r.status === 'fulfilled') {
|
||||
newStatuses[r.value.name] = {
|
||||
status: r.value.status,
|
||||
lastCheck: r.value.lastCheck,
|
||||
responseTime: r.value.responseTime,
|
||||
};
|
||||
}
|
||||
}
|
||||
setStatuses(prev => ({ ...prev, ...newStatuses }));
|
||||
setLastRefresh(new Date());
|
||||
}, [checkService]);
|
||||
|
||||
useEffect(() => {
|
||||
refreshAll();
|
||||
const interval = setInterval(refreshAll, 30000);
|
||||
return () => clearInterval(interval);
|
||||
}, [refreshAll]);
|
||||
|
||||
const restartService = useCallback(async (service) => {
|
||||
if (!service.restart) return;
|
||||
setRestarting(prev => ({ ...prev, [service.name]: true }));
|
||||
try {
|
||||
const res = await fetch('/api/service/restart', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify(service.restart),
|
||||
});
|
||||
const data = await res.json();
|
||||
if (!data.ok) {
|
||||
console.error(`Restart failed for ${service.name}:`, data.error);
|
||||
}
|
||||
// Wait a moment for the service to come back, then re-check
|
||||
setTimeout(async () => {
|
||||
const result = await checkService(service);
|
||||
setStatuses(prev => ({ ...prev, [service.name]: result }));
|
||||
setRestarting(prev => ({ ...prev, [service.name]: false }));
|
||||
}, 3000);
|
||||
} catch (err) {
|
||||
console.error(`Restart failed for ${service.name}:`, err);
|
||||
setRestarting(prev => ({ ...prev, [service.name]: false }));
|
||||
}
|
||||
}, [checkService]);
|
||||
|
||||
const categories = [...new Set(SERVICES.map(s => s.category))];
|
||||
const onlineCount = Object.values(statuses).filter(s => s.status === 'online').length;
|
||||
const offlineCount = Object.values(statuses).filter(s => s.status === 'offline').length;
|
||||
const totalCount = SERVICES.length;
|
||||
const allOnline = onlineCount === totalCount;
|
||||
|
||||
return (
|
||||
<div className="space-y-8">
|
||||
{/* Header */}
|
||||
<div className="flex items-center justify-between">
|
||||
<div>
|
||||
<h1 className="text-3xl font-bold text-gray-100">Service Status</h1>
|
||||
<p className="text-sm text-gray-500 mt-1">
|
||||
{onlineCount}/{totalCount} services online
|
||||
{lastRefresh && (
|
||||
<span className="ml-3">
|
||||
Last check: {lastRefresh.toLocaleTimeString()}
|
||||
</span>
|
||||
)}
|
||||
</p>
|
||||
</div>
|
||||
<button
|
||||
onClick={refreshAll}
|
||||
className="flex items-center gap-2 px-4 py-2 bg-gray-800 hover:bg-gray-700 text-gray-300 rounded-lg border border-gray-700 transition-colors"
|
||||
>
|
||||
<svg className="w-4 h-4" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={2}>
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M16.023 9.348h4.992v-.001M2.985 19.644v-4.992m0 0h4.992m-4.993 0l3.181 3.183a8.25 8.25 0 0013.803-3.7M4.031 9.865a8.25 8.25 0 0113.803-3.7l3.181 3.182" />
|
||||
</svg>
|
||||
Refresh
|
||||
</button>
|
||||
</div>
|
||||
|
||||
{/* Summary bar */}
|
||||
<div className="h-2 rounded-full bg-gray-800 overflow-hidden flex">
|
||||
{allOnline ? (
|
||||
<div
|
||||
className="h-full bg-gradient-to-r from-purple-500 to-indigo-500 transition-all duration-500"
|
||||
style={{ width: '100%' }}
|
||||
/>
|
||||
) : (
|
||||
<>
|
||||
<div
|
||||
className="h-full bg-gradient-to-r from-emerald-500 to-emerald-400 transition-all duration-500"
|
||||
style={{ width: `${(onlineCount / totalCount) * 100}%` }}
|
||||
/>
|
||||
<div
|
||||
className="h-full bg-gradient-to-r from-red-500 to-red-400 transition-all duration-500"
|
||||
style={{ width: `${(offlineCount / totalCount) * 100}%` }}
|
||||
/>
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Service grid by category */}
|
||||
{categories.map(category => (
|
||||
<div key={category}>
|
||||
<div className="flex items-center gap-2 mb-4">
|
||||
<span className="text-gray-400">{CATEGORY_ICONS[category]}</span>
|
||||
<h2 className="text-lg font-semibold text-gray-300">{category}</h2>
|
||||
</div>
|
||||
<div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-4">
|
||||
{SERVICES.filter(s => s.category === category).map(service => {
|
||||
const st = statuses[service.name] || { status: 'unknown' };
|
||||
return (
|
||||
<div
|
||||
key={service.name}
|
||||
className={`relative rounded-xl border p-4 transition-all duration-200 ${
|
||||
st.status === 'online'
|
||||
? 'bg-gray-800/50 border-gray-700 hover:border-emerald-500/50'
|
||||
: st.status === 'offline'
|
||||
? 'bg-gray-800/50 border-red-500/30 hover:border-red-500/50'
|
||||
: 'bg-gray-800/50 border-gray-700'
|
||||
}`}
|
||||
>
|
||||
<div className="flex items-start justify-between">
|
||||
<div className="flex-1">
|
||||
<div className="flex items-center gap-2">
|
||||
<StatusDot status={st.status} />
|
||||
<h3 className="font-medium text-gray-200">{service.name}</h3>
|
||||
</div>
|
||||
<p className="text-xs text-gray-500 mt-1">{service.description}</p>
|
||||
{st.responseTime !== null && (
|
||||
<p className="text-xs text-gray-600 mt-0.5">{st.responseTime}ms</p>
|
||||
)}
|
||||
</div>
|
||||
<div className="flex items-center gap-2">
|
||||
{service.restart && st.status === 'offline' && (
|
||||
<button
|
||||
onClick={() => restartService(service)}
|
||||
disabled={restarting[service.name]}
|
||||
className="text-xs px-2.5 py-1 rounded-md bg-amber-600/80 hover:bg-amber-500 disabled:bg-gray-700 disabled:text-gray-500 text-white transition-colors flex items-center gap-1"
|
||||
>
|
||||
{restarting[service.name] ? (
|
||||
<>
|
||||
<svg className="w-3 h-3 animate-spin" fill="none" viewBox="0 0 24 24">
|
||||
<circle className="opacity-25" cx="12" cy="12" r="10" stroke="currentColor" strokeWidth="4" />
|
||||
<path className="opacity-75" fill="currentColor" d="M4 12a8 8 0 018-8V0C5.373 0 0 5.373 0 12h4z" />
|
||||
</svg>
|
||||
Restarting
|
||||
</>
|
||||
) : (
|
||||
<>
|
||||
<svg className="w-3 h-3" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={2}>
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M5.636 18.364a9 9 0 010-12.728m12.728 0a9 9 0 010 12.728M12 9v3m0 0v3m0-3h3m-3 0H9" />
|
||||
</svg>
|
||||
Restart
|
||||
</>
|
||||
)}
|
||||
</button>
|
||||
)}
|
||||
{service.uiUrl && (
|
||||
<a
|
||||
href={service.uiUrl}
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
className="text-xs px-2.5 py-1 rounded-md bg-gray-700 hover:bg-gray-600 text-gray-300 transition-colors flex items-center gap-1"
|
||||
>
|
||||
Open
|
||||
<svg className="w-3 h-3" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={2}>
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M13.5 6H5.25A2.25 2.25 0 003 8.25v10.5A2.25 2.25 0 005.25 21h10.5A2.25 2.25 0 0018 18.75V10.5m-10.5 6L21 3m0 0h-5.25M21 3v5.25" />
|
||||
</svg>
|
||||
</a>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
1
homeai-character/src/assets/react.svg
Normal file
1
homeai-character/src/assets/react.svg
Normal file
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" role="img" class="iconify iconify--logos" width="35.93" height="32" preserveAspectRatio="xMidYMid meet" viewBox="0 0 256 228"><path fill="#00D8FF" d="M210.483 73.824a171.49 171.49 0 0 0-8.24-2.597c.465-1.9.893-3.777 1.273-5.621c6.238-30.281 2.16-54.676-11.769-62.708c-13.355-7.7-35.196.329-57.254 19.526a171.23 171.23 0 0 0-6.375 5.848a155.866 155.866 0 0 0-4.241-3.917C100.759 3.829 77.587-4.822 63.673 3.233C50.33 10.957 46.379 33.89 51.995 62.588a170.974 170.974 0 0 0 1.892 8.48c-3.28.932-6.445 1.924-9.474 2.98C17.309 83.498 0 98.307 0 113.668c0 15.865 18.582 31.778 46.812 41.427a145.52 145.52 0 0 0 6.921 2.165a167.467 167.467 0 0 0-2.01 9.138c-5.354 28.2-1.173 50.591 12.134 58.266c13.744 7.926 36.812-.22 59.273-19.855a145.567 145.567 0 0 0 5.342-4.923a168.064 168.064 0 0 0 6.92 6.314c21.758 18.722 43.246 26.282 56.54 18.586c13.731-7.949 18.194-32.003 12.4-61.268a145.016 145.016 0 0 0-1.535-6.842c1.62-.48 3.21-.974 4.76-1.488c29.348-9.723 48.443-25.443 48.443-41.52c0-15.417-17.868-30.326-45.517-39.844Zm-6.365 70.984c-1.4.463-2.836.91-4.3 1.345c-3.24-10.257-7.612-21.163-12.963-32.432c5.106-11 9.31-21.767 12.459-31.957c2.619.758 5.16 1.557 7.61 2.4c23.69 8.156 38.14 20.213 38.14 29.504c0 9.896-15.606 22.743-40.946 31.14Zm-10.514 20.834c2.562 12.94 2.927 24.64 1.23 33.787c-1.524 8.219-4.59 13.698-8.382 15.893c-8.067 4.67-25.32-1.4-43.927-17.412a156.726 156.726 0 0 1-6.437-5.87c7.214-7.889 14.423-17.06 21.459-27.246c12.376-1.098 24.068-2.894 34.671-5.345a134.17 134.17 0 0 1 1.386 6.193ZM87.276 214.515c-7.882 2.783-14.16 2.863-17.955.675c-8.075-4.657-11.432-22.636-6.853-46.752a156.923 156.923 0 0 1 1.869-8.499c10.486 2.32 22.093 3.988 34.498 4.994c7.084 9.967 14.501 19.128 21.976 27.15a134.668 134.668 0 0 1-4.877 4.492c-9.933 8.682-19.886 14.842-28.658 17.94ZM50.35 144.747c-12.483-4.267-22.792-9.812-29.858-15.863c-6.35-5.437-9.555-10.836-9.555-15.216c0-9.322 
13.897-21.212 37.076-29.293c2.813-.98 5.757-1.905 8.812-2.773c3.204 10.42 7.406 21.315 12.477 32.332c-5.137 11.18-9.399 22.249-12.634 32.792a134.718 134.718 0 0 1-6.318-1.979Zm12.378-84.26c-4.811-24.587-1.616-43.134 6.425-47.789c8.564-4.958 27.502 2.111 47.463 19.835a144.318 144.318 0 0 1 3.841 3.545c-7.438 7.987-14.787 17.08-21.808 26.988c-12.04 1.116-23.565 2.908-34.161 5.309a160.342 160.342 0 0 1-1.76-7.887Zm110.427 27.268a347.8 347.8 0 0 0-7.785-12.803c8.168 1.033 15.994 2.404 23.343 4.08c-2.206 7.072-4.956 14.465-8.193 22.045a381.151 381.151 0 0 0-7.365-13.322Zm-45.032-43.861c5.044 5.465 10.096 11.566 15.065 18.186a322.04 322.04 0 0 0-30.257-.006c4.974-6.559 10.069-12.652 15.192-18.18ZM82.802 87.83a323.167 323.167 0 0 0-7.227 13.238c-3.184-7.553-5.909-14.98-8.134-22.152c7.304-1.634 15.093-2.97 23.209-3.984a321.524 321.524 0 0 0-7.848 12.897Zm8.081 65.352c-8.385-.936-16.291-2.203-23.593-3.793c2.26-7.3 5.045-14.885 8.298-22.6a321.187 321.187 0 0 0 7.257 13.246c2.594 4.48 5.28 8.868 8.038 13.147Zm37.542 31.03c-5.184-5.592-10.354-11.779-15.403-18.433c4.902.192 9.899.29 14.978.29c5.218 0 10.376-.117 15.453-.343c-4.985 6.774-10.018 12.97-15.028 18.486Zm52.198-57.817c3.422 7.8 6.306 15.345 8.596 22.52c-7.422 1.694-15.436 3.058-23.88 4.071a382.417 382.417 0 0 0 7.859-13.026a347.403 347.403 0 0 0 7.425-13.565Zm-16.898 8.101a358.557 358.557 0 0 1-12.281 19.815a329.4 329.4 0 0 1-23.444.823c-7.967 0-15.716-.248-23.178-.732a310.202 310.202 0 0 1-12.513-19.846h.001a307.41 307.41 0 0 1-10.923-20.627a310.278 310.278 0 0 1 10.89-20.637l-.001.001a307.318 307.318 0 0 1 12.413-19.761c7.613-.576 15.42-.876 23.31-.876H128c7.926 0 15.743.303 23.354.883a329.357 329.357 0 0 1 12.335 19.695a358.489 358.489 0 0 1 11.036 20.54a329.472 329.472 0 0 1-11 20.722Zm22.56-122.124c8.572 4.944 11.906 24.881 6.52 51.026c-.344 1.668-.73 3.367-1.15 5.09c-10.622-2.452-22.155-4.275-34.23-5.408c-7.034-10.017-14.323-19.124-21.64-27.008a160.789 160.789 0 0 1 5.888-5.4c18.9-16.447 36.564-22.941 
44.612-18.3ZM128 90.808c12.625 0 22.86 10.235 22.86 22.86s-10.235 22.86-22.86 22.86s-22.86-10.235-22.86-22.86s10.235-22.86 22.86-22.86Z"></path></svg>
|
||||
|
After Width: | Height: | Size: 4.0 KiB |
13
homeai-character/src/index.css
Normal file
13
homeai-character/src/index.css
Normal file
@@ -0,0 +1,13 @@
|
||||
@import "tailwindcss";
|
||||
|
||||
body {
|
||||
margin: 0;
|
||||
background-color: #030712;
|
||||
color: #f3f4f6;
|
||||
-webkit-font-smoothing: antialiased;
|
||||
-moz-osx-font-smoothing: grayscale;
|
||||
}
|
||||
|
||||
#root {
|
||||
min-height: 100vh;
|
||||
}
|
||||
10
homeai-character/src/main.jsx
Normal file
10
homeai-character/src/main.jsx
Normal file
@@ -0,0 +1,10 @@
|
||||
import { StrictMode } from 'react'
|
||||
import { createRoot } from 'react-dom/client'
|
||||
import './index.css'
|
||||
import App from './App.jsx'
|
||||
|
||||
createRoot(document.getElementById('root')).render(
|
||||
<StrictMode>
|
||||
<App />
|
||||
</StrictMode>,
|
||||
)
|
||||
169
homeai-character/vite.config.js
Normal file
169
homeai-character/vite.config.js
Normal file
@@ -0,0 +1,169 @@
|
||||
import { defineConfig } from 'vite'
|
||||
import react from '@vitejs/plugin-react'
|
||||
import tailwindcss from '@tailwindcss/vite'
|
||||
|
||||
function healthCheckPlugin() {
|
||||
return {
|
||||
name: 'health-check-proxy',
|
||||
configureServer(server) {
|
||||
server.middlewares.use('/api/health', async (req, res) => {
|
||||
const params = new URL(req.url, 'http://localhost').searchParams;
|
||||
const url = params.get('url');
|
||||
const mode = params.get('mode'); // 'tcp' for raw TCP port check
|
||||
if (!url) {
|
||||
res.writeHead(400, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: 'Missing url param' }));
|
||||
return;
|
||||
}
|
||||
const start = Date.now();
|
||||
const parsedUrl = new URL(url);
|
||||
|
||||
try {
|
||||
if (mode === 'tcp') {
|
||||
// TCP socket connect check for non-HTTP services (e.g. Wyoming)
|
||||
const { default: net } = await import('net');
|
||||
await new Promise((resolve, reject) => {
|
||||
const socket = net.createConnection(
|
||||
{ host: parsedUrl.hostname, port: parseInt(parsedUrl.port), timeout: 5000 },
|
||||
() => { socket.destroy(); resolve(); }
|
||||
);
|
||||
socket.on('error', reject);
|
||||
socket.on('timeout', () => { socket.destroy(); reject(new Error('timeout')); });
|
||||
});
|
||||
} else {
|
||||
// HTTP/HTTPS health check
|
||||
const { default: https } = await import('https');
|
||||
const { default: http } = await import('http');
|
||||
const client = parsedUrl.protocol === 'https:' ? https : http;
|
||||
|
||||
await new Promise((resolve, reject) => {
|
||||
const reqObj = client.get(url, { rejectUnauthorized: false, timeout: 5000 }, (resp) => {
|
||||
resp.resume();
|
||||
resolve();
|
||||
});
|
||||
reqObj.on('error', reject);
|
||||
reqObj.on('timeout', () => { reqObj.destroy(); reject(new Error('timeout')); });
|
||||
});
|
||||
}
|
||||
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ status: 'online', responseTime: Date.now() - start }));
|
||||
} catch {
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ status: 'offline', responseTime: null }));
|
||||
}
|
||||
});
|
||||
// Service restart — runs launchctl or docker restart
|
||||
server.middlewares.use('/api/service/restart', async (req, res) => {
|
||||
if (req.method === 'OPTIONS') {
|
||||
res.writeHead(204, { 'Access-Control-Allow-Origin': '*', 'Access-Control-Allow-Methods': 'POST', 'Access-Control-Allow-Headers': 'Content-Type' });
|
||||
res.end();
|
||||
return;
|
||||
}
|
||||
if (req.method !== 'POST') {
|
||||
res.writeHead(405);
|
||||
res.end();
|
||||
return;
|
||||
}
|
||||
try {
|
||||
const chunks = [];
|
||||
for await (const chunk of req) chunks.push(chunk);
|
||||
const { type, id } = JSON.parse(Buffer.concat(chunks).toString());
|
||||
|
||||
if (!type || !id) {
|
||||
res.writeHead(400, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ ok: false, error: 'Missing type or id' }));
|
||||
return;
|
||||
}
|
||||
|
||||
// Whitelist valid service IDs to prevent command injection
|
||||
const ALLOWED_LAUNCHD = [
|
||||
'gui/501/com.homeai.ollama',
|
||||
'gui/501/com.homeai.openclaw',
|
||||
'gui/501/com.homeai.openclaw-bridge',
|
||||
'gui/501/com.homeai.wyoming-stt',
|
||||
'gui/501/com.homeai.wyoming-tts',
|
||||
'gui/501/com.homeai.wyoming-satellite',
|
||||
'gui/501/com.homeai.character-dashboard',
|
||||
];
|
||||
const ALLOWED_DOCKER = [
|
||||
'homeai-open-webui',
|
||||
'homeai-uptime-kuma',
|
||||
'homeai-n8n',
|
||||
'homeai-code-server',
|
||||
];
|
||||
|
||||
let cmd;
|
||||
if (type === 'launchd' && ALLOWED_LAUNCHD.includes(id)) {
|
||||
cmd = ['launchctl', 'kickstart', '-k', id];
|
||||
} else if (type === 'docker' && ALLOWED_DOCKER.includes(id)) {
|
||||
cmd = ['docker', 'restart', id];
|
||||
} else {
|
||||
res.writeHead(403, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ ok: false, error: 'Service not in allowed list' }));
|
||||
return;
|
||||
}
|
||||
|
||||
const { execFile } = await import('child_process');
|
||||
const { promisify } = await import('util');
|
||||
const execFileAsync = promisify(execFile);
|
||||
const { stdout, stderr } = await execFileAsync(cmd[0], cmd.slice(1), { timeout: 30000 });
|
||||
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ ok: true, stdout: stdout.trim(), stderr: stderr.trim() }));
|
||||
} catch (err) {
|
||||
res.writeHead(500, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ ok: false, error: err.message }));
|
||||
}
|
||||
});
|
||||
|
||||
// TTS preview proxy — forwards POST to OpenClaw bridge, returns audio
|
||||
server.middlewares.use('/api/tts', async (req, res) => {
|
||||
if (req.method !== 'POST') {
|
||||
res.writeHead(405);
|
||||
res.end();
|
||||
return;
|
||||
}
|
||||
try {
|
||||
const { default: http } = await import('http');
|
||||
const chunks = [];
|
||||
for await (const chunk of req) chunks.push(chunk);
|
||||
const body = Buffer.concat(chunks);
|
||||
|
||||
await new Promise((resolve, reject) => {
|
||||
const proxyReq = http.request(
|
||||
'http://localhost:8081/api/tts',
|
||||
{ method: 'POST', headers: { 'Content-Type': 'application/json', 'Content-Length': body.length }, timeout: 30000 },
|
||||
(proxyRes) => {
|
||||
res.writeHead(proxyRes.statusCode, {
|
||||
'Content-Type': proxyRes.headers['content-type'] || 'audio/wav',
|
||||
});
|
||||
proxyRes.pipe(res);
|
||||
proxyRes.on('end', resolve);
|
||||
}
|
||||
);
|
||||
proxyReq.on('error', reject);
|
||||
proxyReq.on('timeout', () => { proxyReq.destroy(); reject(new Error('timeout')); });
|
||||
proxyReq.write(body);
|
||||
proxyReq.end();
|
||||
});
|
||||
} catch {
|
||||
res.writeHead(502, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: 'TTS bridge unreachable' }));
|
||||
}
|
||||
});
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
// https://vite.dev/config/
|
||||
export default defineConfig({
|
||||
plugins: [
|
||||
healthCheckPlugin(),
|
||||
tailwindcss(),
|
||||
react(),
|
||||
],
|
||||
server: {
|
||||
host: '0.0.0.0',
|
||||
},
|
||||
})
|
||||
2
homeai-dashboard/.gitignore
vendored
Normal file
2
homeai-dashboard/.gitignore
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
node_modules/
|
||||
dist/
|
||||
49
homeai-dashboard/characters/aria.json
Normal file
49
homeai-dashboard/characters/aria.json
Normal file
@@ -0,0 +1,49 @@
|
||||
{
|
||||
"schema_version": 1,
|
||||
"name": "aria",
|
||||
"display_name": "Aria",
|
||||
"description": "Default HomeAI assistant persona",
|
||||
"system_prompt": "You are Aria, a warm, curious, and helpful AI assistant living in the home. You speak naturally and conversationally — never robotic. You are knowledgeable but never condescending. You remember the people you live with and build on those memories over time. Keep responses concise when controlling smart home devices; be more expressive in casual conversation. Never break character.",
|
||||
"model_overrides": {
|
||||
"primary": "llama3.3:70b",
|
||||
"fast": "qwen2.5:7b"
|
||||
},
|
||||
"tts": {
|
||||
"engine": "chatterbox",
|
||||
"voice_ref_path": "~/voices/aria-raw.wav",
|
||||
"kokoro_voice": "af_heart",
|
||||
"speed": 1.0
|
||||
},
|
||||
"live2d_expressions": {
|
||||
"idle": "expr_idle",
|
||||
"listening": "expr_listening",
|
||||
"thinking": "expr_thinking",
|
||||
"speaking": "expr_speaking",
|
||||
"happy": "expr_happy",
|
||||
"sad": "expr_sad",
|
||||
"surprised": "expr_surprised",
|
||||
"error": "expr_error"
|
||||
},
|
||||
"vtube_ws_triggers": {
|
||||
"thinking": {
|
||||
"type": "hotkey",
|
||||
"id": "expr_thinking"
|
||||
},
|
||||
"speaking": {
|
||||
"type": "hotkey",
|
||||
"id": "expr_speaking"
|
||||
},
|
||||
"idle": {
|
||||
"type": "hotkey",
|
||||
"id": "expr_idle"
|
||||
}
|
||||
},
|
||||
"custom_rules": [
|
||||
{
|
||||
"trigger": "good morning",
|
||||
"response": "Good morning! How did you sleep?",
|
||||
"condition": "time_of_day == morning"
|
||||
}
|
||||
],
|
||||
"notes": "Default persona. Voice clone to be added once reference audio recorded."
|
||||
}
|
||||
15
homeai-dashboard/index.html
Normal file
15
homeai-dashboard/index.html
Normal file
@@ -0,0 +1,15 @@
|
||||
<!doctype html>
|
||||
<html lang="en" class="dark">
|
||||
<head>
|
||||
<meta charset="UTF-8" />
|
||||
<link rel="icon" type="image/svg+xml" href="/icon.svg" />
|
||||
<link rel="manifest" href="/manifest.json" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
<meta name="theme-color" content="#030712" />
|
||||
<title>HomeAI Dashboard</title>
|
||||
</head>
|
||||
<body class="bg-gray-950 text-gray-100">
|
||||
<div id="root"></div>
|
||||
<script type="module" src="/src/main.jsx"></script>
|
||||
</body>
|
||||
</html>
|
||||
41
homeai-dashboard/launchd/com.homeai.dashboard.plist
Normal file
41
homeai-dashboard/launchd/com.homeai.dashboard.plist
Normal file
@@ -0,0 +1,41 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN"
|
||||
"http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>Label</key>
|
||||
<string>com.homeai.dashboard</string>
|
||||
|
||||
<key>ProgramArguments</key>
|
||||
<array>
|
||||
<string>/opt/homebrew/bin/npx</string>
|
||||
<string>vite</string>
|
||||
<string>--host</string>
|
||||
<string>--port</string>
|
||||
<string>5173</string>
|
||||
</array>
|
||||
|
||||
<key>WorkingDirectory</key>
|
||||
<string>/Users/aodhan/gitea/homeai/homeai-dashboard</string>
|
||||
|
||||
<key>EnvironmentVariables</key>
|
||||
<dict>
|
||||
<key>PATH</key>
|
||||
<string>/opt/homebrew/bin:/usr/local/bin:/usr/bin:/bin</string>
|
||||
<key>HOME</key>
|
||||
<string>/Users/aodhan</string>
|
||||
</dict>
|
||||
|
||||
<key>RunAtLoad</key>
|
||||
<true/>
|
||||
|
||||
<key>KeepAlive</key>
|
||||
<true/>
|
||||
|
||||
<key>StandardOutPath</key>
|
||||
<string>/tmp/homeai-dashboard.log</string>
|
||||
|
||||
<key>StandardErrorPath</key>
|
||||
<string>/tmp/homeai-dashboard-error.log</string>
|
||||
</dict>
|
||||
</plist>
|
||||
2229
homeai-dashboard/package-lock.json
generated
Normal file
2229
homeai-dashboard/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
26
homeai-dashboard/package.json
Normal file
26
homeai-dashboard/package.json
Normal file
@@ -0,0 +1,26 @@
|
||||
{
|
||||
"name": "homeai-dashboard",
|
||||
"private": true,
|
||||
"version": "0.1.0",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "vite",
|
||||
"build": "vite build",
|
||||
"preview": "vite preview"
|
||||
},
|
||||
"dependencies": {
|
||||
"@tailwindcss/vite": "^4.2.1",
|
||||
"ajv": "^8.18.0",
|
||||
"react": "^19.2.0",
|
||||
"react-dom": "^19.2.0",
|
||||
"react-router-dom": "^7.13.1",
|
||||
"tailwindcss": "^4.2.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@vitejs/plugin-react": "^5.1.1",
|
||||
"vite": "^8.0.0-beta.13"
|
||||
},
|
||||
"overrides": {
|
||||
"vite": "^8.0.0-beta.13"
|
||||
}
|
||||
}
|
||||
9
homeai-dashboard/public/icon.svg
Normal file
9
homeai-dashboard/public/icon.svg
Normal file
@@ -0,0 +1,9 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 64 64">
|
||||
<rect width="64" height="64" rx="14" fill="#030712"/>
|
||||
<circle cx="32" cy="28" r="12" fill="none" stroke="#818cf8" stroke-width="2.5"/>
|
||||
<path d="M26 26c0-3.3 2.7-6 6-6s6 2.7 6 6" fill="none" stroke="#818cf8" stroke-width="2" stroke-linecap="round"/>
|
||||
<rect x="30" y="40" width="4" height="8" rx="2" fill="#818cf8"/>
|
||||
<path d="M24 52h16" stroke="#818cf8" stroke-width="2.5" stroke-linecap="round"/>
|
||||
<circle cx="29" cy="27" r="1.5" fill="#34d399"/>
|
||||
<circle cx="35" cy="27" r="1.5" fill="#34d399"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 575 B |
16
homeai-dashboard/public/manifest.json
Normal file
16
homeai-dashboard/public/manifest.json
Normal file
@@ -0,0 +1,16 @@
|
||||
{
|
||||
"name": "HomeAI Dashboard",
|
||||
"short_name": "HomeAI",
|
||||
"description": "HomeAI dashboard — services, chat, and character management",
|
||||
"start_url": "/",
|
||||
"display": "standalone",
|
||||
"background_color": "#030712",
|
||||
"theme_color": "#030712",
|
||||
"icons": [
|
||||
{
|
||||
"src": "/icon.svg",
|
||||
"sizes": "any",
|
||||
"type": "image/svg+xml"
|
||||
}
|
||||
]
|
||||
}
|
||||
82
homeai-dashboard/schema/character.schema.json
Normal file
82
homeai-dashboard/schema/character.schema.json
Normal file
@@ -0,0 +1,82 @@
|
||||
{
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"title": "HomeAI Character Config",
|
||||
"version": "1",
|
||||
"type": "object",
|
||||
"required": ["schema_version", "name", "system_prompt", "tts"],
|
||||
"properties": {
|
||||
"schema_version": { "type": "integer", "const": 1 },
|
||||
"name": { "type": "string" },
|
||||
"display_name": { "type": "string" },
|
||||
"description": { "type": "string" },
|
||||
|
||||
"system_prompt": { "type": "string" },
|
||||
|
||||
"model_overrides": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"primary": { "type": "string" },
|
||||
"fast": { "type": "string" }
|
||||
}
|
||||
},
|
||||
|
||||
"tts": {
|
||||
"type": "object",
|
||||
"required": ["engine"],
|
||||
"properties": {
|
||||
"engine": {
|
||||
"type": "string",
|
||||
"enum": ["kokoro", "chatterbox", "qwen3", "elevenlabs"]
|
||||
},
|
||||
"voice_ref_path": { "type": "string" },
|
||||
"kokoro_voice": { "type": "string" },
|
||||
"elevenlabs_voice_id": { "type": "string" },
|
||||
"elevenlabs_model": { "type": "string", "default": "eleven_monolingual_v1" },
|
||||
"speed": { "type": "number", "default": 1.0 }
|
||||
}
|
||||
},
|
||||
|
||||
"live2d_expressions": {
|
||||
"type": "object",
|
||||
"description": "Maps semantic state to VTube Studio hotkey ID",
|
||||
"properties": {
|
||||
"idle": { "type": "string" },
|
||||
"listening": { "type": "string" },
|
||||
"thinking": { "type": "string" },
|
||||
"speaking": { "type": "string" },
|
||||
"happy": { "type": "string" },
|
||||
"sad": { "type": "string" },
|
||||
"surprised": { "type": "string" },
|
||||
"error": { "type": "string" }
|
||||
}
|
||||
},
|
||||
|
||||
"vtube_ws_triggers": {
|
||||
"type": "object",
|
||||
"description": "VTube Studio WebSocket actions keyed by event name",
|
||||
"additionalProperties": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"type": { "type": "string", "enum": ["hotkey", "parameter"] },
|
||||
"id": { "type": "string" },
|
||||
"value": { "type": "number" }
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
"custom_rules": {
|
||||
"type": "array",
|
||||
"description": "Trigger/response overrides for specific contexts",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"trigger": { "type": "string" },
|
||||
"response": { "type": "string" },
|
||||
"condition": { "type": "string" }
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
"notes": { "type": "string" }
|
||||
}
|
||||
}
|
||||
123
homeai-dashboard/src/App.jsx
Normal file
123
homeai-dashboard/src/App.jsx
Normal file
@@ -0,0 +1,123 @@
|
||||
import { BrowserRouter, Routes, Route, NavLink } from 'react-router-dom';
|
||||
import Dashboard from './pages/Dashboard';
|
||||
import Chat from './pages/Chat';
|
||||
import Characters from './pages/Characters';
|
||||
import Editor from './pages/Editor';
|
||||
|
||||
function NavItem({ to, children, icon }) {
|
||||
return (
|
||||
<NavLink
|
||||
to={to}
|
||||
className={({ isActive }) =>
|
||||
`flex items-center gap-3 px-4 py-2.5 rounded-lg text-sm font-medium transition-colors ${
|
||||
isActive
|
||||
? 'bg-gray-800 text-white'
|
||||
: 'text-gray-400 hover:text-gray-200 hover:bg-gray-800/50'
|
||||
}`
|
||||
}
|
||||
>
|
||||
{icon}
|
||||
<span>{children}</span>
|
||||
</NavLink>
|
||||
);
|
||||
}
|
||||
|
||||
function Layout({ children }) {
|
||||
return (
|
||||
<div className="h-screen bg-gray-950 flex overflow-hidden">
|
||||
{/* Sidebar */}
|
||||
<aside className="w-64 bg-gray-900 border-r border-gray-800 flex flex-col shrink-0">
|
||||
{/* Logo */}
|
||||
<div className="px-6 py-5 border-b border-gray-800">
|
||||
<div className="flex items-center gap-3">
|
||||
<div className="w-9 h-9 rounded-lg bg-gradient-to-br from-indigo-500 to-purple-600 flex items-center justify-center">
|
||||
<svg className="w-5 h-5 text-white" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={2}>
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M2.25 12l8.954-8.955c.44-.439 1.152-.439 1.591 0L21.75 12M4.5 9.75v10.125c0 .621.504 1.125 1.125 1.125H9.75v-4.875c0-.621.504-1.125 1.125-1.125h2.25c.621 0 1.125.504 1.125 1.125V21h4.125c.621 0 1.125-.504 1.125-1.125V9.75M8.25 21h8.25" />
|
||||
</svg>
|
||||
</div>
|
||||
<div>
|
||||
<h1 className="text-lg font-bold text-white tracking-tight">HomeAI</h1>
|
||||
<p className="text-xs text-gray-500">LINDBLUM</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Nav */}
|
||||
<nav className="flex-1 px-3 py-4 space-y-1">
|
||||
<NavItem
|
||||
to="/"
|
||||
icon={
|
||||
<svg className="w-5 h-5" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={1.5}>
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M3.75 6A2.25 2.25 0 016 3.75h2.25A2.25 2.25 0 0110.5 6v2.25a2.25 2.25 0 01-2.25 2.25H6a2.25 2.25 0 01-2.25-2.25V6zM3.75 15.75A2.25 2.25 0 016 13.5h2.25a2.25 2.25 0 012.25 2.25V18a2.25 2.25 0 01-2.25 2.25H6A2.25 2.25 0 013.75 18v-2.25zM13.5 6a2.25 2.25 0 012.25-2.25H18A2.25 2.25 0 0120.25 6v2.25A2.25 2.25 0 0118 10.5h-2.25a2.25 2.25 0 01-2.25-2.25V6zM13.5 15.75a2.25 2.25 0 012.25-2.25H18a2.25 2.25 0 012.25 2.25V18A2.25 2.25 0 0118 20.25h-2.25A2.25 2.25 0 0113.5 18v-2.25z" />
|
||||
</svg>
|
||||
}
|
||||
>
|
||||
Dashboard
|
||||
</NavItem>
|
||||
|
||||
<NavItem
|
||||
to="/chat"
|
||||
icon={
|
||||
<svg className="w-5 h-5" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={1.5}>
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M8.625 12a.375.375 0 11-.75 0 .375.375 0 01.75 0zm0 0H8.25m4.125 0a.375.375 0 11-.75 0 .375.375 0 01.75 0zm0 0H12m4.125 0a.375.375 0 11-.75 0 .375.375 0 01.75 0zm0 0h-.375M21 12c0 4.556-4.03 8.25-9 8.25a9.764 9.764 0 01-2.555-.337A5.972 5.972 0 015.41 20.97a5.969 5.969 0 01-.474-.065 4.48 4.48 0 00.978-2.025c.09-.457-.133-.901-.467-1.226C3.93 16.178 3 14.189 3 12c0-4.556 4.03-8.25 9-8.25s9 3.694 9 8.25z" />
|
||||
</svg>
|
||||
}
|
||||
>
|
||||
Chat
|
||||
</NavItem>
|
||||
|
||||
<NavItem
|
||||
to="/characters"
|
||||
icon={
|
||||
<svg className="w-5 h-5" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={1.5}>
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M15.75 6a3.75 3.75 0 11-7.5 0 3.75 3.75 0 017.5 0zM4.501 20.118a7.5 7.5 0 0114.998 0A17.933 17.933 0 0112 21.75c-2.676 0-5.216-.584-7.499-1.632z" />
|
||||
</svg>
|
||||
}
|
||||
>
|
||||
Characters
|
||||
</NavItem>
|
||||
|
||||
<NavItem
|
||||
to="/editor"
|
||||
icon={
|
||||
<svg className="w-5 h-5" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={1.5}>
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M9.594 3.94c.09-.542.56-.94 1.11-.94h2.593c.55 0 1.02.398 1.11.94l.213 1.281c.063.374.313.686.645.87.074.04.147.083.22.127.324.196.72.257 1.075.124l1.217-.456a1.125 1.125 0 011.37.49l1.296 2.247a1.125 1.125 0 01-.26 1.431l-1.003.827c-.293.24-.438.613-.431.992a6.759 6.759 0 010 .255c-.007.378.138.75.43.99l1.005.828c.424.35.534.954.26 1.43l-1.298 2.247a1.125 1.125 0 01-1.369.491l-1.217-.456c-.355-.133-.75-.072-1.076.124a6.57 6.57 0 01-.22.128c-.331.183-.581.495-.644.869l-.213 1.28c-.09.543-.56.941-1.11.941h-2.594c-.55 0-1.02-.398-1.11-.94l-.213-1.281c-.062-.374-.312-.686-.644-.87a6.52 6.52 0 01-.22-.127c-.325-.196-.72-.257-1.076-.124l-1.217.456a1.125 1.125 0 01-1.369-.49l-1.297-2.247a1.125 1.125 0 01.26-1.431l1.004-.827c.292-.24.437-.613.43-.992a6.932 6.932 0 010-.255c.007-.378-.138-.75-.43-.99l-1.004-.828a1.125 1.125 0 01-.26-1.43l1.297-2.247a1.125 1.125 0 011.37-.491l1.216.456c.356.133.751.072 1.076-.124.072-.044.146-.087.22-.128.332-.183.582-.495.644-.869l.214-1.281z" />
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M15 12a3 3 0 11-6 0 3 3 0 016 0z" />
|
||||
</svg>
|
||||
}
|
||||
>
|
||||
Editor
|
||||
</NavItem>
|
||||
</nav>
|
||||
|
||||
{/* Footer */}
|
||||
<div className="px-6 py-4 border-t border-gray-800">
|
||||
<p className="text-xs text-gray-600">HomeAI v0.1.0</p>
|
||||
<p className="text-xs text-gray-700">Mac Mini M4 Pro</p>
|
||||
</div>
|
||||
</aside>
|
||||
|
||||
{/* Main content */}
|
||||
<main className="flex-1 overflow-hidden flex flex-col">
|
||||
{children}
|
||||
</main>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
function App() {
|
||||
return (
|
||||
<BrowserRouter>
|
||||
<Layout>
|
||||
<Routes>
|
||||
<Route path="/" element={<div className="flex-1 overflow-y-auto p-8"><div className="max-w-6xl mx-auto"><Dashboard /></div></div>} />
|
||||
<Route path="/chat" element={<Chat />} />
|
||||
<Route path="/characters" element={<div className="flex-1 overflow-y-auto p-8"><div className="max-w-6xl mx-auto"><Characters /></div></div>} />
|
||||
<Route path="/editor" element={<div className="flex-1 overflow-y-auto p-8"><div className="max-w-6xl mx-auto"><Editor /></div></div>} />
|
||||
</Routes>
|
||||
</Layout>
|
||||
</BrowserRouter>
|
||||
);
|
||||
}
|
||||
|
||||
export default App;
|
||||
35
homeai-dashboard/src/components/ChatPanel.jsx
Normal file
35
homeai-dashboard/src/components/ChatPanel.jsx
Normal file
@@ -0,0 +1,35 @@
|
||||
import { useEffect, useRef } from 'react'
|
||||
import MessageBubble from './MessageBubble'
|
||||
import ThinkingIndicator from './ThinkingIndicator'
|
||||
|
||||
export default function ChatPanel({ messages, isLoading, onReplay }) {
|
||||
const bottomRef = useRef(null)
|
||||
|
||||
useEffect(() => {
|
||||
bottomRef.current?.scrollIntoView({ behavior: 'smooth' })
|
||||
}, [messages, isLoading])
|
||||
|
||||
if (messages.length === 0 && !isLoading) {
|
||||
return (
|
||||
<div className="flex-1 flex items-center justify-center">
|
||||
<div className="text-center">
|
||||
<div className="w-16 h-16 rounded-full bg-indigo-600/20 flex items-center justify-center mx-auto mb-4">
|
||||
<span className="text-indigo-400 text-2xl">AI</span>
|
||||
</div>
|
||||
<h2 className="text-xl font-medium text-gray-200 mb-2">Hi, I'm Aria</h2>
|
||||
<p className="text-gray-500 text-sm">Type a message or press the mic to talk</p>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="flex-1 overflow-y-auto py-4">
|
||||
{messages.map((msg) => (
|
||||
<MessageBubble key={msg.id} message={msg} onReplay={onReplay} />
|
||||
))}
|
||||
{isLoading && <ThinkingIndicator />}
|
||||
<div ref={bottomRef} />
|
||||
</div>
|
||||
)
|
||||
}
|
||||
53
homeai-dashboard/src/components/InputBar.jsx
Normal file
53
homeai-dashboard/src/components/InputBar.jsx
Normal file
@@ -0,0 +1,53 @@
|
||||
import { useState, useRef } from 'react'
import VoiceButton from './VoiceButton'

// Bottom input bar: mic toggle, textarea, and send button.
//   onSend(text)   - invoked with the raw textarea content on submit
//   onVoiceToggle  - start/stop recording, forwarded to VoiceButton
//   isLoading      - disables all input while a reply is pending
//   isRecording / isTranscribing - voice pipeline state for VoiceButton
export default function InputBar({ onSend, onVoiceToggle, isLoading, isRecording, isTranscribing }) {
  const [text, setText] = useState('')
  // NOTE(review): inputRef is attached to the textarea but never read here —
  // presumably reserved for programmatic focus; confirm before removing.
  const inputRef = useRef(null)

  const handleSubmit = (e) => {
    e.preventDefault()
    // Ignore empty/whitespace-only input and submits while a reply is pending.
    if (!text.trim() || isLoading) return
    onSend(text)
    setText('')
  }

  // Enter sends; Shift+Enter falls through to the textarea's newline behavior.
  const handleKeyDown = (e) => {
    if (e.key === 'Enter' && !e.shiftKey) {
      e.preventDefault()
      handleSubmit(e)
    }
  }

  return (
    <form onSubmit={handleSubmit} className="border-t border-gray-800 bg-gray-950 px-4 py-3 shrink-0">
      <div className="flex items-end gap-2 max-w-3xl mx-auto">
        <VoiceButton
          isRecording={isRecording}
          isTranscribing={isTranscribing}
          onToggle={onVoiceToggle}
          disabled={isLoading}
        />
        <textarea
          ref={inputRef}
          value={text}
          onChange={(e) => setText(e.target.value)}
          onKeyDown={handleKeyDown}
          placeholder="Type a message..."
          rows={1}
          className="flex-1 bg-gray-800 text-gray-100 rounded-xl px-4 py-2.5 text-sm resize-none placeholder-gray-500 focus:outline-none focus:ring-1 focus:ring-indigo-500 min-h-[42px] max-h-32"
          disabled={isLoading}
        />
        {/* Send button; disabled mirrors the guard in handleSubmit */}
        <button
          type="submit"
          disabled={!text.trim() || isLoading}
          className="w-10 h-10 rounded-full bg-indigo-600 text-white flex items-center justify-center shrink-0 hover:bg-indigo-500 disabled:opacity-40 disabled:hover:bg-indigo-600 transition-colors"
        >
          {/* paper-plane icon */}
          <svg className="w-5 h-5" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={2}>
            <path strokeLinecap="round" strokeLinejoin="round" d="M6 12L3.269 3.126A59.768 59.768 0 0121.485 12 59.77 59.77 0 013.27 20.876L5.999 12zm0 0h7.5" />
          </svg>
        </button>
      </div>
    </form>
  )
}
|
||||
39
homeai-dashboard/src/components/MessageBubble.jsx
Normal file
39
homeai-dashboard/src/components/MessageBubble.jsx
Normal file
@@ -0,0 +1,39 @@
|
||||
// Single chat message row. User messages align right in indigo; assistant
// messages align left with an "AI" avatar, gray (red when message.isError).
// When onReplay is provided, assistant (non-error) messages get a speaker
// button that calls onReplay(message.content) to re-synthesize them via TTS.
export default function MessageBubble({ message, onReplay }) {
  const isUser = message.role === 'user'

  return (
    <div className={`flex ${isUser ? 'justify-end' : 'justify-start'} px-4 py-1.5`}>
      {/* flex-row-reverse puts the (absent) avatar slot on the right for users */}
      <div className={`flex items-start gap-3 max-w-[80%] ${isUser ? 'flex-row-reverse' : ''}`}>
        {!isUser && (
          <div className="w-8 h-8 rounded-full bg-indigo-600/20 flex items-center justify-center shrink-0 mt-0.5">
            <span className="text-indigo-400 text-sm">AI</span>
          </div>
        )}
        <div>
          {/* Bubble styling: user > error > normal assistant */}
          <div
            className={`rounded-2xl px-4 py-2.5 text-sm leading-relaxed whitespace-pre-wrap ${
              isUser
                ? 'bg-indigo-600 text-white'
                : message.isError
                ? 'bg-red-900/40 text-red-200 border border-red-800/50'
                : 'bg-gray-800 text-gray-100'
            }`}
          >
            {message.content}
          </div>
          {!isUser && !message.isError && onReplay && (
            <button
              onClick={() => onReplay(message.content)}
              className="mt-1 ml-1 text-gray-500 hover:text-indigo-400 transition-colors"
              title="Replay audio"
            >
              {/* speaker icon */}
              <svg className="w-4 h-4" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={2}>
                <path strokeLinecap="round" strokeLinejoin="round" d="M15.536 8.464a5 5 0 010 7.072M17.95 6.05a8 8 0 010 11.9M6.5 9H4a1 1 0 00-1 1v4a1 1 0 001 1h2.5l4 4V5l-4 4z" />
              </svg>
            </button>
          )}
        </div>
      </div>
    </div>
  )
}
|
||||
74
homeai-dashboard/src/components/SettingsDrawer.jsx
Normal file
74
homeai-dashboard/src/components/SettingsDrawer.jsx
Normal file
@@ -0,0 +1,74 @@
|
||||
import { VOICES } from '../lib/constants'

// Right-hand settings drawer (backdrop + fixed panel).
//   isOpen   - when false, renders nothing
//   onClose  - invoked on backdrop click or the close button
//   settings - current { voice, autoTts, sttMode } values
//   onUpdate - onUpdate(key, value), persists a single setting
export default function SettingsDrawer({ isOpen, onClose, settings, onUpdate }) {
  if (!isOpen) return null

  return (
    <>
      {/* Click-away backdrop */}
      <div className="fixed inset-0 bg-black/50 z-40" onClick={onClose} />
      <div className="fixed right-0 top-0 bottom-0 w-80 bg-gray-900 border-l border-gray-800 z-50 flex flex-col">
        {/* Header with close button */}
        <div className="flex items-center justify-between px-4 py-3 border-b border-gray-800">
          <h2 className="text-sm font-medium text-gray-200">Settings</h2>
          <button onClick={onClose} className="text-gray-500 hover:text-gray-300">
            <svg className="w-5 h-5" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={2}>
              <path strokeLinecap="round" strokeLinejoin="round" d="M6 18L18 6M6 6l12 12" />
            </svg>
          </button>
        </div>
        <div className="flex-1 overflow-y-auto p-4 space-y-5">
          {/* Voice: TTS voice selection from the shared VOICES list */}
          <div>
            <label className="block text-xs font-medium text-gray-400 mb-1.5">Voice</label>
            <select
              value={settings.voice}
              onChange={(e) => onUpdate('voice', e.target.value)}
              className="w-full bg-gray-800 text-gray-200 text-sm rounded-lg px-3 py-2 border border-gray-700 focus:outline-none focus:border-indigo-500"
            >
              {VOICES.map((v) => (
                <option key={v.id} value={v.id}>{v.label}</option>
              ))}
            </select>
          </div>

          {/* Auto TTS: toggle switch for speaking replies automatically */}
          <div className="flex items-center justify-between">
            <div>
              <div className="text-sm text-gray-200">Auto-speak responses</div>
              <div className="text-xs text-gray-500">Speak assistant replies aloud</div>
            </div>
            <button
              onClick={() => onUpdate('autoTts', !settings.autoTts)}
              className={`relative w-10 h-6 rounded-full transition-colors ${
                settings.autoTts ? 'bg-indigo-600' : 'bg-gray-700'
              }`}
            >
              {/* sliding knob */}
              <span
                className={`absolute top-0.5 left-0.5 w-5 h-5 rounded-full bg-white transition-transform ${
                  settings.autoTts ? 'translate-x-4' : ''
                }`}
              />
            </button>
          </div>

          {/* STT Mode: which speech-recognition backend to use */}
          <div>
            <label className="block text-xs font-medium text-gray-400 mb-1.5">Speech recognition</label>
            <select
              value={settings.sttMode}
              onChange={(e) => onUpdate('sttMode', e.target.value)}
              className="w-full bg-gray-800 text-gray-200 text-sm rounded-lg px-3 py-2 border border-gray-700 focus:outline-none focus:border-indigo-500"
            >
              <option value="bridge">Wyoming STT (local)</option>
              <option value="webspeech">Web Speech API (browser)</option>
            </select>
            {/* Helper text describing the selected mode */}
            <p className="text-xs text-gray-500 mt-1">
              {settings.sttMode === 'bridge'
                ? 'Uses Whisper via the local bridge server'
                : 'Uses browser built-in speech recognition'}
            </p>
          </div>
        </div>
      </div>
    </>
  )
}
|
||||
11
homeai-dashboard/src/components/StatusIndicator.jsx
Normal file
11
homeai-dashboard/src/components/StatusIndicator.jsx
Normal file
@@ -0,0 +1,11 @@
|
||||
export default function StatusIndicator({ isOnline }) {
|
||||
if (isOnline === null) {
|
||||
return <span className="inline-block w-2.5 h-2.5 rounded-full bg-gray-500 animate-pulse" title="Checking..." />
|
||||
}
|
||||
return (
|
||||
<span
|
||||
className={`inline-block w-2.5 h-2.5 rounded-full ${isOnline ? 'bg-emerald-400' : 'bg-red-400'}`}
|
||||
title={isOnline ? 'Bridge online' : 'Bridge offline'}
|
||||
/>
|
||||
)
|
||||
}
|
||||
14
homeai-dashboard/src/components/ThinkingIndicator.jsx
Normal file
14
homeai-dashboard/src/components/ThinkingIndicator.jsx
Normal file
@@ -0,0 +1,14 @@
|
||||
export default function ThinkingIndicator() {
|
||||
return (
|
||||
<div className="flex items-start gap-3 px-4 py-3">
|
||||
<div className="w-8 h-8 rounded-full bg-indigo-600/20 flex items-center justify-center shrink-0">
|
||||
<span className="text-indigo-400 text-sm">AI</span>
|
||||
</div>
|
||||
<div className="flex items-center gap-1 pt-2.5">
|
||||
<span className="w-2 h-2 rounded-full bg-gray-400 animate-[bounce_1.4s_ease-in-out_infinite]" />
|
||||
<span className="w-2 h-2 rounded-full bg-gray-400 animate-[bounce_1.4s_ease-in-out_0.2s_infinite]" />
|
||||
<span className="w-2 h-2 rounded-full bg-gray-400 animate-[bounce_1.4s_ease-in-out_0.4s_infinite]" />
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
32
homeai-dashboard/src/components/VoiceButton.jsx
Normal file
32
homeai-dashboard/src/components/VoiceButton.jsx
Normal file
@@ -0,0 +1,32 @@
|
||||
// Mic toggle button with three visual states:
//   recording    - red, pulsing glow; click stops recording
//   transcribing - spinner, cursor-wait; clicks ignored until STT finishes
//   idle         - gray mic icon; click starts recording
export default function VoiceButton({ isRecording, isTranscribing, onToggle, disabled }) {
  const handleClick = () => {
    // Swallow clicks while disabled or mid-transcription.
    if (disabled || isTranscribing) return
    onToggle()
  }

  return (
    <button
      onClick={handleClick}
      disabled={disabled || isTranscribing}
      className={`w-10 h-10 rounded-full flex items-center justify-center transition-all shrink-0 ${
        isRecording
          ? 'bg-red-500 text-white shadow-[0_0_0_4px_rgba(239,68,68,0.3)] animate-pulse'
          : isTranscribing
          ? 'bg-gray-700 text-gray-400 cursor-wait'
          : 'bg-gray-800 text-gray-400 hover:bg-gray-700 hover:text-gray-200'
      }`}
      title={isRecording ? 'Stop recording' : isTranscribing ? 'Transcribing...' : 'Start recording (Space)'}
    >
      {isTranscribing ? (
        // spinner while waiting on transcription
        <svg className="w-5 h-5 animate-spin" fill="none" viewBox="0 0 24 24">
          <circle className="opacity-25" cx="12" cy="12" r="10" stroke="currentColor" strokeWidth="4" />
          <path className="opacity-75" fill="currentColor" d="M4 12a8 8 0 018-8V0C5.373 0 0 5.373 0 12h4z" />
        </svg>
      ) : (
        // microphone icon (idle and recording states)
        <svg className="w-5 h-5" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={2}>
          <path strokeLinecap="round" strokeLinejoin="round" d="M12 18.75a6 6 0 006-6v-1.5m-6 7.5a6 6 0 01-6-6v-1.5m6 7.5v3.75m-3.75 0h7.5M12 15.75a3 3 0 01-3-3V4.5a3 3 0 116 0v8.25a3 3 0 01-3 3z" />
        </svg>
      )}
    </button>
  )
}
|
||||
18
homeai-dashboard/src/hooks/useBridgeHealth.js
Normal file
18
homeai-dashboard/src/hooks/useBridgeHealth.js
Normal file
@@ -0,0 +1,18 @@
|
||||
import { useState, useEffect, useRef } from 'react'
import { healthCheck } from '../lib/api'

// Polls the bridge's health endpoint once on mount and every 15 seconds
// thereafter. Returns null until the first probe completes, then true/false.
export function useBridgeHealth() {
  const [isOnline, setIsOnline] = useState(null)

  useEffect(() => {
    const probe = async () => {
      setIsOnline(await healthCheck())
    }

    probe()
    const timer = setInterval(probe, 15000)
    return () => clearInterval(timer)
  }, [])

  return isOnline
}
|
||||
45
homeai-dashboard/src/hooks/useChat.js
Normal file
45
homeai-dashboard/src/hooks/useChat.js
Normal file
@@ -0,0 +1,45 @@
|
||||
import { useState, useCallback } from 'react'
import { sendMessage } from '../lib/api'

// Chat state hook: holds the message list plus an async send() that appends
// the user's message, calls the agent API, and appends the reply — or an
// error bubble when the call fails.
// Returns { messages, isLoading, send, clearHistory }; send resolves to the
// reply text, or null on failure / rejected input.
export function useChat() {
  const [messages, setMessages] = useState([])
  const [isLoading, setIsLoading] = useState(false)

  const send = useCallback(async (text) => {
    const trimmed = text.trim()
    // Reject empty input and overlapping sends.
    if (!trimmed || isLoading) return null

    const append = (msg) => setMessages((prev) => [...prev, msg])

    append({ id: Date.now(), role: 'user', content: trimmed, timestamp: new Date() })
    setIsLoading(true)

    try {
      const response = await sendMessage(trimmed)
      append({
        id: Date.now() + 1,
        role: 'assistant',
        content: response,
        timestamp: new Date(),
      })
      return response
    } catch (err) {
      // Surface the failure as an in-chat error bubble instead of throwing.
      append({
        id: Date.now() + 1,
        role: 'assistant',
        content: `Error: ${err.message}`,
        timestamp: new Date(),
        isError: true,
      })
      return null
    } finally {
      setIsLoading(false)
    }
  }, [isLoading])

  const clearHistory = useCallback(() => setMessages([]), [])

  return { messages, isLoading, send, clearHistory }
}
|
||||
27
homeai-dashboard/src/hooks/useSettings.js
Normal file
27
homeai-dashboard/src/hooks/useSettings.js
Normal file
@@ -0,0 +1,27 @@
|
||||
import { useState, useCallback } from 'react'
import { DEFAULT_SETTINGS } from '../lib/constants'

// localStorage key under which dashboard settings are persisted.
const STORAGE_KEY = 'homeai_dashboard_settings'

// Load persisted settings, overlaying them on DEFAULT_SETTINGS so keys
// added after the user last saved still get a value. Falls back to the
// defaults on missing/corrupt data or when storage is unavailable.
function loadSettings() {
  try {
    const stored = localStorage.getItem(STORAGE_KEY)
    return stored ? { ...DEFAULT_SETTINGS, ...JSON.parse(stored) } : { ...DEFAULT_SETTINGS }
  } catch {
    return { ...DEFAULT_SETTINGS }
  }
}

// Settings state persisted to localStorage.
// Returns { settings, updateSetting(key, value) }.
export function useSettings() {
  const [settings, setSettings] = useState(loadSettings)

  const updateSetting = useCallback((key, value) => {
    setSettings((prev) => {
      const next = { ...prev, [key]: value }
      // Persist best-effort: setItem can throw (quota exceeded, some
      // private-browsing modes). A failed write must not abort the
      // in-memory state update.
      try {
        localStorage.setItem(STORAGE_KEY, JSON.stringify(next))
      } catch {}
      return next
    })
  }, [])

  return { settings, updateSetting }
}
|
||||
56
homeai-dashboard/src/hooks/useTtsPlayback.js
Normal file
56
homeai-dashboard/src/hooks/useTtsPlayback.js
Normal file
@@ -0,0 +1,56 @@
|
||||
import { useState, useRef, useCallback } from 'react'
import { synthesize } from '../lib/api'

// TTS playback hook: fetches synthesized audio for a text and plays it
// through a lazily created (and reused) AudioContext.
// Returns { isPlaying, speak(text), stop() }.
export function useTtsPlayback(voice) {
  const [isPlaying, setIsPlaying] = useState(false)
  const audioCtxRef = useRef(null)
  // Currently playing AudioBufferSourceNode, if any.
  const sourceRef = useRef(null)

  // Reuse one AudioContext across plays; recreate it only if it was closed.
  const getAudioContext = () => {
    if (!audioCtxRef.current || audioCtxRef.current.state === 'closed') {
      audioCtxRef.current = new AudioContext()
    }
    return audioCtxRef.current
  }

  // Detach and stop the current source, if any. Clearing onended first is
  // the fix for a race: the superseded source's onended used to fire after
  // a newer playback had started and reset isPlaying/sourceRef under it.
  const stopCurrentSource = () => {
    const current = sourceRef.current
    if (current) {
      current.onended = null
      try { current.stop() } catch {}
      sourceRef.current = null
    }
  }

  const speak = useCallback(async (text) => {
    if (!text) return

    stopCurrentSource()

    setIsPlaying(true)
    try {
      const audioData = await synthesize(text, voice)
      const ctx = getAudioContext()
      // Fresh contexts start suspended until a user gesture; resume here.
      if (ctx.state === 'suspended') await ctx.resume()

      const audioBuffer = await ctx.decodeAudioData(audioData)
      const source = ctx.createBufferSource()
      source.buffer = audioBuffer
      source.connect(ctx.destination)
      sourceRef.current = source

      source.onended = () => {
        // Only reset state if this source is still the active one — it may
        // have been replaced by a newer speak() call in the meantime.
        if (sourceRef.current === source) {
          setIsPlaying(false)
          sourceRef.current = null
        }
      }
      source.start()
    } catch (err) {
      console.error('TTS playback error:', err)
      setIsPlaying(false)
    }
  }, [voice])

  const stop = useCallback(() => {
    stopCurrentSource()
    setIsPlaying(false)
  }, [])

  return { isPlaying, speak, stop }
}
|
||||
91
homeai-dashboard/src/hooks/useVoiceInput.js
Normal file
91
homeai-dashboard/src/hooks/useVoiceInput.js
Normal file
@@ -0,0 +1,91 @@
|
||||
import { useState, useRef, useCallback } from 'react'
import { createRecorder } from '../lib/audio'
import { transcribe } from '../lib/api'

// Voice input hook with two backends:
//  - 'bridge'   : record PCM locally (lib/audio) and POST it to the STT bridge
//  - 'webspeech': browser SpeechRecognition (only when webkit-prefixed API exists)
// Returns { isRecording, isTranscribing, startRecording, stopRecording };
// stopRecording resolves to the transcript string, or null on failure.
export function useVoiceInput(sttMode = 'bridge') {
  const [isRecording, setIsRecording] = useState(false)
  const [isTranscribing, setIsTranscribing] = useState(false)
  const recorderRef = useRef(null)
  // Holds { recognition } while a Web Speech session is active.
  const webSpeechRef = useRef(null)

  const startRecording = useCallback(async () => {
    if (isRecording) return

    // Web Speech path only when the prefixed API exists; otherwise fall
    // through to the local recorder even if sttMode is 'webspeech'.
    if (sttMode === 'webspeech' && 'webkitSpeechRecognition' in window) {
      return startWebSpeech()
    }

    try {
      const recorder = createRecorder()
      recorderRef.current = recorder
      await recorder.start()
      setIsRecording(true)
    } catch (err) {
      // Mic denied/unavailable: isRecording stays false, so a later
      // stopRecording() is a no-op.
      console.error('Mic access error:', err)
    }
  }, [isRecording, sttMode])

  const stopRecording = useCallback(async () => {
    if (!isRecording) return null

    if (sttMode === 'webspeech' && webSpeechRef.current) {
      return stopWebSpeech()
    }

    setIsRecording(false)
    setIsTranscribing(true)

    try {
      // NOTE(review): assumes recorderRef.current is set — holds today
      // because isRecording is only true after recorder.start() succeeded.
      const wavBlob = await recorderRef.current.stop()
      recorderRef.current = null
      const text = await transcribe(wavBlob)
      return text
    } catch (err) {
      console.error('Transcription error:', err)
      return null
    } finally {
      setIsTranscribing(false)
    }
  }, [isRecording, sttMode])

  // Begin a Web Speech session. Resolves immediately; the transcript is
  // delivered later via stopWebSpeech().
  function startWebSpeech() {
    return new Promise((resolve) => {
      const SpeechRecognition = window.webkitSpeechRecognition || window.SpeechRecognition
      const recognition = new SpeechRecognition()
      recognition.continuous = false
      recognition.interimResults = false
      recognition.lang = 'en-US'
      // NOTE(review): no onresult/onerror/onend handlers are attached here —
      // they are only wired up in stopWebSpeech(). If recognition ends on
      // its own (e.g. silence timeout) before the user taps stop, nothing
      // fires and isRecording stays true until stop is pressed; confirm
      // this is the intended UX.
      webSpeechRef.current = { recognition, resolve: null }
      recognition.start()
      setIsRecording(true)
      resolve()
    })
  }

  // Finish the Web Speech session: attach result/error/end handlers, then
  // ask the recognizer to stop. Resolves with the transcript or null.
  function stopWebSpeech() {
    return new Promise((resolve) => {
      const { recognition } = webSpeechRef.current
      recognition.onresult = (e) => {
        const text = e.results[0]?.[0]?.transcript || ''
        setIsRecording(false)
        webSpeechRef.current = null
        resolve(text)
      }
      recognition.onerror = () => {
        setIsRecording(false)
        webSpeechRef.current = null
        resolve(null)
      }
      recognition.onend = () => {
        setIsRecording(false)
        // Only resolve here if onresult/onerror have not already done so
        // (they null out webSpeechRef when they run).
        if (webSpeechRef.current) {
          webSpeechRef.current = null
          resolve(null)
        }
      }
      recognition.stop()
    })
  }

  return { isRecording, isTranscribing, startRecording, stopRecording }
}
|
||||
35
homeai-dashboard/src/index.css
Normal file
35
homeai-dashboard/src/index.css
Normal file
@@ -0,0 +1,35 @@
|
||||
@import "tailwindcss";

/* Base document styling for the dark dashboard theme. */
body {
  margin: 0;
  background-color: #030712; /* near-black page background */
  color: #f3f4f6;            /* light default text */
  -webkit-font-smoothing: antialiased;
  -moz-osx-font-smoothing: grayscale;
}

/* Let the React mount point fill the viewport. */
#root {
  min-height: 100vh;
}

/* Scrollbar styling for dark theme (WebKit-based browsers only) */
::-webkit-scrollbar {
  width: 8px;
}

::-webkit-scrollbar-track {
  background: #0a0a0f;
}

::-webkit-scrollbar-thumb {
  background: #374151;
  border-radius: 4px;
}

::-webkit-scrollbar-thumb:hover {
  background: #4b5563;
}

/* Indigo-tinted text selection to match the accent color. */
::selection {
  background: rgba(99, 102, 241, 0.3);
}
|
||||
13
homeai-dashboard/src/lib/SchemaValidator.js
Normal file
13
homeai-dashboard/src/lib/SchemaValidator.js
Normal file
@@ -0,0 +1,13 @@
|
||||
import Ajv from 'ajv'
import schema from '../../schema/character.schema.json'

// Compile the character schema once at module load. allErrors collects
// every violation instead of stopping at the first; strict:false tolerates
// unknown schema keywords in the schema file.
const ajv = new Ajv({ allErrors: true, strict: false })
const validate = ajv.compile(schema)

// Validate a character config object against the JSON schema.
// Returns true when valid; throws an Error whose message lists all
// violations (via ajv.errorsText) otherwise.
export function validateCharacter(config) {
  const valid = validate(config)
  if (!valid) {
    throw new Error(ajv.errorsText(validate.errors))
  }
  return true
}
|
||||
44
homeai-dashboard/src/lib/api.js
Normal file
44
homeai-dashboard/src/lib/api.js
Normal file
@@ -0,0 +1,44 @@
|
||||
// Thin fetch wrappers around the local bridge API.

// POST a chat message to the agent and return the reply text.
// Throws an Error carrying the server-provided message, or the HTTP status
// when the error body is not JSON.
export async function sendMessage(text) {
  const res = await fetch('/api/agent/message', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ message: text, agent: 'main' }),
  })
  if (res.ok) {
    const data = await res.json()
    return data.response
  }
  const err = await res.json().catch(() => ({ error: 'Request failed' }))
  throw new Error(err.error || `HTTP ${res.status}`)
}

// Synthesize speech for text with the given voice; resolves to an
// ArrayBuffer of encoded audio.
export async function synthesize(text, voice) {
  const res = await fetch('/api/tts', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ text, voice }),
  })
  if (!res.ok) throw new Error('TTS failed')
  return res.arrayBuffer()
}

// Transcribe a WAV recording; resolves to the recognized text.
export async function transcribe(wavBlob) {
  const res = await fetch('/api/stt', {
    method: 'POST',
    headers: { 'Content-Type': 'audio/wav' },
    body: wavBlob,
  })
  if (!res.ok) throw new Error('STT failed')
  const payload = await res.json()
  return payload.text
}

// Probe the bridge health proxy; resolves to true only when it reports
// 'online'. Any failure (timeout, network error, bad JSON) counts as offline.
export async function healthCheck() {
  const target = 'http://localhost:8081/'
  try {
    const res = await fetch('/api/health?url=' + encodeURIComponent(target), { signal: AbortSignal.timeout(5000) })
    const data = await res.json()
    return data.status === 'online'
  } catch {
    return false
  }
}
|
||||
92
homeai-dashboard/src/lib/audio.js
Normal file
92
homeai-dashboard/src/lib/audio.js
Normal file
@@ -0,0 +1,92 @@
|
||||
const TARGET_RATE = 16000
|
||||
|
||||
// Microphone recorder that captures raw PCM via the Web Audio API and
// returns a 16 kHz mono 16-bit WAV Blob on stop().
// Usage: const r = createRecorder(); await r.start(); ... const wav = await r.stop()
export function createRecorder() {
  // Recorder state, shared by the start/stop closures below.
  let audioCtx
  let source
  let processor
  let stream
  let samples = [] // Float32Array chunks accumulated while recording

  // Open the microphone and begin accumulating PCM chunks.
  // Rejects if the user denies mic access.
  async function start() {
    samples = []
    // channelCount/sampleRate are constraints the browser may not honor
    // exactly; stop() resamples if the context rate differs from TARGET_RATE.
    stream = await navigator.mediaDevices.getUserMedia({
      audio: { channelCount: 1, sampleRate: TARGET_RATE },
    })
    audioCtx = new AudioContext({ sampleRate: TARGET_RATE })
    source = audioCtx.createMediaStreamSource(stream)

    // NOTE(review): ScriptProcessorNode is deprecated in favor of
    // AudioWorklet, but remains widely supported.
    processor = audioCtx.createScriptProcessor(4096, 1, 1)
    processor.onaudioprocess = (e) => {
      const input = e.inputBuffer.getChannelData(0)
      // Copy the chunk: the underlying buffer is reused between callbacks.
      samples.push(new Float32Array(input))
    }
    source.connect(processor)
    // A ScriptProcessorNode must be connected to a destination for its
    // onaudioprocess events to fire.
    processor.connect(audioCtx.destination)
  }

  // Stop recording, release the mic and audio context, and return the
  // captured audio as a WAV Blob. Assumes start() completed successfully.
  async function stop() {
    processor.disconnect()
    source.disconnect()
    stream.getTracks().forEach((t) => t.stop())
    await audioCtx.close()

    // Concatenate all chunks into one contiguous buffer.
    const totalLength = samples.reduce((acc, s) => acc + s.length, 0)
    const merged = new Float32Array(totalLength)
    let offset = 0
    for (const chunk of samples) {
      merged.set(chunk, offset)
      offset += chunk.length
    }

    // Resample only if the browser gave us a different context rate.
    const resampled = audioCtx.sampleRate !== TARGET_RATE
      ? resample(merged, audioCtx.sampleRate, TARGET_RATE)
      : merged

    return encodeWav(resampled, TARGET_RATE)
  }

  return { start, stop }
}
|
||||
|
||||
// Naive nearest-neighbor resampler (no low-pass filtering — adequate for
// speech headed to STT). Converts samples from fromRate to toRate and
// returns a new Float32Array.
function resample(samples, fromRate, toRate) {
  const ratio = fromRate / toRate
  const newLength = Math.round(samples.length / ratio)
  const result = new Float32Array(newLength)
  const lastIndex = samples.length - 1
  for (let i = 0; i < newLength; i++) {
    // Clamp the source index: Math.round(i * ratio) can land one past the
    // end when newLength itself rounded up, which previously read
    // undefined and wrote NaN into the output.
    result[i] = samples[Math.min(lastIndex, Math.round(i * ratio))]
  }
  return result
}
|
||||
|
||||
// Encode mono float samples ([-1, 1]) as a 16-bit PCM WAV file Blob.
function encodeWav(samples, sampleRate) {
  const numSamples = samples.length
  const dataBytes = numSamples * 2 // 16-bit mono: 2 bytes per sample
  const view = new DataView(new ArrayBuffer(44 + dataBytes))

  // Byte-wise ASCII writer for the chunk tags.
  const putAscii = (offset, str) => {
    for (let i = 0; i < str.length; i++) {
      view.setUint8(offset + i, str.charCodeAt(i))
    }
  }

  // RIFF container header
  putAscii(0, 'RIFF')
  view.setUint32(4, 36 + dataBytes, true) // remaining file size
  putAscii(8, 'WAVE')

  // fmt chunk: uncompressed PCM, mono, 16-bit
  putAscii(12, 'fmt ')
  view.setUint32(16, 16, true)             // fmt chunk byte length
  view.setUint16(20, 1, true)              // audio format 1 = PCM
  view.setUint16(22, 1, true)              // channel count
  view.setUint32(24, sampleRate, true)     // sample rate
  view.setUint32(28, sampleRate * 2, true) // byte rate = rate * block align
  view.setUint16(32, 2, true)              // block align (mono 16-bit)
  view.setUint16(34, 16, true)             // bits per sample

  // data chunk: clamp each float to [-1, 1] and scale to int16
  putAscii(36, 'data')
  view.setUint32(40, dataBytes, true)
  for (let i = 0; i < numSamples; i++) {
    const clamped = Math.max(-1, Math.min(1, samples[i]))
    view.setInt16(44 + i * 2, clamped < 0 ? clamped * 0x8000 : clamped * 0x7fff, true)
  }

  return new Blob([view.buffer], { type: 'audio/wav' })
}
|
||||
|
||||
// Write an ASCII string into a DataView, one byte per character, starting
// at the given byte offset.
function writeString(view, offset, str) {
  for (let idx = 0; idx < str.length; idx += 1) {
    view.setUint8(offset + idx, str.charCodeAt(idx))
  }
}
|
||||
37
homeai-dashboard/src/lib/constants.js
Normal file
37
homeai-dashboard/src/lib/constants.js
Normal file
@@ -0,0 +1,37 @@
|
||||
// Voice id used before the user has picked one (see DEFAULT_SETTINGS).
export const DEFAULT_VOICE = 'af_heart'

// Selectable TTS voices shown in the settings dropdown. Labels encode
// gender and accent; ids appear to follow an <accent><gender>_name scheme
// (af = US female, am = US male, bf = UK female, bm = UK male — inferred
// from the labels; confirm against the TTS engine's voice list).
export const VOICES = [
  { id: 'af_heart', label: 'Heart (F, US)' },
  { id: 'af_alloy', label: 'Alloy (F, US)' },
  { id: 'af_aoede', label: 'Aoede (F, US)' },
  { id: 'af_bella', label: 'Bella (F, US)' },
  { id: 'af_jessica', label: 'Jessica (F, US)' },
  { id: 'af_kore', label: 'Kore (F, US)' },
  { id: 'af_nicole', label: 'Nicole (F, US)' },
  { id: 'af_nova', label: 'Nova (F, US)' },
  { id: 'af_river', label: 'River (F, US)' },
  { id: 'af_sarah', label: 'Sarah (F, US)' },
  { id: 'af_sky', label: 'Sky (F, US)' },
  { id: 'am_adam', label: 'Adam (M, US)' },
  { id: 'am_echo', label: 'Echo (M, US)' },
  { id: 'am_eric', label: 'Eric (M, US)' },
  { id: 'am_fenrir', label: 'Fenrir (M, US)' },
  { id: 'am_liam', label: 'Liam (M, US)' },
  { id: 'am_michael', label: 'Michael (M, US)' },
  { id: 'am_onyx', label: 'Onyx (M, US)' },
  { id: 'am_puck', label: 'Puck (M, US)' },
  { id: 'bf_alice', label: 'Alice (F, UK)' },
  { id: 'bf_emma', label: 'Emma (F, UK)' },
  { id: 'bf_isabella', label: 'Isabella (F, UK)' },
  { id: 'bf_lily', label: 'Lily (F, UK)' },
  { id: 'bm_daniel', label: 'Daniel (M, UK)' },
  { id: 'bm_fable', label: 'Fable (M, UK)' },
  { id: 'bm_george', label: 'George (M, UK)' },
  { id: 'bm_lewis', label: 'Lewis (M, UK)' },
]

// Initial settings used before anything has been persisted (consumed by
// the useSettings hook).
export const DEFAULT_SETTINGS = {
  voice: DEFAULT_VOICE,
  autoTts: true,     // speak assistant replies automatically
  sttMode: 'bridge', // 'bridge' (local Whisper) or 'webspeech' (browser)
}
|
||||
10
homeai-dashboard/src/main.jsx
Normal file
10
homeai-dashboard/src/main.jsx
Normal file
@@ -0,0 +1,10 @@
|
||||
import { StrictMode } from 'react'
import { createRoot } from 'react-dom/client'
import './index.css'
import App from './App.jsx'

// Application entry point: mount <App /> into #root under StrictMode.
createRoot(document.getElementById('root')).render(
  <StrictMode>
    <App />
  </StrictMode>,
)
|
||||
292
homeai-dashboard/src/pages/Characters.jsx
Normal file
292
homeai-dashboard/src/pages/Characters.jsx
Normal file
@@ -0,0 +1,292 @@
|
||||
import { useState, useEffect } from 'react';
|
||||
import { useNavigate } from 'react-router-dom';
|
||||
import { validateCharacter } from '../lib/SchemaValidator';
|
||||
|
||||
const STORAGE_KEY = 'homeai_characters';
|
||||
const ACTIVE_KEY = 'homeai_active_character';
|
||||
|
||||
// ─── localStorage persistence for character profiles ────────────────────────

// Load all saved profiles; returns [] when nothing is stored or the
// stored JSON is corrupt.
function loadProfiles() {
  try {
    const raw = localStorage.getItem(STORAGE_KEY);
    if (!raw) return [];
    return JSON.parse(raw);
  } catch {
    return [];
  }
}

// Persist the full profile list.
function saveProfiles(profiles) {
  localStorage.setItem(STORAGE_KEY, JSON.stringify(profiles));
}

// Read the id of the currently active profile (null when none is set).
function getActiveId() {
  return localStorage.getItem(ACTIVE_KEY) || null;
}

// Mark the given profile id as the active one.
function setActiveId(id) {
  localStorage.setItem(ACTIVE_KEY, id);
}
|
||||
|
||||
export default function Characters() {
|
||||
const [profiles, setProfiles] = useState(loadProfiles);
|
||||
const [activeId, setActive] = useState(getActiveId);
|
||||
const [error, setError] = useState(null);
|
||||
const [dragOver, setDragOver] = useState(false);
|
||||
const navigate = useNavigate();
|
||||
|
||||
useEffect(() => {
|
||||
saveProfiles(profiles);
|
||||
}, [profiles]);
|
||||
|
||||
const handleImport = (e) => {
|
||||
const files = Array.from(e.target?.files || []);
|
||||
importFiles(files);
|
||||
if (e.target) e.target.value = '';
|
||||
};
|
||||
|
||||
const importFiles = (files) => {
|
||||
files.forEach(file => {
|
||||
if (!file.name.endsWith('.json')) return;
|
||||
const reader = new FileReader();
|
||||
reader.onload = (ev) => {
|
||||
try {
|
||||
const data = JSON.parse(ev.target.result);
|
||||
validateCharacter(data);
|
||||
const id = data.name + '_' + Date.now();
|
||||
setProfiles(prev => [...prev, { id, data, image: null, addedAt: new Date().toISOString() }]);
|
||||
setError(null);
|
||||
} catch (err) {
|
||||
setError(`Import failed for ${file.name}: ${err.message}`);
|
||||
}
|
||||
};
|
||||
reader.readAsText(file);
|
||||
});
|
||||
};
|
||||
|
||||
const handleDrop = (e) => {
|
||||
e.preventDefault();
|
||||
setDragOver(false);
|
||||
const files = Array.from(e.dataTransfer.files);
|
||||
importFiles(files);
|
||||
};
|
||||
|
||||
const handleImageUpload = (profileId, e) => {
|
||||
const file = e.target.files[0];
|
||||
if (!file) return;
|
||||
const reader = new FileReader();
|
||||
reader.onload = (ev) => {
|
||||
setProfiles(prev =>
|
||||
prev.map(p => p.id === profileId ? { ...p, image: ev.target.result } : p)
|
||||
);
|
||||
};
|
||||
reader.readAsDataURL(file);
|
||||
};
|
||||
|
||||
const removeProfile = (id) => {
|
||||
setProfiles(prev => prev.filter(p => p.id !== id));
|
||||
if (activeId === id) {
|
||||
setActive(null);
|
||||
localStorage.removeItem(ACTIVE_KEY);
|
||||
}
|
||||
};
|
||||
|
||||
const activateProfile = (id) => {
|
||||
setActive(id);
|
||||
setActiveId(id);
|
||||
};
|
||||
|
||||
const exportProfile = (profile) => {
|
||||
const dataStr = "data:text/json;charset=utf-8," + encodeURIComponent(JSON.stringify(profile.data, null, 2));
|
||||
const a = document.createElement('a');
|
||||
a.href = dataStr;
|
||||
a.download = `${profile.data.name || 'character'}.json`;
|
||||
a.click();
|
||||
};
|
||||
|
||||
const editProfile = (profile) => {
|
||||
sessionStorage.setItem('edit_character', JSON.stringify(profile.data));
|
||||
sessionStorage.setItem('edit_character_profile_id', profile.id);
|
||||
navigate('/editor');
|
||||
};
|
||||
|
||||
const activeProfile = profiles.find(p => p.id === activeId);
|
||||
|
||||
return (
|
||||
<div className="space-y-8">
|
||||
{/* Header */}
|
||||
<div className="flex items-center justify-between">
|
||||
<div>
|
||||
<h1 className="text-3xl font-bold text-gray-100">Characters</h1>
|
||||
<p className="text-sm text-gray-500 mt-1">
|
||||
{profiles.length} profile{profiles.length !== 1 ? 's' : ''} stored
|
||||
{activeProfile && (
|
||||
<span className="ml-2 text-emerald-400">
|
||||
Active: {activeProfile.data.display_name || activeProfile.data.name}
|
||||
</span>
|
||||
)}
|
||||
</p>
|
||||
</div>
|
||||
<label className="flex items-center gap-2 px-4 py-2 bg-indigo-600 hover:bg-indigo-500 text-white rounded-lg cursor-pointer transition-colors">
|
||||
<svg className="w-4 h-4" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={2}>
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M12 4.5v15m7.5-7.5h-15" />
|
||||
</svg>
|
||||
Import JSON
|
||||
<input type="file" accept=".json" multiple className="hidden" onChange={handleImport} />
|
||||
</label>
|
||||
</div>
|
||||
|
||||
{error && (
|
||||
<div className="bg-red-900/30 border border-red-500/50 text-red-300 px-4 py-3 rounded-lg text-sm">
|
||||
{error}
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Drop zone */}
|
||||
<div
|
||||
onDragOver={(e) => { e.preventDefault(); setDragOver(true); }}
|
||||
onDragLeave={() => setDragOver(false)}
|
||||
onDrop={handleDrop}
|
||||
className={`border-2 border-dashed rounded-xl p-8 text-center transition-colors ${
|
||||
dragOver
|
||||
? 'border-indigo-500 bg-indigo-500/10'
|
||||
: 'border-gray-700 hover:border-gray-600'
|
||||
}`}
|
||||
>
|
||||
<svg className="w-10 h-10 mx-auto text-gray-600 mb-3" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={1}>
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M3 16.5v2.25A2.25 2.25 0 005.25 21h13.5A2.25 2.25 0 0021 18.75V16.5m-13.5-9L12 3m0 0l4.5 4.5M12 3v13.5" />
|
||||
</svg>
|
||||
<p className="text-gray-500 text-sm">Drop character JSON files here to import</p>
|
||||
</div>
|
||||
|
||||
{/* Profile grid */}
|
||||
{profiles.length === 0 ? (
|
||||
<div className="text-center py-16">
|
||||
<svg className="w-16 h-16 mx-auto text-gray-700 mb-4" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={1}>
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M15.75 6a3.75 3.75 0 11-7.5 0 3.75 3.75 0 017.5 0zM4.501 20.118a7.5 7.5 0 0114.998 0A17.933 17.933 0 0112 21.75c-2.676 0-5.216-.584-7.499-1.632z" />
|
||||
</svg>
|
||||
<p className="text-gray-500">No character profiles yet. Import a JSON file to get started.</p>
|
||||
</div>
|
||||
) : (
|
||||
<div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-6">
|
||||
{profiles.map(profile => {
|
||||
const isActive = profile.id === activeId;
|
||||
const char = profile.data;
|
||||
return (
|
||||
<div
|
||||
key={profile.id}
|
||||
className={`relative rounded-xl border overflow-hidden transition-all duration-200 ${
|
||||
isActive
|
||||
? 'border-emerald-500/60 bg-emerald-500/5 ring-1 ring-emerald-500/30'
|
||||
: 'border-gray-700 bg-gray-800/50 hover:border-gray-600'
|
||||
}`}
|
||||
>
|
||||
{/* Image area */}
|
||||
<div className="relative h-48 bg-gray-900 flex items-center justify-center overflow-hidden group">
|
||||
{profile.image ? (
|
||||
<img
|
||||
src={profile.image}
|
||||
alt={char.display_name || char.name}
|
||||
className="w-full h-full object-cover"
|
||||
/>
|
||||
) : (
|
||||
<div className="text-6xl font-bold text-gray-700 select-none">
|
||||
{(char.display_name || char.name || '?')[0].toUpperCase()}
|
||||
</div>
|
||||
)}
|
||||
<label className="absolute inset-0 flex items-center justify-center bg-black/50 opacity-0 group-hover:opacity-100 transition-opacity cursor-pointer">
|
||||
<div className="text-center">
|
||||
<svg className="w-8 h-8 mx-auto text-white/80 mb-1" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={1.5}>
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M6.827 6.175A2.31 2.31 0 015.186 7.23c-.38.054-.757.112-1.134.175C2.999 7.58 2.25 8.507 2.25 9.574V18a2.25 2.25 0 002.25 2.25h15A2.25 2.25 0 0021.75 18V9.574c0-1.067-.75-1.994-1.802-2.169a47.865 47.865 0 00-1.134-.175 2.31 2.31 0 01-1.64-1.055l-.822-1.316a2.192 2.192 0 00-1.736-1.039 48.774 48.774 0 00-5.232 0 2.192 2.192 0 00-1.736 1.039l-.821 1.316z" />
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M16.5 12.75a4.5 4.5 0 11-9 0 4.5 4.5 0 019 0z" />
|
||||
</svg>
|
||||
<span className="text-xs text-white/70">Change image</span>
|
||||
</div>
|
||||
<input
|
||||
type="file"
|
||||
accept="image/*"
|
||||
className="hidden"
|
||||
onChange={(e) => handleImageUpload(profile.id, e)}
|
||||
/>
|
||||
</label>
|
||||
{isActive && (
|
||||
<span className="absolute top-2 right-2 px-2 py-0.5 bg-emerald-500 text-white text-xs font-medium rounded-full">
|
||||
Active
|
||||
</span>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Info */}
|
||||
<div className="p-4 space-y-3">
|
||||
<div>
|
||||
<h3 className="text-lg font-semibold text-gray-200">
|
||||
{char.display_name || char.name}
|
||||
</h3>
|
||||
<p className="text-xs text-gray-500 mt-0.5">{char.description}</p>
|
||||
</div>
|
||||
|
||||
<div className="flex flex-wrap gap-1.5">
|
||||
<span className="px-2 py-0.5 bg-gray-700/70 text-gray-400 text-xs rounded-full">
|
||||
{char.tts?.engine || 'kokoro'}
|
||||
</span>
|
||||
<span className="px-2 py-0.5 bg-gray-700/70 text-gray-400 text-xs rounded-full">
|
||||
{char.model_overrides?.primary || 'default'}
|
||||
</span>
|
||||
{char.tts?.kokoro_voice && (
|
||||
<span className="px-2 py-0.5 bg-gray-700/70 text-gray-400 text-xs rounded-full">
|
||||
{char.tts.kokoro_voice}
|
||||
</span>
|
||||
)}
|
||||
</div>
|
||||
|
||||
<div className="flex gap-2 pt-1">
|
||||
{!isActive ? (
|
||||
<button
|
||||
onClick={() => activateProfile(profile.id)}
|
||||
className="flex-1 px-3 py-1.5 bg-emerald-600 hover:bg-emerald-500 text-white text-sm rounded-lg transition-colors"
|
||||
>
|
||||
Activate
|
||||
</button>
|
||||
) : (
|
||||
<button
|
||||
disabled
|
||||
className="flex-1 px-3 py-1.5 bg-gray-700 text-gray-500 text-sm rounded-lg cursor-not-allowed"
|
||||
>
|
||||
Active
|
||||
</button>
|
||||
)}
|
||||
<button
|
||||
onClick={() => editProfile(profile)}
|
||||
className="px-3 py-1.5 bg-gray-700 hover:bg-gray-600 text-gray-300 text-sm rounded-lg transition-colors"
|
||||
title="Edit"
|
||||
>
|
||||
<svg className="w-4 h-4" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={2}>
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M16.862 4.487l1.687-1.688a1.875 1.875 0 112.652 2.652L10.582 16.07a4.5 4.5 0 01-1.897 1.13L6 18l.8-2.685a4.5 4.5 0 011.13-1.897l8.932-8.931zm0 0L19.5 7.125M18 14v4.75A2.25 2.25 0 0115.75 21H5.25A2.25 2.25 0 013 18.75V8.25A2.25 2.25 0 015.25 6H10" />
|
||||
</svg>
|
||||
</button>
|
||||
<button
|
||||
onClick={() => exportProfile(profile)}
|
||||
className="px-3 py-1.5 bg-gray-700 hover:bg-gray-600 text-gray-300 text-sm rounded-lg transition-colors"
|
||||
title="Export"
|
||||
>
|
||||
<svg className="w-4 h-4" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={2}>
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M3 16.5v2.25A2.25 2.25 0 005.25 21h13.5A2.25 2.25 0 0021 18.75V16.5M16.5 12L12 16.5m0 0L7.5 12m4.5 4.5V3" />
|
||||
</svg>
|
||||
</button>
|
||||
<button
|
||||
onClick={() => removeProfile(profile.id)}
|
||||
className="px-3 py-1.5 bg-gray-700 hover:bg-red-600 text-gray-300 hover:text-white text-sm rounded-lg transition-colors"
|
||||
title="Delete"
|
||||
>
|
||||
<svg className="w-4 h-4" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={2}>
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M14.74 9l-.346 9m-4.788 0L9.26 9m9.968-3.21c.342.052.682.107 1.022.166m-1.022-.165L18.16 19.673a2.25 2.25 0 01-2.244 2.077H8.084a2.25 2.25 0 01-2.244-2.077L4.772 5.79m14.456 0a48.108 48.108 0 00-3.478-.397m-12 .562c.34-.059.68-.114 1.022-.165m0 0a48.11 48.11 0 013.478-.397m7.5 0v-.916c0-1.18-.91-2.164-2.09-2.201a51.964 51.964 0 00-3.32 0c-1.18.037-2.09 1.022-2.09 2.201v.916m7.5 0a48.667 48.667 0 00-7.5 0" />
|
||||
</svg>
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
115
homeai-dashboard/src/pages/Chat.jsx
Normal file
115
homeai-dashboard/src/pages/Chat.jsx
Normal file
@@ -0,0 +1,115 @@
|
||||
import { useState, useEffect, useCallback } from 'react'
|
||||
import ChatPanel from '../components/ChatPanel'
|
||||
import InputBar from '../components/InputBar'
|
||||
import StatusIndicator from '../components/StatusIndicator'
|
||||
import SettingsDrawer from '../components/SettingsDrawer'
|
||||
import { useSettings } from '../hooks/useSettings'
|
||||
import { useBridgeHealth } from '../hooks/useBridgeHealth'
|
||||
import { useChat } from '../hooks/useChat'
|
||||
import { useTtsPlayback } from '../hooks/useTtsPlayback'
|
||||
import { useVoiceInput } from '../hooks/useVoiceInput'
|
||||
|
||||
export default function Chat() {
|
||||
const { settings, updateSetting } = useSettings()
|
||||
const isOnline = useBridgeHealth()
|
||||
const { messages, isLoading, send, clearHistory } = useChat()
|
||||
const { isPlaying, speak, stop } = useTtsPlayback(settings.voice)
|
||||
const { isRecording, isTranscribing, startRecording, stopRecording } = useVoiceInput(settings.sttMode)
|
||||
const [settingsOpen, setSettingsOpen] = useState(false)
|
||||
|
||||
// Send a message and optionally speak the response
|
||||
const handleSend = useCallback(async (text) => {
|
||||
const response = await send(text)
|
||||
if (response && settings.autoTts) {
|
||||
speak(response)
|
||||
}
|
||||
}, [send, settings.autoTts, speak])
|
||||
|
||||
// Toggle voice recording
|
||||
const handleVoiceToggle = useCallback(async () => {
|
||||
if (isRecording) {
|
||||
const text = await stopRecording()
|
||||
if (text) {
|
||||
handleSend(text)
|
||||
}
|
||||
} else {
|
||||
startRecording()
|
||||
}
|
||||
}, [isRecording, stopRecording, startRecording, handleSend])
|
||||
|
||||
// Space bar push-to-talk when input not focused
|
||||
useEffect(() => {
|
||||
const handleKeyDown = (e) => {
|
||||
if (e.code === 'Space' && e.target.tagName !== 'TEXTAREA' && e.target.tagName !== 'INPUT') {
|
||||
e.preventDefault()
|
||||
handleVoiceToggle()
|
||||
}
|
||||
}
|
||||
window.addEventListener('keydown', handleKeyDown)
|
||||
return () => window.removeEventListener('keydown', handleKeyDown)
|
||||
}, [handleVoiceToggle])
|
||||
|
||||
return (
|
||||
<div className="flex-1 flex flex-col min-h-0">
|
||||
{/* Status bar */}
|
||||
<header className="flex items-center justify-between px-4 py-2 border-b border-gray-800/50 shrink-0">
|
||||
<div className="flex items-center gap-2">
|
||||
<StatusIndicator isOnline={isOnline} />
|
||||
<span className="text-xs text-gray-500">
|
||||
{isOnline === null ? 'Connecting...' : isOnline ? 'Connected' : 'Offline'}
|
||||
</span>
|
||||
</div>
|
||||
<div className="flex items-center gap-2">
|
||||
{messages.length > 0 && (
|
||||
<button
|
||||
onClick={clearHistory}
|
||||
className="text-xs text-gray-500 hover:text-gray-300 transition-colors px-2 py-1"
|
||||
title="Clear conversation"
|
||||
>
|
||||
Clear
|
||||
</button>
|
||||
)}
|
||||
{isPlaying && (
|
||||
<button
|
||||
onClick={stop}
|
||||
className="text-xs text-indigo-400 hover:text-indigo-300 transition-colors px-2 py-1"
|
||||
title="Stop speaking"
|
||||
>
|
||||
Stop audio
|
||||
</button>
|
||||
)}
|
||||
<button
|
||||
onClick={() => setSettingsOpen(true)}
|
||||
className="text-gray-500 hover:text-gray-300 transition-colors p-1"
|
||||
title="Settings"
|
||||
>
|
||||
<svg className="w-5 h-5" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={1.5}>
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M9.594 3.94c.09-.542.56-.94 1.11-.94h2.593c.55 0 1.02.398 1.11.94l.213 1.281c.063.374.313.686.645.87.074.04.147.083.22.127.325.196.72.257 1.075.124l1.217-.456a1.125 1.125 0 011.37.49l1.296 2.247a1.125 1.125 0 01-.26 1.431l-1.003.827c-.293.241-.438.613-.43.992a7.723 7.723 0 010 .255c-.008.378.137.75.43.991l1.004.827c.424.35.534.955.26 1.43l-1.298 2.247a1.125 1.125 0 01-1.369.491l-1.217-.456c-.355-.133-.75-.072-1.076.124a6.47 6.47 0 01-.22.128c-.331.183-.581.495-.644.869l-.213 1.281c-.09.543-.56.941-1.11.941h-2.594c-.55 0-1.019-.398-1.11-.94l-.213-1.281c-.062-.374-.312-.686-.644-.87a6.52 6.52 0 01-.22-.127c-.325-.196-.72-.257-1.076-.124l-1.217.456a1.125 1.125 0 01-1.369-.49l-1.297-2.247a1.125 1.125 0 01.26-1.431l1.004-.827c.292-.24.437-.613.43-.991a6.932 6.932 0 010-.255c.007-.38-.138-.751-.43-.992l-1.004-.827a1.125 1.125 0 01-.26-1.43l1.297-2.247a1.125 1.125 0 011.37-.491l1.216.456c.356.133.751.072 1.076-.124.072-.044.146-.086.22-.128.332-.183.582-.495.644-.869l.214-1.28z" />
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M15 12a3 3 0 11-6 0 3 3 0 016 0z" />
|
||||
</svg>
|
||||
</button>
|
||||
</div>
|
||||
</header>
|
||||
|
||||
{/* Chat area */}
|
||||
<ChatPanel messages={messages} isLoading={isLoading} onReplay={speak} />
|
||||
|
||||
{/* Input */}
|
||||
<InputBar
|
||||
onSend={handleSend}
|
||||
onVoiceToggle={handleVoiceToggle}
|
||||
isLoading={isLoading}
|
||||
isRecording={isRecording}
|
||||
isTranscribing={isTranscribing}
|
||||
/>
|
||||
|
||||
{/* Settings drawer */}
|
||||
<SettingsDrawer
|
||||
isOpen={settingsOpen}
|
||||
onClose={() => setSettingsOpen(false)}
|
||||
settings={settings}
|
||||
onUpdate={updateSetting}
|
||||
/>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
376
homeai-dashboard/src/pages/Dashboard.jsx
Normal file
376
homeai-dashboard/src/pages/Dashboard.jsx
Normal file
@@ -0,0 +1,376 @@
|
||||
import { useState, useEffect, useCallback } from 'react';
|
||||
|
||||
// Catalog of HomeAI services rendered on the dashboard.
// Each entry:
//   name        — display name; also the key into the statuses map
//   url         — base URL of the service
//   healthPath  — path appended to url for the health probe
//   uiUrl       — web UI link for the "Open" button, or null when there is none
//   description — one-line summary shown on the card
//   category    — grouping header on the dashboard
//   tcp         — (optional) probe via raw TCP (&mode=tcp) instead of HTTP
//   restart     — (optional) how the backend can restart the service:
//                 { type: 'launchd' | 'docker', id: launchd label or container name }
const SERVICES = [
  {
    name: 'Ollama',
    url: 'http://localhost:11434',
    healthPath: '/api/tags',
    uiUrl: null,
    description: 'Local LLM runtime',
    category: 'AI & LLM',
    restart: { type: 'launchd', id: 'gui/501/com.homeai.ollama' },
  },
  {
    name: 'Open WebUI',
    url: 'http://localhost:3030',
    healthPath: '/',
    uiUrl: 'http://localhost:3030',
    description: 'Chat interface',
    category: 'AI & LLM',
    restart: { type: 'docker', id: 'homeai-open-webui' },
  },
  {
    name: 'OpenClaw Gateway',
    url: 'http://localhost:8080',
    healthPath: '/',
    uiUrl: null,
    description: 'Agent gateway',
    category: 'Agent',
    restart: { type: 'launchd', id: 'gui/501/com.homeai.openclaw' },
  },
  {
    name: 'OpenClaw Bridge',
    url: 'http://localhost:8081',
    healthPath: '/',
    uiUrl: null,
    description: 'HTTP-to-CLI bridge',
    category: 'Agent',
    restart: { type: 'launchd', id: 'gui/501/com.homeai.openclaw-bridge' },
  },
  {
    name: 'Wyoming STT',
    url: 'http://localhost:10300',
    healthPath: '/',
    uiUrl: null,
    description: 'Whisper speech-to-text',
    category: 'Voice',
    tcp: true, // Wyoming speaks a raw TCP protocol, not HTTP
    restart: { type: 'launchd', id: 'gui/501/com.homeai.wyoming-stt' },
  },
  {
    name: 'Wyoming TTS',
    url: 'http://localhost:10301',
    healthPath: '/',
    uiUrl: null,
    description: 'Kokoro text-to-speech',
    category: 'Voice',
    tcp: true,
    restart: { type: 'launchd', id: 'gui/501/com.homeai.wyoming-tts' },
  },
  {
    name: 'Wyoming Satellite',
    url: 'http://localhost:10700',
    healthPath: '/',
    uiUrl: null,
    description: 'Mac Mini mic/speaker satellite',
    category: 'Voice',
    tcp: true,
    restart: { type: 'launchd', id: 'gui/501/com.homeai.wyoming-satellite' },
  },
  {
    name: 'Home Assistant',
    url: 'https://10.0.0.199:8123',
    healthPath: '/api/',
    uiUrl: 'https://10.0.0.199:8123',
    description: 'Smart home platform',
    category: 'Smart Home',
    // No restart entry: runs on a separate host, not managed from here.
  },
  {
    name: 'Uptime Kuma',
    url: 'http://localhost:3001',
    healthPath: '/',
    uiUrl: 'http://localhost:3001',
    description: 'Service health monitoring',
    category: 'Infrastructure',
    restart: { type: 'docker', id: 'homeai-uptime-kuma' },
  },
  {
    name: 'n8n',
    url: 'http://localhost:5678',
    healthPath: '/',
    uiUrl: 'http://localhost:5678',
    description: 'Workflow automation',
    category: 'Infrastructure',
    restart: { type: 'docker', id: 'homeai-n8n' },
  },
  {
    name: 'code-server',
    url: 'http://localhost:8090',
    healthPath: '/',
    uiUrl: 'http://localhost:8090',
    description: 'Browser-based VS Code',
    category: 'Infrastructure',
    restart: { type: 'docker', id: 'homeai-code-server' },
  },
  {
    name: 'Portainer',
    url: 'https://10.0.0.199:9443',
    healthPath: '/',
    uiUrl: 'https://10.0.0.199:9443',
    description: 'Docker management',
    category: 'Infrastructure',
  },
  {
    name: 'Gitea',
    url: 'http://10.0.0.199:3000',
    healthPath: '/',
    uiUrl: 'http://10.0.0.199:3000',
    description: 'Self-hosted Git',
    category: 'Infrastructure',
  },
];
|
||||
|
||||
// Maps a service category name to the inline SVG icon rendered next to its
// section header. Keys must match the `category` values used in SERVICES.
const CATEGORY_ICONS = {
  'AI & LLM': (
    <svg className="w-5 h-5" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={1.5}>
      <path strokeLinecap="round" strokeLinejoin="round" d="M9.813 15.904L9 18.75l-.813-2.846a4.5 4.5 0 00-3.09-3.09L2.25 12l2.846-.813a4.5 4.5 0 003.09-3.09L9 5.25l.813 2.846a4.5 4.5 0 003.09 3.09L15.75 12l-2.846.813a4.5 4.5 0 00-3.09 3.09zM18.259 8.715L18 9.75l-.259-1.035a3.375 3.375 0 00-2.455-2.456L14.25 6l1.036-.259a3.375 3.375 0 002.455-2.456L18 2.25l.259 1.035a3.375 3.375 0 002.455 2.456L21.75 6l-1.036.259a3.375 3.375 0 00-2.455 2.456zM16.894 20.567L16.5 21.75l-.394-1.183a2.25 2.25 0 00-1.423-1.423L13.5 18.75l1.183-.394a2.25 2.25 0 001.423-1.423l.394-1.183.394 1.183a2.25 2.25 0 001.423 1.423l1.183.394-1.183.394a2.25 2.25 0 00-1.423 1.423z" />
    </svg>
  ),
  'Agent': (
    <svg className="w-5 h-5" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={1.5}>
      <path strokeLinecap="round" strokeLinejoin="round" d="M8.25 3v1.5M4.5 8.25H3m18 0h-1.5M4.5 12H3m18 0h-1.5m-15 3.75H3m18 0h-1.5M8.25 19.5V21M12 3v1.5m0 15V21m3.75-18v1.5m0 15V21m-9-1.5h10.5a2.25 2.25 0 002.25-2.25V6.75a2.25 2.25 0 00-2.25-2.25H6.75A2.25 2.25 0 004.5 6.75v10.5a2.25 2.25 0 002.25 2.25zm.75-12h9v9h-9v-9z" />
    </svg>
  ),
  'Voice': (
    <svg className="w-5 h-5" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={1.5}>
      <path strokeLinecap="round" strokeLinejoin="round" d="M12 18.75a6 6 0 006-6v-1.5m-6 7.5a6 6 0 01-6-6v-1.5m6 7.5v3.75m-3.75 0h7.5M12 15.75a3 3 0 01-3-3V4.5a3 3 0 116 0v8.25a3 3 0 01-3 3z" />
    </svg>
  ),
  'Smart Home': (
    <svg className="w-5 h-5" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={1.5}>
      <path strokeLinecap="round" strokeLinejoin="round" d="M2.25 12l8.954-8.955c.44-.439 1.152-.439 1.591 0L21.75 12M4.5 9.75v10.125c0 .621.504 1.125 1.125 1.125H9.75v-4.875c0-.621.504-1.125 1.125-1.125h2.25c.621 0 1.125.504 1.125 1.125V21h4.125c.621 0 1.125-.504 1.125-1.125V9.75M8.25 21h8.25" />
    </svg>
  ),
  'Infrastructure': (
    <svg className="w-5 h-5" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={1.5}>
      <path strokeLinecap="round" strokeLinejoin="round" d="M5.25 14.25h13.5m-13.5 0a3 3 0 01-3-3m3 3a3 3 0 100 6h13.5a3 3 0 100-6m-16.5-3a3 3 0 013-3h13.5a3 3 0 013 3m-19.5 0a4.5 4.5 0 01.9-2.7L5.737 5.1a3.375 3.375 0 012.7-1.35h7.126c1.062 0 2.062.5 2.7 1.35l2.587 3.45a4.5 4.5 0 01.9 2.7m0 0a3 3 0 01-3 3m0 3h.008v.008h-.008v-.008zm0-6h.008v.008h-.008v-.008zm-3 6h.008v.008h-.008v-.008zm0-6h.008v.008h-.008v-.008z" />
    </svg>
  ),
};
|
||||
|
||||
function StatusDot({ status }) {
|
||||
const colors = {
|
||||
online: 'bg-emerald-400 shadow-emerald-400/50',
|
||||
offline: 'bg-red-400 shadow-red-400/50',
|
||||
checking: 'bg-amber-400 shadow-amber-400/50 animate-pulse',
|
||||
unknown: 'bg-gray-500',
|
||||
};
|
||||
return (
|
||||
<span className={`inline-block w-2.5 h-2.5 rounded-full shadow-lg ${colors[status] || colors.unknown}`} />
|
||||
);
|
||||
}
|
||||
|
||||
export default function Dashboard() {
|
||||
const [statuses, setStatuses] = useState(() =>
|
||||
Object.fromEntries(SERVICES.map(s => [s.name, { status: 'checking', lastCheck: null, responseTime: null }]))
|
||||
);
|
||||
const [lastRefresh, setLastRefresh] = useState(null);
|
||||
const [restarting, setRestarting] = useState({});
|
||||
|
||||
const checkService = useCallback(async (service) => {
|
||||
try {
|
||||
const target = encodeURIComponent(service.url + service.healthPath);
|
||||
const modeParam = service.tcp ? '&mode=tcp' : '';
|
||||
const controller = new AbortController();
|
||||
const timeout = setTimeout(() => controller.abort(), 8000);
|
||||
|
||||
const res = await fetch(`/api/health?url=${target}${modeParam}`, { signal: controller.signal });
|
||||
clearTimeout(timeout);
|
||||
|
||||
const data = await res.json();
|
||||
return { status: data.status, lastCheck: new Date(), responseTime: data.responseTime };
|
||||
} catch {
|
||||
return { status: 'offline', lastCheck: new Date(), responseTime: null };
|
||||
}
|
||||
}, []);
|
||||
|
||||
const refreshAll = useCallback(async () => {
|
||||
setStatuses(prev =>
|
||||
Object.fromEntries(Object.entries(prev).map(([k, v]) => [k, { ...v, status: 'checking' }]))
|
||||
);
|
||||
|
||||
const results = await Promise.allSettled(
|
||||
SERVICES.map(async (service) => {
|
||||
const result = await checkService(service);
|
||||
return { name: service.name, ...result };
|
||||
})
|
||||
);
|
||||
|
||||
const newStatuses = {};
|
||||
for (const r of results) {
|
||||
if (r.status === 'fulfilled') {
|
||||
newStatuses[r.value.name] = {
|
||||
status: r.value.status,
|
||||
lastCheck: r.value.lastCheck,
|
||||
responseTime: r.value.responseTime,
|
||||
};
|
||||
}
|
||||
}
|
||||
setStatuses(prev => ({ ...prev, ...newStatuses }));
|
||||
setLastRefresh(new Date());
|
||||
}, [checkService]);
|
||||
|
||||
useEffect(() => {
|
||||
refreshAll();
|
||||
const interval = setInterval(refreshAll, 30000);
|
||||
return () => clearInterval(interval);
|
||||
}, [refreshAll]);
|
||||
|
||||
const restartService = useCallback(async (service) => {
|
||||
if (!service.restart) return;
|
||||
setRestarting(prev => ({ ...prev, [service.name]: true }));
|
||||
try {
|
||||
const res = await fetch('/api/service/restart', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify(service.restart),
|
||||
});
|
||||
const data = await res.json();
|
||||
if (!data.ok) {
|
||||
console.error(`Restart failed for ${service.name}:`, data.error);
|
||||
}
|
||||
setTimeout(async () => {
|
||||
const result = await checkService(service);
|
||||
setStatuses(prev => ({ ...prev, [service.name]: result }));
|
||||
setRestarting(prev => ({ ...prev, [service.name]: false }));
|
||||
}, 3000);
|
||||
} catch (err) {
|
||||
console.error(`Restart failed for ${service.name}:`, err);
|
||||
setRestarting(prev => ({ ...prev, [service.name]: false }));
|
||||
}
|
||||
}, [checkService]);
|
||||
|
||||
const categories = [...new Set(SERVICES.map(s => s.category))];
|
||||
const onlineCount = Object.values(statuses).filter(s => s.status === 'online').length;
|
||||
const offlineCount = Object.values(statuses).filter(s => s.status === 'offline').length;
|
||||
const totalCount = SERVICES.length;
|
||||
const allOnline = onlineCount === totalCount;
|
||||
|
||||
return (
|
||||
<div className="space-y-8">
|
||||
{/* Header */}
|
||||
<div className="flex items-center justify-between">
|
||||
<div>
|
||||
<h1 className="text-3xl font-bold text-gray-100">Service Status</h1>
|
||||
<p className="text-sm text-gray-500 mt-1">
|
||||
{onlineCount}/{totalCount} services online
|
||||
{lastRefresh && (
|
||||
<span className="ml-3">
|
||||
Last check: {lastRefresh.toLocaleTimeString()}
|
||||
</span>
|
||||
)}
|
||||
</p>
|
||||
</div>
|
||||
<button
|
||||
onClick={refreshAll}
|
||||
className="flex items-center gap-2 px-4 py-2 bg-gray-800 hover:bg-gray-700 text-gray-300 rounded-lg border border-gray-700 transition-colors"
|
||||
>
|
||||
<svg className="w-4 h-4" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={2}>
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M16.023 9.348h4.992v-.001M2.985 19.644v-4.992m0 0h4.992m-4.993 0l3.181 3.183a8.25 8.25 0 0013.803-3.7M4.031 9.865a8.25 8.25 0 0113.803-3.7l3.181 3.182" />
|
||||
</svg>
|
||||
Refresh
|
||||
</button>
|
||||
</div>
|
||||
|
||||
{/* Summary bar */}
|
||||
<div className="h-2 rounded-full bg-gray-800 overflow-hidden flex">
|
||||
{allOnline ? (
|
||||
<div
|
||||
className="h-full bg-gradient-to-r from-purple-500 to-indigo-500 transition-all duration-500"
|
||||
style={{ width: '100%' }}
|
||||
/>
|
||||
) : (
|
||||
<>
|
||||
<div
|
||||
className="h-full bg-gradient-to-r from-emerald-500 to-emerald-400 transition-all duration-500"
|
||||
style={{ width: `${(onlineCount / totalCount) * 100}%` }}
|
||||
/>
|
||||
<div
|
||||
className="h-full bg-gradient-to-r from-red-500 to-red-400 transition-all duration-500"
|
||||
style={{ width: `${(offlineCount / totalCount) * 100}%` }}
|
||||
/>
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Service grid by category */}
|
||||
{categories.map(category => (
|
||||
<div key={category}>
|
||||
<div className="flex items-center gap-2 mb-4">
|
||||
<span className="text-gray-400">{CATEGORY_ICONS[category]}</span>
|
||||
<h2 className="text-lg font-semibold text-gray-300">{category}</h2>
|
||||
</div>
|
||||
<div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-4">
|
||||
{SERVICES.filter(s => s.category === category).map(service => {
|
||||
const st = statuses[service.name] || { status: 'unknown' };
|
||||
return (
|
||||
<div
|
||||
key={service.name}
|
||||
className={`relative rounded-xl border p-4 transition-all duration-200 ${
|
||||
st.status === 'online'
|
||||
? 'bg-gray-800/50 border-gray-700 hover:border-emerald-500/50'
|
||||
: st.status === 'offline'
|
||||
? 'bg-gray-800/50 border-red-500/30 hover:border-red-500/50'
|
||||
: 'bg-gray-800/50 border-gray-700'
|
||||
}`}
|
||||
>
|
||||
<div className="flex items-start justify-between">
|
||||
<div className="flex-1">
|
||||
<div className="flex items-center gap-2">
|
||||
<StatusDot status={st.status} />
|
||||
<h3 className="font-medium text-gray-200">{service.name}</h3>
|
||||
</div>
|
||||
<p className="text-xs text-gray-500 mt-1">{service.description}</p>
|
||||
{st.responseTime !== null && (
|
||||
<p className="text-xs text-gray-600 mt-0.5">{st.responseTime}ms</p>
|
||||
)}
|
||||
</div>
|
||||
<div className="flex items-center gap-2">
|
||||
{service.restart && st.status === 'offline' && (
|
||||
<button
|
||||
onClick={() => restartService(service)}
|
||||
disabled={restarting[service.name]}
|
||||
className="text-xs px-2.5 py-1 rounded-md bg-amber-600/80 hover:bg-amber-500 disabled:bg-gray-700 disabled:text-gray-500 text-white transition-colors flex items-center gap-1"
|
||||
>
|
||||
{restarting[service.name] ? (
|
||||
<>
|
||||
<svg className="w-3 h-3 animate-spin" fill="none" viewBox="0 0 24 24">
|
||||
<circle className="opacity-25" cx="12" cy="12" r="10" stroke="currentColor" strokeWidth="4" />
|
||||
<path className="opacity-75" fill="currentColor" d="M4 12a8 8 0 018-8V0C5.373 0 0 5.373 0 12h4z" />
|
||||
</svg>
|
||||
Restarting
|
||||
</>
|
||||
) : (
|
||||
<>
|
||||
<svg className="w-3 h-3" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={2}>
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M5.636 18.364a9 9 0 010-12.728m12.728 0a9 9 0 010 12.728M12 9v3m0 0v3m0-3h3m-3 0H9" />
|
||||
</svg>
|
||||
Restart
|
||||
</>
|
||||
)}
|
||||
</button>
|
||||
)}
|
||||
{service.uiUrl && (
|
||||
<a
|
||||
href={service.uiUrl}
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
className="text-xs px-2.5 py-1 rounded-md bg-gray-700 hover:bg-gray-600 text-gray-300 transition-colors flex items-center gap-1"
|
||||
>
|
||||
Open
|
||||
<svg className="w-3 h-3" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={2}>
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M13.5 6H5.25A2.25 2.25 0 003 8.25v10.5A2.25 2.25 0 005.25 21h10.5A2.25 2.25 0 0018 18.75V10.5m-10.5 6L21 3m0 0h-5.25M21 3v5.25" />
|
||||
</svg>
|
||||
</a>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
581
homeai-dashboard/src/pages/Editor.jsx
Normal file
581
homeai-dashboard/src/pages/Editor.jsx
Normal file
@@ -0,0 +1,581 @@
|
||||
import React, { useState, useEffect, useRef } from 'react';
|
||||
import { validateCharacter } from '../lib/SchemaValidator';
|
||||
|
||||
// Starter character profile loaded into the editor when nothing is being
// edited (and as the fallback when a pending edit fails to parse).
const DEFAULT_CHARACTER = {
  schema_version: 1,          // bumped when the character JSON schema changes
  name: "aria",               // machine id; used to derive profile ids and export filenames
  display_name: "Aria",       // human-readable name shown in the UI
  description: "Default HomeAI assistant persona",
  system_prompt: "You are Aria, a warm, curious, and helpful AI assistant living in the home. You speak naturally and conversationally — never robotic. You are knowledgeable but never condescending. You remember the people you live with and build on those memories over time. Keep responses concise when controlling smart home devices; be more expressive in casual conversation. Never break character.",
  // Per-character LLM model selection (primary for quality, fast for latency).
  model_overrides: {
    primary: "llama3.3:70b",
    fast: "qwen2.5:7b"
  },
  // Text-to-speech configuration; kokoro_voice applies to the kokoro engine.
  tts: {
    engine: "kokoro",
    kokoro_voice: "af_heart",
    speed: 1.0
  },
  // Live2D expression ids, keyed by assistant state/emotion.
  live2d_expressions: {
    idle: "expr_idle",
    listening: "expr_listening",
    thinking: "expr_thinking",
    speaking: "expr_speaking",
    happy: "expr_happy",
    sad: "expr_sad",
    surprised: "expr_surprised",
    error: "expr_error"
  },
  // VTube Studio websocket triggers fired on state transitions.
  vtube_ws_triggers: {
    thinking: { type: "hotkey", id: "expr_thinking" },
    speaking: { type: "hotkey", id: "expr_speaking" },
    idle: { type: "hotkey", id: "expr_idle" }
  },
  // Trigger/response pairs with an optional condition expression.
  custom_rules: [
    { trigger: "good morning", response: "Good morning! How did you sleep?", condition: "time_of_day == morning" }
  ],
  notes: ""                   // free-form notes, not used by the runtime
};
|
||||
|
||||
export default function Editor() {
|
||||
const [character, setCharacter] = useState(() => {
|
||||
const editData = sessionStorage.getItem('edit_character');
|
||||
if (editData) {
|
||||
sessionStorage.removeItem('edit_character');
|
||||
try {
|
||||
return JSON.parse(editData);
|
||||
} catch {
|
||||
return DEFAULT_CHARACTER;
|
||||
}
|
||||
}
|
||||
return DEFAULT_CHARACTER;
|
||||
});
|
||||
const [error, setError] = useState(null);
|
||||
const [saved, setSaved] = useState(false);
|
||||
|
||||
// TTS preview state
|
||||
const [ttsState, setTtsState] = useState('idle');
|
||||
const [previewText, setPreviewText] = useState('');
|
||||
const audioRef = useRef(null);
|
||||
const objectUrlRef = useRef(null);
|
||||
|
||||
// ElevenLabs state
|
||||
const [elevenLabsApiKey, setElevenLabsApiKey] = useState(localStorage.getItem('elevenlabs_api_key') || '');
|
||||
const [elevenLabsVoices, setElevenLabsVoices] = useState([]);
|
||||
const [elevenLabsModels, setElevenLabsModels] = useState([]);
|
||||
const [isLoadingElevenLabs, setIsLoadingElevenLabs] = useState(false);
|
||||
|
||||
const fetchElevenLabsData = async (key) => {
|
||||
if (!key) return;
|
||||
setIsLoadingElevenLabs(true);
|
||||
try {
|
||||
const headers = { 'xi-api-key': key };
|
||||
const [voicesRes, modelsRes] = await Promise.all([
|
||||
fetch('https://api.elevenlabs.io/v1/voices', { headers }),
|
||||
fetch('https://api.elevenlabs.io/v1/models', { headers })
|
||||
]);
|
||||
if (!voicesRes.ok || !modelsRes.ok) {
|
||||
throw new Error('Failed to fetch from ElevenLabs API (check API key)');
|
||||
}
|
||||
const voicesData = await voicesRes.json();
|
||||
const modelsData = await modelsRes.json();
|
||||
setElevenLabsVoices(voicesData.voices || []);
|
||||
setElevenLabsModels(modelsData.filter(m => m.can_do_text_to_speech) || []);
|
||||
localStorage.setItem('elevenlabs_api_key', key);
|
||||
} catch (err) {
|
||||
setError(err.message);
|
||||
} finally {
|
||||
setIsLoadingElevenLabs(false);
|
||||
}
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
if (elevenLabsApiKey && character.tts.engine === 'elevenlabs') {
|
||||
fetchElevenLabsData(elevenLabsApiKey);
|
||||
}
|
||||
}, [character.tts.engine]);
|
||||
|
||||
useEffect(() => {
|
||||
return () => {
|
||||
if (audioRef.current) { audioRef.current.pause(); audioRef.current = null; }
|
||||
if (objectUrlRef.current) { URL.revokeObjectURL(objectUrlRef.current); }
|
||||
window.speechSynthesis.cancel();
|
||||
};
|
||||
}, []);
|
||||
|
||||
const handleExport = () => {
|
||||
try {
|
||||
validateCharacter(character);
|
||||
setError(null);
|
||||
const dataStr = "data:text/json;charset=utf-8," + encodeURIComponent(JSON.stringify(character, null, 2));
|
||||
const a = document.createElement('a');
|
||||
a.href = dataStr;
|
||||
a.download = `${character.name || 'character'}.json`;
|
||||
document.body.appendChild(a);
|
||||
a.click();
|
||||
a.remove();
|
||||
} catch (err) {
|
||||
setError(err.message);
|
||||
}
|
||||
};
|
||||
|
||||
const handleSaveToProfiles = () => {
|
||||
try {
|
||||
validateCharacter(character);
|
||||
setError(null);
|
||||
|
||||
const profileId = sessionStorage.getItem('edit_character_profile_id');
|
||||
const storageKey = 'homeai_characters';
|
||||
const raw = localStorage.getItem(storageKey);
|
||||
let profiles = raw ? JSON.parse(raw) : [];
|
||||
|
||||
if (profileId) {
|
||||
profiles = profiles.map(p =>
|
||||
p.id === profileId ? { ...p, data: character } : p
|
||||
);
|
||||
sessionStorage.removeItem('edit_character_profile_id');
|
||||
} else {
|
||||
const id = character.name + '_' + Date.now();
|
||||
profiles.push({ id, data: character, image: null, addedAt: new Date().toISOString() });
|
||||
}
|
||||
|
||||
localStorage.setItem(storageKey, JSON.stringify(profiles));
|
||||
setSaved(true);
|
||||
setTimeout(() => setSaved(false), 2000);
|
||||
} catch (err) {
|
||||
setError(err.message);
|
||||
}
|
||||
};
|
||||
|
||||
// Import a character from a JSON file chosen via the hidden <input type="file">.
// The parsed object is validated before replacing the editor state; failures
// are reported in the error banner without touching the current character.
const handleImport = (e) => {
  const file = e.target.files[0];
  // Reset the input so choosing the same file again still fires onChange
  // (without this, re-importing the identical file is a silent no-op).
  e.target.value = '';
  if (!file) return;
  const reader = new FileReader();
  // Use a distinct parameter name — the original shadowed the outer `e`.
  reader.onload = (loadEvent) => {
    try {
      const importedChar = JSON.parse(loadEvent.target.result);
      validateCharacter(importedChar);
      setCharacter(importedChar);
      setError(null);
    } catch (err) {
      setError(`Import failed: ${err.message}`);
    }
  };
  reader.readAsText(file);
};
|
||||
|
||||
// Update a single top-level field of the character (functional update so
// rapid successive edits never clobber each other).
const handleChange = (field, value) => {
  setCharacter(prev => {
    const next = { ...prev };
    next[field] = value;
    return next;
  });
};
|
||||
|
||||
// Update one field inside a nested section of the character
// (e.g. handleNestedChange('tts', 'engine', 'kokoro')).
const handleNestedChange = (parent, field, value) => {
  setCharacter(prev => {
    const section = { ...prev[parent], [field]: value };
    return { ...prev, [parent]: section };
  });
};
|
||||
|
||||
// Update a single field of the custom rule at `index`, copying the rules
// array and the edited rule so React sees a new reference.
const handleRuleChange = (index, field, value) => {
  setCharacter(prev => {
    const rules = [...(prev.custom_rules || [])];
    rules[index] = { ...rules[index], [field]: value };
    return { ...prev, custom_rules: rules };
  });
};
|
||||
|
||||
// Append an empty custom rule for the user to fill in.
const addRule = () => {
  setCharacter(prev => {
    const blank = { trigger: "", response: "", condition: "" };
    const rules = [...(prev.custom_rules || []), blank];
    return { ...prev, custom_rules: rules };
  });
};
|
||||
|
||||
// Remove the custom rule at `index`.
const removeRule = (index) => {
  setCharacter(prev => {
    const rules = (prev.custom_rules || []).filter((_, i) => i !== index);
    return { ...prev, custom_rules: rules };
  });
};
|
||||
|
||||
// Halt any voice preview in progress (Kokoro <audio> element or browser
// speechSynthesis), release the blob URL, and return the UI to idle.
// Safe to call when nothing is playing; also used as the audio element's
// onended/onerror handler.
const stopPreview = () => {
if (audioRef.current) {
audioRef.current.pause();
audioRef.current = null;
}
if (objectUrlRef.current) {
URL.revokeObjectURL(objectUrlRef.current);
objectUrlRef.current = null;
}
// Cancels browser-TTS playback regardless of which engine started it.
window.speechSynthesis.cancel();
setTtsState('idle');
};
|
||||
|
||||
// Play a voice preview of `previewText` (or a default greeting).
// Kokoro engine: POSTs to the local TTS bridge (/api/tts, proxied by the
// Vite dev server), plays the returned audio blob, and tracks the blob URL /
// <audio> element in refs so stopPreview/unmount can clean them up.
// Any other engine: falls through to browser speechSynthesis.
// On a bridge failure it reports the error and falls back to browser TTS.
const previewTTS = async () => {
// Stop whatever is currently playing before starting a new preview.
stopPreview();
const text = previewText || `Hi, I am ${character.display_name}. This is a preview of my voice.`;

if (character.tts.engine === 'kokoro') {
setTtsState('loading');
let blob;
try {
const response = await fetch('/api/tts', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ text, voice: character.tts.kokoro_voice })
});
if (!response.ok) throw new Error('TTS bridge returned ' + response.status);
blob = await response.blob();
} catch (err) {
setTtsState('idle');
setError(`Kokoro preview failed: ${err.message}. Falling back to browser TTS.`);
runBrowserTTS(text);
return;
}
// Track the URL so stopPreview / unmount cleanup can revoke it.
const url = URL.createObjectURL(blob);
objectUrlRef.current = url;
const audio = new Audio(url);
// Kokoro returns audio at 1.0x; apply the configured speed client-side.
audio.playbackRate = character.tts.speed;
// stopPreview doubles as the teardown for both normal end and errors.
audio.onended = () => { stopPreview(); };
audio.onerror = () => { stopPreview(); };
audioRef.current = audio;
setTtsState('playing');
// play() may reject (e.g. autoplay policy); the error handler above
// covers actual playback failures, so the rejection is ignored here.
audio.play().catch(() => {});
} else {
runBrowserTTS(text);
}
};
|
||||
|
||||
// Speak `text` with the browser's built-in speechSynthesis, preferring an
// English voice whose name contains "Female", else any English voice, else
// the browser default. Applies the character's configured speed as the rate.
const runBrowserTTS = (text) => {
const utterance = new SpeechSynthesisUtterance(text);
utterance.rate = character.tts.speed;
// NOTE(review): getVoices() can return an empty list until the browser
// fires 'voiceschanged' (common on first call in Chrome) — in that case
// this silently uses the default voice. Confirm whether that's acceptable.
const voices = window.speechSynthesis.getVoices();
const preferredVoice = voices.find(v => v.lang.startsWith('en') && v.name.includes('Female')) || voices.find(v => v.lang.startsWith('en'));
if (preferredVoice) utterance.voice = preferredVoice;
setTtsState('playing');
utterance.onend = () => setTtsState('idle');
// Clear any queued utterance so previews never stack up.
window.speechSynthesis.cancel();
window.speechSynthesis.speak(utterance);
};
|
||||
|
||||
// Shared Tailwind class strings used across the form controls below, so the
// editor's look stays consistent and is changed in one place.
const inputClass = "w-full bg-gray-800 border border-gray-700 text-gray-200 p-2 rounded-lg focus:border-indigo-500 focus:ring-1 focus:ring-indigo-500 outline-none transition-colors";
const selectClass = "w-full bg-gray-800 border border-gray-700 text-gray-200 p-2 rounded-lg focus:border-indigo-500 focus:ring-1 focus:ring-indigo-500 outline-none transition-colors";
const labelClass = "block text-sm font-medium text-gray-400 mb-1";
const cardClass = "bg-gray-900 border border-gray-800 p-5 rounded-xl space-y-4";
|
||||
|
||||
return (
|
||||
<div className="space-y-6">
|
||||
<div className="flex justify-between items-center">
|
||||
<div>
|
||||
<h1 className="text-3xl font-bold text-gray-100">Character Editor</h1>
|
||||
<p className="text-sm text-gray-500 mt-1">
|
||||
Editing: {character.display_name || character.name}
|
||||
</p>
|
||||
</div>
|
||||
<div className="flex gap-3">
|
||||
<label className="cursor-pointer flex items-center gap-2 px-4 py-2 bg-gray-800 hover:bg-gray-700 text-gray-300 rounded-lg border border-gray-700 transition-colors">
|
||||
<svg className="w-4 h-4" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={2}>
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M3 16.5v2.25A2.25 2.25 0 005.25 21h13.5A2.25 2.25 0 0021 18.75V16.5m-13.5-9L12 3m0 0l4.5 4.5M12 3v13.5" />
|
||||
</svg>
|
||||
Import
|
||||
<input type="file" accept=".json" className="hidden" onChange={handleImport} />
|
||||
</label>
|
||||
<button
|
||||
onClick={handleSaveToProfiles}
|
||||
className={`flex items-center gap-2 px-4 py-2 rounded-lg transition-colors ${
|
||||
saved
|
||||
? 'bg-emerald-600 text-white'
|
||||
: 'bg-indigo-600 hover:bg-indigo-500 text-white'
|
||||
}`}
|
||||
>
|
||||
<svg className="w-4 h-4" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={2}>
|
||||
{saved
|
||||
? <path strokeLinecap="round" strokeLinejoin="round" d="M4.5 12.75l6 6 9-13.5" />
|
||||
: <path strokeLinecap="round" strokeLinejoin="round" d="M17.593 3.322c1.1.128 1.907 1.077 1.907 2.185V21L12 17.25 4.5 21V5.507c0-1.108.806-2.057 1.907-2.185a48.507 48.507 0 0111.186 0z" />
|
||||
}
|
||||
</svg>
|
||||
{saved ? 'Saved' : 'Save to Profiles'}
|
||||
</button>
|
||||
<button
|
||||
onClick={handleExport}
|
||||
className="flex items-center gap-2 px-4 py-2 bg-gray-800 hover:bg-gray-700 text-gray-300 rounded-lg border border-gray-700 transition-colors"
|
||||
>
|
||||
<svg className="w-4 h-4" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={2}>
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M3 16.5v2.25A2.25 2.25 0 005.25 21h13.5A2.25 2.25 0 0021 18.75V16.5M16.5 12L12 16.5m0 0L7.5 12m4.5 4.5V3" />
|
||||
</svg>
|
||||
Export JSON
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{error && (
|
||||
<div className="bg-red-900/30 border border-red-500/50 text-red-300 px-4 py-3 rounded-lg text-sm">
|
||||
{error}
|
||||
</div>
|
||||
)}
|
||||
|
||||
<div className="grid grid-cols-1 md:grid-cols-2 gap-6">
|
||||
{/* Basic Info */}
|
||||
<div className={cardClass}>
|
||||
<h2 className="text-lg font-semibold text-gray-200">Basic Info</h2>
|
||||
<div>
|
||||
<label className={labelClass}>Name (ID)</label>
|
||||
<input type="text" className={inputClass} value={character.name} onChange={(e) => handleChange('name', e.target.value)} />
|
||||
</div>
|
||||
<div>
|
||||
<label className={labelClass}>Display Name</label>
|
||||
<input type="text" className={inputClass} value={character.display_name} onChange={(e) => handleChange('display_name', e.target.value)} />
|
||||
</div>
|
||||
<div>
|
||||
<label className={labelClass}>Description</label>
|
||||
<input type="text" className={inputClass} value={character.description} onChange={(e) => handleChange('description', e.target.value)} />
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* TTS Configuration */}
|
||||
<div className={cardClass}>
|
||||
<h2 className="text-lg font-semibold text-gray-200">TTS Configuration</h2>
|
||||
<div>
|
||||
<label className={labelClass}>Engine</label>
|
||||
<select className={selectClass} value={character.tts.engine} onChange={(e) => handleNestedChange('tts', 'engine', e.target.value)}>
|
||||
<option value="kokoro">Kokoro</option>
|
||||
<option value="chatterbox">Chatterbox</option>
|
||||
<option value="qwen3">Qwen3</option>
|
||||
<option value="elevenlabs">ElevenLabs</option>
|
||||
</select>
|
||||
</div>
|
||||
|
||||
{character.tts.engine === 'elevenlabs' && (
|
||||
<div className="space-y-4 border border-gray-700 p-4 rounded-lg bg-gray-800/50">
|
||||
<div>
|
||||
<label className="block text-xs font-medium mb-1 text-gray-500">ElevenLabs API Key (Local Use Only)</label>
|
||||
<div className="flex gap-2">
|
||||
<input type="password" placeholder="sk_..." className={inputClass + " text-sm"} value={elevenLabsApiKey} onChange={(e) => setElevenLabsApiKey(e.target.value)} />
|
||||
<button onClick={() => fetchElevenLabsData(elevenLabsApiKey)} disabled={isLoadingElevenLabs} className="bg-indigo-600 text-white px-3 py-1 rounded-lg text-sm whitespace-nowrap hover:bg-indigo-500 disabled:opacity-50 transition-colors">
|
||||
{isLoadingElevenLabs ? 'Loading...' : 'Fetch'}
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
<div>
|
||||
<label className={labelClass}>Voice ID</label>
|
||||
{elevenLabsVoices.length > 0 ? (
|
||||
<select className={selectClass} value={character.tts.elevenlabs_voice_id || ''} onChange={(e) => handleNestedChange('tts', 'elevenlabs_voice_id', e.target.value)}>
|
||||
<option value="">-- Select Voice --</option>
|
||||
{elevenLabsVoices.map(v => (
|
||||
<option key={v.voice_id} value={v.voice_id}>{v.name} ({v.category})</option>
|
||||
))}
|
||||
</select>
|
||||
) : (
|
||||
<input type="text" className={inputClass} value={character.tts.elevenlabs_voice_id || ''} onChange={(e) => handleNestedChange('tts', 'elevenlabs_voice_id', e.target.value)} placeholder="e.g. 21m00Tcm4TlvDq8ikWAM" />
|
||||
)}
|
||||
</div>
|
||||
<div>
|
||||
<label className={labelClass}>Model</label>
|
||||
{elevenLabsModels.length > 0 ? (
|
||||
<select className={selectClass} value={character.tts.elevenlabs_model || 'eleven_monolingual_v1'} onChange={(e) => handleNestedChange('tts', 'elevenlabs_model', e.target.value)}>
|
||||
<option value="">-- Select Model --</option>
|
||||
{elevenLabsModels.map(m => (
|
||||
<option key={m.model_id} value={m.model_id}>{m.name} ({m.model_id})</option>
|
||||
))}
|
||||
</select>
|
||||
) : (
|
||||
<input type="text" className={inputClass} value={character.tts.elevenlabs_model || 'eleven_monolingual_v1'} onChange={(e) => handleNestedChange('tts', 'elevenlabs_model', e.target.value)} placeholder="e.g. eleven_monolingual_v1" />
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{character.tts.engine === 'kokoro' && (
|
||||
<div>
|
||||
<label className={labelClass}>Kokoro Voice</label>
|
||||
<select className={selectClass} value={character.tts.kokoro_voice || 'af_heart'} onChange={(e) => handleNestedChange('tts', 'kokoro_voice', e.target.value)}>
|
||||
<option value="af_heart">af_heart (American Female)</option>
|
||||
<option value="af_alloy">af_alloy (American Female)</option>
|
||||
<option value="af_aoede">af_aoede (American Female)</option>
|
||||
<option value="af_bella">af_bella (American Female)</option>
|
||||
<option value="af_jessica">af_jessica (American Female)</option>
|
||||
<option value="af_kore">af_kore (American Female)</option>
|
||||
<option value="af_nicole">af_nicole (American Female)</option>
|
||||
<option value="af_nova">af_nova (American Female)</option>
|
||||
<option value="af_river">af_river (American Female)</option>
|
||||
<option value="af_sarah">af_sarah (American Female)</option>
|
||||
<option value="af_sky">af_sky (American Female)</option>
|
||||
<option value="am_adam">am_adam (American Male)</option>
|
||||
<option value="am_echo">am_echo (American Male)</option>
|
||||
<option value="am_eric">am_eric (American Male)</option>
|
||||
<option value="am_fenrir">am_fenrir (American Male)</option>
|
||||
<option value="am_liam">am_liam (American Male)</option>
|
||||
<option value="am_michael">am_michael (American Male)</option>
|
||||
<option value="am_onyx">am_onyx (American Male)</option>
|
||||
<option value="am_puck">am_puck (American Male)</option>
|
||||
<option value="am_santa">am_santa (American Male)</option>
|
||||
<option value="bf_alice">bf_alice (British Female)</option>
|
||||
<option value="bf_emma">bf_emma (British Female)</option>
|
||||
<option value="bf_isabella">bf_isabella (British Female)</option>
|
||||
<option value="bf_lily">bf_lily (British Female)</option>
|
||||
<option value="bm_daniel">bm_daniel (British Male)</option>
|
||||
<option value="bm_fable">bm_fable (British Male)</option>
|
||||
<option value="bm_george">bm_george (British Male)</option>
|
||||
<option value="bm_lewis">bm_lewis (British Male)</option>
|
||||
</select>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{character.tts.engine === 'chatterbox' && (
|
||||
<div>
|
||||
<label className={labelClass}>Voice Reference Path</label>
|
||||
<input type="text" className={inputClass} value={character.tts.voice_ref_path || ''} onChange={(e) => handleNestedChange('tts', 'voice_ref_path', e.target.value)} />
|
||||
</div>
|
||||
)}
|
||||
|
||||
<div>
|
||||
<label className={labelClass}>Speed: {character.tts.speed}</label>
|
||||
<input type="range" min="0.5" max="2.0" step="0.1" className="w-full accent-indigo-500" value={character.tts.speed} onChange={(e) => handleNestedChange('tts', 'speed', parseFloat(e.target.value))} />
|
||||
</div>
|
||||
<div>
|
||||
<label className={labelClass}>Preview Text</label>
|
||||
<input
|
||||
type="text"
|
||||
className={inputClass}
|
||||
value={previewText}
|
||||
onChange={(e) => setPreviewText(e.target.value)}
|
||||
placeholder={`Hi, I am ${character.display_name}. This is a preview of my voice.`}
|
||||
/>
|
||||
</div>
|
||||
<div className="flex gap-2">
|
||||
<button
|
||||
onClick={previewTTS}
|
||||
disabled={ttsState === 'loading'}
|
||||
className={`flex-1 flex items-center justify-center gap-2 px-4 py-2 rounded-lg transition-colors ${
|
||||
ttsState === 'loading'
|
||||
? 'bg-indigo-800 text-indigo-300 cursor-wait'
|
||||
: ttsState === 'playing'
|
||||
? 'bg-emerald-600 hover:bg-emerald-500 text-white'
|
||||
: 'bg-indigo-600 hover:bg-indigo-500 text-white'
|
||||
}`}
|
||||
>
|
||||
{ttsState === 'loading' && (
|
||||
<svg className="w-4 h-4 animate-spin" viewBox="0 0 24 24" fill="none">
|
||||
<circle className="opacity-25" cx="12" cy="12" r="10" stroke="currentColor" strokeWidth="4" />
|
||||
<path className="opacity-75" fill="currentColor" d="M4 12a8 8 0 018-8V0C5.373 0 0 5.373 0 12h4z" />
|
||||
</svg>
|
||||
)}
|
||||
{ttsState === 'loading' ? 'Synthesizing...' : ttsState === 'playing' ? 'Playing...' : 'Preview Voice'}
|
||||
</button>
|
||||
{ttsState !== 'idle' && (
|
||||
<button
|
||||
onClick={stopPreview}
|
||||
className="px-4 py-2 bg-red-600 hover:bg-red-500 text-white rounded-lg transition-colors"
|
||||
>
|
||||
Stop
|
||||
</button>
|
||||
)}
|
||||
</div>
|
||||
<p className="text-xs text-gray-600">
|
||||
{character.tts.engine === 'kokoro'
|
||||
? 'Previews via local Kokoro TTS bridge (port 8081).'
|
||||
: 'Uses browser TTS for preview. Local TTS available with Kokoro engine.'}
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* System Prompt */}
|
||||
<div className={cardClass}>
|
||||
<div className="flex justify-between items-center">
|
||||
<h2 className="text-lg font-semibold text-gray-200">System Prompt</h2>
|
||||
<span className="text-xs text-gray-600">{character.system_prompt.length} chars</span>
|
||||
</div>
|
||||
<textarea
|
||||
className={inputClass + " h-32 resize-y"}
|
||||
value={character.system_prompt}
|
||||
onChange={(e) => handleChange('system_prompt', e.target.value)}
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div className="grid grid-cols-1 md:grid-cols-2 gap-6">
|
||||
{/* Live2D Expressions */}
|
||||
<div className={cardClass}>
|
||||
<h2 className="text-lg font-semibold text-gray-200">Live2D Expressions</h2>
|
||||
{Object.entries(character.live2d_expressions).map(([key, val]) => (
|
||||
<div key={key} className="flex justify-between items-center gap-4">
|
||||
<label className="text-sm font-medium text-gray-400 w-1/3 capitalize">{key}</label>
|
||||
<input type="text" className={inputClass + " w-2/3"} value={val} onChange={(e) => handleNestedChange('live2d_expressions', key, e.target.value)} />
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
|
||||
{/* Model Overrides */}
|
||||
<div className={cardClass}>
|
||||
<h2 className="text-lg font-semibold text-gray-200">Model Overrides</h2>
|
||||
<div>
|
||||
<label className={labelClass}>Primary Model</label>
|
||||
<select className={selectClass} value={character.model_overrides?.primary || 'llama3.3:70b'} onChange={(e) => handleNestedChange('model_overrides', 'primary', e.target.value)}>
|
||||
<option value="llama3.3:70b">llama3.3:70b</option>
|
||||
<option value="qwen3.5:35b-a3b">qwen3.5:35b-a3b</option>
|
||||
<option value="qwen2.5:7b">qwen2.5:7b</option>
|
||||
<option value="qwen3:32b">qwen3:32b</option>
|
||||
<option value="codestral:22b">codestral:22b</option>
|
||||
</select>
|
||||
</div>
|
||||
<div>
|
||||
<label className={labelClass}>Fast Model</label>
|
||||
<select className={selectClass} value={character.model_overrides?.fast || 'qwen2.5:7b'} onChange={(e) => handleNestedChange('model_overrides', 'fast', e.target.value)}>
|
||||
<option value="qwen2.5:7b">qwen2.5:7b</option>
|
||||
<option value="qwen3.5:35b-a3b">qwen3.5:35b-a3b</option>
|
||||
<option value="llama3.3:70b">llama3.3:70b</option>
|
||||
<option value="qwen3:32b">qwen3:32b</option>
|
||||
<option value="codestral:22b">codestral:22b</option>
|
||||
</select>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Custom Rules */}
|
||||
<div className={cardClass}>
|
||||
<div className="flex justify-between items-center">
|
||||
<h2 className="text-lg font-semibold text-gray-200">Custom Rules</h2>
|
||||
<button onClick={addRule} className="flex items-center gap-1 bg-indigo-600 hover:bg-indigo-500 text-white px-3 py-1.5 rounded-lg text-sm transition-colors">
|
||||
<svg className="w-4 h-4" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={2}>
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M12 4.5v15m7.5-7.5h-15" />
|
||||
</svg>
|
||||
Add Rule
|
||||
</button>
|
||||
</div>
|
||||
|
||||
{(!character.custom_rules || character.custom_rules.length === 0) ? (
|
||||
<p className="text-sm text-gray-600 italic">No custom rules defined.</p>
|
||||
) : (
|
||||
<div className="space-y-4">
|
||||
{character.custom_rules.map((rule, idx) => (
|
||||
<div key={idx} className="border border-gray-700 p-4 rounded-lg relative bg-gray-800/50">
|
||||
<button
|
||||
onClick={() => removeRule(idx)}
|
||||
className="absolute top-3 right-3 text-gray-500 hover:text-red-400 transition-colors"
|
||||
title="Remove Rule"
|
||||
>
|
||||
<svg className="w-4 h-4" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={2}>
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M6 18L18 6M6 6l12 12" />
|
||||
</svg>
|
||||
</button>
|
||||
<div className="grid grid-cols-1 md:grid-cols-2 gap-4 mt-1">
|
||||
<div>
|
||||
<label className="block text-xs font-medium mb-1 text-gray-500">Trigger</label>
|
||||
<input type="text" className={inputClass + " text-sm"} value={rule.trigger || ''} onChange={(e) => handleRuleChange(idx, 'trigger', e.target.value)} />
|
||||
</div>
|
||||
<div>
|
||||
<label className="block text-xs font-medium mb-1 text-gray-500">Condition (Optional)</label>
|
||||
<input type="text" className={inputClass + " text-sm"} value={rule.condition || ''} onChange={(e) => handleRuleChange(idx, 'condition', e.target.value)} placeholder="e.g. time_of_day == morning" />
|
||||
</div>
|
||||
<div className="md:col-span-2">
|
||||
<label className="block text-xs font-medium mb-1 text-gray-500">Response</label>
|
||||
<textarea className={inputClass + " text-sm h-16 resize-y"} value={rule.response || ''} onChange={(e) => handleRuleChange(idx, 'response', e.target.value)} />
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
201
homeai-dashboard/vite.config.js
Normal file
201
homeai-dashboard/vite.config.js
Normal file
@@ -0,0 +1,201 @@
|
||||
import { defineConfig } from 'vite'
|
||||
import react from '@vitejs/plugin-react'
|
||||
import tailwindcss from '@tailwindcss/vite'
|
||||
|
||||
// Vite dev-server plugin exposing two local endpoints:
//   GET  /api/health?url=...&mode=tcp?  — probe a service (HTTP GET or raw
//        TCP connect) and report { status, responseTime }.
//   POST /api/service/restart           — restart an allow-listed launchd
//        or docker service via launchctl/docker.
// Both run inside the dev server process with local privileges.
function healthCheckPlugin() {
return {
name: 'health-check-proxy',
configureServer(server) {
server.middlewares.use('/api/health', async (req, res) => {
const params = new URL(req.url, 'http://localhost').searchParams;
const url = params.get('url');
const mode = params.get('mode'); // 'tcp' for raw TCP port check
// NOTE(review): `url` is caller-controlled with no allow-list, so any page
// that can reach this dev server can probe arbitrary hosts/ports (SSRF).
// Acceptable for a LAN-only dev tool — confirm the server is never exposed.
if (!url) {
res.writeHead(400, { 'Content-Type': 'application/json' });
res.end(JSON.stringify({ error: 'Missing url param' }));
return;
}
const start = Date.now();
const parsedUrl = new URL(url);

try {
if (mode === 'tcp') {
// TCP socket connect check for non-HTTP services (e.g. Wyoming)
const { default: net } = await import('net');
await new Promise((resolve, reject) => {
const socket = net.createConnection(
{ host: parsedUrl.hostname, port: parseInt(parsedUrl.port), timeout: 5000 },
() => { socket.destroy(); resolve(); }
);
socket.on('error', reject);
socket.on('timeout', () => { socket.destroy(); reject(new Error('timeout')); });
});
} else {
// HTTP/HTTPS health check
const { default: https } = await import('https');
const { default: http } = await import('http');
const client = parsedUrl.protocol === 'https:' ? https : http;

await new Promise((resolve, reject) => {
// rejectUnauthorized: false — self-signed certs on LAN services are fine
// for a liveness probe; only reachability matters here.
const reqObj = client.get(url, { rejectUnauthorized: false, timeout: 5000 }, (resp) => {
resp.resume();
resolve();
});
reqObj.on('error', reject);
reqObj.on('timeout', () => { reqObj.destroy(); reject(new Error('timeout')); });
});
}

res.writeHead(200, { 'Content-Type': 'application/json' });
res.end(JSON.stringify({ status: 'online', responseTime: Date.now() - start }));
} catch {
// Any connect/timeout failure is reported as offline, not as an HTTP error,
// so the dashboard can render it as a status rather than a fetch failure.
res.writeHead(200, { 'Content-Type': 'application/json' });
res.end(JSON.stringify({ status: 'offline', responseTime: null }));
}
});

// Service restart — runs launchctl or docker restart
server.middlewares.use('/api/service/restart', async (req, res) => {
if (req.method === 'OPTIONS') {
res.writeHead(204, { 'Access-Control-Allow-Origin': '*', 'Access-Control-Allow-Methods': 'POST', 'Access-Control-Allow-Headers': 'Content-Type' });
res.end();
return;
}
if (req.method !== 'POST') {
res.writeHead(405);
res.end();
return;
}
try {
// Read the full JSON body: { type: 'launchd'|'docker', id: <service id> }.
const chunks = [];
for await (const chunk of req) chunks.push(chunk);
const { type, id } = JSON.parse(Buffer.concat(chunks).toString());

if (!type || !id) {
res.writeHead(400, { 'Content-Type': 'application/json' });
res.end(JSON.stringify({ ok: false, error: 'Missing type or id' }));
return;
}

// Whitelist valid service IDs to prevent command injection
// NOTE(review): the launchd targets hard-code uid 501 (`gui/501/...`) —
// this breaks on any machine whose user isn't uid 501; confirm or derive
// the uid at runtime.
const ALLOWED_LAUNCHD = [
'gui/501/com.homeai.ollama',
'gui/501/com.homeai.openclaw',
'gui/501/com.homeai.openclaw-bridge',
'gui/501/com.homeai.wyoming-stt',
'gui/501/com.homeai.wyoming-tts',
'gui/501/com.homeai.wyoming-satellite',
'gui/501/com.homeai.dashboard',
];
const ALLOWED_DOCKER = [
'homeai-open-webui',
'homeai-uptime-kuma',
'homeai-n8n',
'homeai-code-server',
];

// Build the command as an argv array and run via execFile (no shell),
// so the allow-listed id can never be interpreted as shell syntax.
let cmd;
if (type === 'launchd' && ALLOWED_LAUNCHD.includes(id)) {
cmd = ['launchctl', 'kickstart', '-k', id];
} else if (type === 'docker' && ALLOWED_DOCKER.includes(id)) {
cmd = ['docker', 'restart', id];
} else {
res.writeHead(403, { 'Content-Type': 'application/json' });
res.end(JSON.stringify({ ok: false, error: 'Service not in allowed list' }));
return;
}

const { execFile } = await import('child_process');
const { promisify } = await import('util');
const execFileAsync = promisify(execFile);
const { stdout, stderr } = await execFileAsync(cmd[0], cmd.slice(1), { timeout: 30000 });

res.writeHead(200, { 'Content-Type': 'application/json' });
res.end(JSON.stringify({ ok: true, stdout: stdout.trim(), stderr: stderr.trim() }));
} catch (err) {
// Covers body-parse failures and non-zero exits from launchctl/docker.
res.writeHead(500, { 'Content-Type': 'application/json' });
res.end(JSON.stringify({ ok: false, error: err.message }));
}
});
},
};
}
|
||||
|
||||
// Vite dev-server plugin that forwards /api/agent/message, /api/tts and
// /api/stt to the OpenClaw bridge on localhost:8081, buffering the request
// body and streaming the response back. Lets the browser talk to the bridge
// same-origin, avoiding CORS in development.
function bridgeProxyPlugin() {
return {
name: 'bridge-proxy',
configureServer(server) {
// Proxy a request to the OpenClaw bridge
const proxyRequest = (targetPath) => async (req, res) => {
// Answer CORS preflight locally; the bridge never sees OPTIONS.
if (req.method === 'OPTIONS') {
res.writeHead(204, {
'Access-Control-Allow-Origin': '*',
'Access-Control-Allow-Methods': 'POST, GET, OPTIONS',
'Access-Control-Allow-Headers': 'Content-Type',
})
res.end()
return
}

try {
const { default: http } = await import('http')
// Buffer the full incoming body so Content-Length can be set exactly.
const chunks = []
for await (const chunk of req) chunks.push(chunk)
const body = Buffer.concat(chunks)

await new Promise((resolve, reject) => {
const proxyReq = http.request(
`http://localhost:8081${targetPath}`,
{
method: req.method,
headers: {
'Content-Type': req.headers['content-type'] || 'application/json',
'Content-Length': body.length,
},
// Generous timeout: agent/TTS responses can take a long time.
timeout: 120000,
},
(proxyRes) => {
res.writeHead(proxyRes.statusCode, {
'Content-Type': proxyRes.headers['content-type'] || 'application/json',
'Access-Control-Allow-Origin': '*',
})
// Stream the bridge response straight through to the browser.
proxyRes.pipe(res)
proxyRes.on('end', resolve)
// Headers are already sent by now, so a mid-stream error can only
// end the response — resolve (not reject) to skip the 502 path.
proxyRes.on('error', resolve)
}
)
proxyReq.on('error', reject)
proxyReq.on('timeout', () => {
proxyReq.destroy()
reject(new Error('timeout'))
})
proxyReq.write(body)
proxyReq.end()
})
} catch {
// Bridge down/unreachable before any response bytes were sent.
if (!res.headersSent) {
res.writeHead(502, { 'Content-Type': 'application/json' })
res.end(JSON.stringify({ error: 'Bridge unreachable' }))
}
}
}

server.middlewares.use('/api/agent/message', proxyRequest('/api/agent/message'))
server.middlewares.use('/api/tts', proxyRequest('/api/tts'))
server.middlewares.use('/api/stt', proxyRequest('/api/stt'))
},
}
}
|
||||
|
||||
// Dashboard dev-server configuration: custom middleware plugins first
// (health probe + bridge proxy claim their /api routes before anything
// else), then Tailwind and React. Binds to all interfaces so other LAN
// devices can open the dashboard.
export default defineConfig({
plugins: [
healthCheckPlugin(),
bridgeProxyPlugin(),
tailwindcss(),
react(),
],
server: {
host: '0.0.0.0',
port: 5173,
},
})
|
||||
2
homeai-desktop/.gitignore
vendored
Normal file
2
homeai-desktop/.gitignore
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
node_modules/
|
||||
dist/
|
||||
15
homeai-desktop/index.html
Normal file
15
homeai-desktop/index.html
Normal file
@@ -0,0 +1,15 @@
|
||||
<!doctype html>
|
||||
<html lang="en" class="dark">
|
||||
<head>
|
||||
<meta charset="UTF-8" />
|
||||
<link rel="icon" type="image/svg+xml" href="/icon.svg" />
|
||||
<link rel="manifest" href="/manifest.json" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
<meta name="theme-color" content="#030712" />
|
||||
<title>HomeAI Assistant</title>
|
||||
</head>
|
||||
<body class="bg-gray-950 text-gray-100">
|
||||
<div id="root"></div>
|
||||
<script type="module" src="/src/main.jsx"></script>
|
||||
</body>
|
||||
</html>
|
||||
31
homeai-desktop/launchd/com.homeai.desktop-assistant.plist
Normal file
31
homeai-desktop/launchd/com.homeai.desktop-assistant.plist
Normal file
@@ -0,0 +1,31 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>Label</key>
|
||||
<string>com.homeai.desktop-assistant</string>
|
||||
<key>ProgramArguments</key>
|
||||
<array>
|
||||
<string>/opt/homebrew/bin/npx</string>
|
||||
<string>vite</string>
|
||||
<string>--host</string>
|
||||
<string>--port</string>
|
||||
<string>5174</string>
|
||||
</array>
|
||||
<key>WorkingDirectory</key>
|
||||
<string>/Users/aodhan/gitea/homeai/homeai-desktop</string>
|
||||
<key>RunAtLoad</key>
|
||||
<true/>
|
||||
<key>KeepAlive</key>
|
||||
<true/>
|
||||
<key>StandardOutPath</key>
|
||||
<string>/tmp/homeai-desktop-assistant.log</string>
|
||||
<key>StandardErrorPath</key>
|
||||
<string>/tmp/homeai-desktop-assistant-error.log</string>
|
||||
<key>EnvironmentVariables</key>
|
||||
<dict>
|
||||
<key>PATH</key>
|
||||
<string>/opt/homebrew/bin:/usr/local/bin:/usr/bin:/bin</string>
|
||||
</dict>
|
||||
</dict>
|
||||
</plist>
|
||||
2117
homeai-desktop/package-lock.json
generated
Normal file
2117
homeai-desktop/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
24
homeai-desktop/package.json
Normal file
24
homeai-desktop/package.json
Normal file
@@ -0,0 +1,24 @@
|
||||
{
|
||||
"name": "homeai-desktop",
|
||||
"private": true,
|
||||
"version": "0.0.0",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "vite",
|
||||
"build": "vite build",
|
||||
"preview": "vite preview"
|
||||
},
|
||||
"dependencies": {
|
||||
"@tailwindcss/vite": "^4.2.1",
|
||||
"react": "^19.2.0",
|
||||
"react-dom": "^19.2.0",
|
||||
"tailwindcss": "^4.2.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@vitejs/plugin-react": "^5.1.1",
|
||||
"vite": "^8.0.0-beta.13"
|
||||
},
|
||||
"overrides": {
|
||||
"vite": "^8.0.0-beta.13"
|
||||
}
|
||||
}
|
||||
9
homeai-desktop/public/icon.svg
Normal file
9
homeai-desktop/public/icon.svg
Normal file
@@ -0,0 +1,9 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 64 64">
|
||||
<rect width="64" height="64" rx="14" fill="#030712"/>
|
||||
<circle cx="32" cy="28" r="12" fill="none" stroke="#818cf8" stroke-width="2.5"/>
|
||||
<path d="M26 26c0-3.3 2.7-6 6-6s6 2.7 6 6" fill="none" stroke="#818cf8" stroke-width="2" stroke-linecap="round"/>
|
||||
<rect x="30" y="40" width="4" height="8" rx="2" fill="#818cf8"/>
|
||||
<path d="M24 52h16" stroke="#818cf8" stroke-width="2.5" stroke-linecap="round"/>
|
||||
<circle cx="29" cy="27" r="1.5" fill="#34d399"/>
|
||||
<circle cx="35" cy="27" r="1.5" fill="#34d399"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 575 B |
16
homeai-desktop/public/manifest.json
Normal file
16
homeai-desktop/public/manifest.json
Normal file
@@ -0,0 +1,16 @@
|
||||
{
|
||||
"name": "HomeAI Assistant",
|
||||
"short_name": "HomeAI",
|
||||
"description": "Desktop AI assistant powered by local LLMs",
|
||||
"start_url": "/",
|
||||
"display": "standalone",
|
||||
"background_color": "#030712",
|
||||
"theme_color": "#030712",
|
||||
"icons": [
|
||||
{
|
||||
"src": "/icon.svg",
|
||||
"sizes": "any",
|
||||
"type": "image/svg+xml"
|
||||
}
|
||||
]
|
||||
}
|
||||
115
homeai-desktop/src/App.jsx
Normal file
115
homeai-desktop/src/App.jsx
Normal file
@@ -0,0 +1,115 @@
|
||||
import { useState, useEffect, useCallback } from 'react'
|
||||
import ChatPanel from './components/ChatPanel'
|
||||
import InputBar from './components/InputBar'
|
||||
import StatusIndicator from './components/StatusIndicator'
|
||||
import SettingsDrawer from './components/SettingsDrawer'
|
||||
import { useSettings } from './hooks/useSettings'
|
||||
import { useBridgeHealth } from './hooks/useBridgeHealth'
|
||||
import { useChat } from './hooks/useChat'
|
||||
import { useTtsPlayback } from './hooks/useTtsPlayback'
|
||||
import { useVoiceInput } from './hooks/useVoiceInput'
|
||||
|
||||
export default function App() {
|
||||
const { settings, updateSetting } = useSettings()
|
||||
const isOnline = useBridgeHealth()
|
||||
const { messages, isLoading, send, clearHistory } = useChat()
|
||||
const { isPlaying, speak, stop } = useTtsPlayback(settings.voice)
|
||||
const { isRecording, isTranscribing, startRecording, stopRecording } = useVoiceInput(settings.sttMode)
|
||||
const [settingsOpen, setSettingsOpen] = useState(false)
|
||||
|
||||
// Send a message and optionally speak the response
|
||||
const handleSend = useCallback(async (text) => {
|
||||
const response = await send(text)
|
||||
if (response && settings.autoTts) {
|
||||
speak(response)
|
||||
}
|
||||
}, [send, settings.autoTts, speak])
|
||||
|
||||
// Toggle voice recording
|
||||
const handleVoiceToggle = useCallback(async () => {
|
||||
if (isRecording) {
|
||||
const text = await stopRecording()
|
||||
if (text) {
|
||||
handleSend(text)
|
||||
}
|
||||
} else {
|
||||
startRecording()
|
||||
}
|
||||
}, [isRecording, stopRecording, startRecording, handleSend])
|
||||
|
||||
// Space bar push-to-talk when input not focused
|
||||
useEffect(() => {
|
||||
const handleKeyDown = (e) => {
|
||||
if (e.code === 'Space' && e.target.tagName !== 'TEXTAREA' && e.target.tagName !== 'INPUT') {
|
||||
e.preventDefault()
|
||||
handleVoiceToggle()
|
||||
}
|
||||
}
|
||||
window.addEventListener('keydown', handleKeyDown)
|
||||
return () => window.removeEventListener('keydown', handleKeyDown)
|
||||
}, [handleVoiceToggle])
|
||||
|
||||
return (
|
||||
<div className="h-screen flex flex-col bg-gray-950">
|
||||
{/* Status bar */}
|
||||
<header className="flex items-center justify-between px-4 py-2 border-b border-gray-800/50">
|
||||
<div className="flex items-center gap-2">
|
||||
<StatusIndicator isOnline={isOnline} />
|
||||
<span className="text-xs text-gray-500">
|
||||
{isOnline === null ? 'Connecting...' : isOnline ? 'Connected' : 'Offline'}
|
||||
</span>
|
||||
</div>
|
||||
<div className="flex items-center gap-2">
|
||||
{messages.length > 0 && (
|
||||
<button
|
||||
onClick={clearHistory}
|
||||
className="text-xs text-gray-500 hover:text-gray-300 transition-colors px-2 py-1"
|
||||
title="Clear conversation"
|
||||
>
|
||||
Clear
|
||||
</button>
|
||||
)}
|
||||
{isPlaying && (
|
||||
<button
|
||||
onClick={stop}
|
||||
className="text-xs text-indigo-400 hover:text-indigo-300 transition-colors px-2 py-1"
|
||||
title="Stop speaking"
|
||||
>
|
||||
Stop audio
|
||||
</button>
|
||||
)}
|
||||
<button
|
||||
onClick={() => setSettingsOpen(true)}
|
||||
className="text-gray-500 hover:text-gray-300 transition-colors p-1"
|
||||
title="Settings"
|
||||
>
|
||||
<svg className="w-5 h-5" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={1.5}>
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M9.594 3.94c.09-.542.56-.94 1.11-.94h2.593c.55 0 1.02.398 1.11.94l.213 1.281c.063.374.313.686.645.87.074.04.147.083.22.127.325.196.72.257 1.075.124l1.217-.456a1.125 1.125 0 011.37.49l1.296 2.247a1.125 1.125 0 01-.26 1.431l-1.003.827c-.293.241-.438.613-.43.992a7.723 7.723 0 010 .255c-.008.378.137.75.43.991l1.004.827c.424.35.534.955.26 1.43l-1.298 2.247a1.125 1.125 0 01-1.369.491l-1.217-.456c-.355-.133-.75-.072-1.076.124a6.47 6.47 0 01-.22.128c-.331.183-.581.495-.644.869l-.213 1.281c-.09.543-.56.941-1.11.941h-2.594c-.55 0-1.019-.398-1.11-.94l-.213-1.281c-.062-.374-.312-.686-.644-.87a6.52 6.52 0 01-.22-.127c-.325-.196-.72-.257-1.076-.124l-1.217.456a1.125 1.125 0 01-1.369-.49l-1.297-2.247a1.125 1.125 0 01.26-1.431l1.004-.827c.292-.24.437-.613.43-.991a6.932 6.932 0 010-.255c.007-.38-.138-.751-.43-.992l-1.004-.827a1.125 1.125 0 01-.26-1.43l1.297-2.247a1.125 1.125 0 011.37-.491l1.216.456c.356.133.751.072 1.076-.124.072-.044.146-.086.22-.128.332-.183.582-.495.644-.869l.214-1.28z" />
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M15 12a3 3 0 11-6 0 3 3 0 016 0z" />
|
||||
</svg>
|
||||
</button>
|
||||
</div>
|
||||
</header>
|
||||
|
||||
{/* Chat area */}
|
||||
<ChatPanel messages={messages} isLoading={isLoading} onReplay={speak} />
|
||||
|
||||
{/* Input */}
|
||||
<InputBar
|
||||
onSend={handleSend}
|
||||
onVoiceToggle={handleVoiceToggle}
|
||||
isLoading={isLoading}
|
||||
isRecording={isRecording}
|
||||
isTranscribing={isTranscribing}
|
||||
/>
|
||||
|
||||
{/* Settings drawer */}
|
||||
<SettingsDrawer
|
||||
isOpen={settingsOpen}
|
||||
onClose={() => setSettingsOpen(false)}
|
||||
settings={settings}
|
||||
onUpdate={updateSetting}
|
||||
/>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
35
homeai-desktop/src/components/ChatPanel.jsx
Normal file
35
homeai-desktop/src/components/ChatPanel.jsx
Normal file
@@ -0,0 +1,35 @@
|
||||
import { useEffect, useRef } from 'react'
|
||||
import MessageBubble from './MessageBubble'
|
||||
import ThinkingIndicator from './ThinkingIndicator'
|
||||
|
||||
export default function ChatPanel({ messages, isLoading, onReplay }) {
|
||||
const bottomRef = useRef(null)
|
||||
|
||||
useEffect(() => {
|
||||
bottomRef.current?.scrollIntoView({ behavior: 'smooth' })
|
||||
}, [messages, isLoading])
|
||||
|
||||
if (messages.length === 0 && !isLoading) {
|
||||
return (
|
||||
<div className="flex-1 flex items-center justify-center">
|
||||
<div className="text-center">
|
||||
<div className="w-16 h-16 rounded-full bg-indigo-600/20 flex items-center justify-center mx-auto mb-4">
|
||||
<span className="text-indigo-400 text-2xl">AI</span>
|
||||
</div>
|
||||
<h2 className="text-xl font-medium text-gray-200 mb-2">Hi, I'm Aria</h2>
|
||||
<p className="text-gray-500 text-sm">Type a message or press the mic to talk</p>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="flex-1 overflow-y-auto py-4">
|
||||
{messages.map((msg) => (
|
||||
<MessageBubble key={msg.id} message={msg} onReplay={onReplay} />
|
||||
))}
|
||||
{isLoading && <ThinkingIndicator />}
|
||||
<div ref={bottomRef} />
|
||||
</div>
|
||||
)
|
||||
}
|
||||
53
homeai-desktop/src/components/InputBar.jsx
Normal file
53
homeai-desktop/src/components/InputBar.jsx
Normal file
@@ -0,0 +1,53 @@
|
||||
import { useState, useRef } from 'react'
|
||||
import VoiceButton from './VoiceButton'
|
||||
|
||||
export default function InputBar({ onSend, onVoiceToggle, isLoading, isRecording, isTranscribing }) {
|
||||
const [text, setText] = useState('')
|
||||
const inputRef = useRef(null)
|
||||
|
||||
const handleSubmit = (e) => {
|
||||
e.preventDefault()
|
||||
if (!text.trim() || isLoading) return
|
||||
onSend(text)
|
||||
setText('')
|
||||
}
|
||||
|
||||
const handleKeyDown = (e) => {
|
||||
if (e.key === 'Enter' && !e.shiftKey) {
|
||||
e.preventDefault()
|
||||
handleSubmit(e)
|
||||
}
|
||||
}
|
||||
|
||||
return (
|
||||
<form onSubmit={handleSubmit} className="border-t border-gray-800 bg-gray-950 px-4 py-3">
|
||||
<div className="flex items-end gap-2 max-w-3xl mx-auto">
|
||||
<VoiceButton
|
||||
isRecording={isRecording}
|
||||
isTranscribing={isTranscribing}
|
||||
onToggle={onVoiceToggle}
|
||||
disabled={isLoading}
|
||||
/>
|
||||
<textarea
|
||||
ref={inputRef}
|
||||
value={text}
|
||||
onChange={(e) => setText(e.target.value)}
|
||||
onKeyDown={handleKeyDown}
|
||||
placeholder="Type a message..."
|
||||
rows={1}
|
||||
className="flex-1 bg-gray-800 text-gray-100 rounded-xl px-4 py-2.5 text-sm resize-none placeholder-gray-500 focus:outline-none focus:ring-1 focus:ring-indigo-500 min-h-[42px] max-h-32"
|
||||
disabled={isLoading}
|
||||
/>
|
||||
<button
|
||||
type="submit"
|
||||
disabled={!text.trim() || isLoading}
|
||||
className="w-10 h-10 rounded-full bg-indigo-600 text-white flex items-center justify-center shrink-0 hover:bg-indigo-500 disabled:opacity-40 disabled:hover:bg-indigo-600 transition-colors"
|
||||
>
|
||||
<svg className="w-5 h-5" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={2}>
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M6 12L3.269 3.126A59.768 59.768 0 0121.485 12 59.77 59.77 0 013.27 20.876L5.999 12zm0 0h7.5" />
|
||||
</svg>
|
||||
</button>
|
||||
</div>
|
||||
</form>
|
||||
)
|
||||
}
|
||||
39
homeai-desktop/src/components/MessageBubble.jsx
Normal file
39
homeai-desktop/src/components/MessageBubble.jsx
Normal file
@@ -0,0 +1,39 @@
|
||||
export default function MessageBubble({ message, onReplay }) {
|
||||
const isUser = message.role === 'user'
|
||||
|
||||
return (
|
||||
<div className={`flex ${isUser ? 'justify-end' : 'justify-start'} px-4 py-1.5`}>
|
||||
<div className={`flex items-start gap-3 max-w-[80%] ${isUser ? 'flex-row-reverse' : ''}`}>
|
||||
{!isUser && (
|
||||
<div className="w-8 h-8 rounded-full bg-indigo-600/20 flex items-center justify-center shrink-0 mt-0.5">
|
||||
<span className="text-indigo-400 text-sm">AI</span>
|
||||
</div>
|
||||
)}
|
||||
<div>
|
||||
<div
|
||||
className={`rounded-2xl px-4 py-2.5 text-sm leading-relaxed whitespace-pre-wrap ${
|
||||
isUser
|
||||
? 'bg-indigo-600 text-white'
|
||||
: message.isError
|
||||
? 'bg-red-900/40 text-red-200 border border-red-800/50'
|
||||
: 'bg-gray-800 text-gray-100'
|
||||
}`}
|
||||
>
|
||||
{message.content}
|
||||
</div>
|
||||
{!isUser && !message.isError && onReplay && (
|
||||
<button
|
||||
onClick={() => onReplay(message.content)}
|
||||
className="mt-1 ml-1 text-gray-500 hover:text-indigo-400 transition-colors"
|
||||
title="Replay audio"
|
||||
>
|
||||
<svg className="w-4 h-4" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={2}>
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M15.536 8.464a5 5 0 010 7.072M17.95 6.05a8 8 0 010 11.9M6.5 9H4a1 1 0 00-1 1v4a1 1 0 001 1h2.5l4 4V5l-4 4z" />
|
||||
</svg>
|
||||
</button>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
74
homeai-desktop/src/components/SettingsDrawer.jsx
Normal file
74
homeai-desktop/src/components/SettingsDrawer.jsx
Normal file
@@ -0,0 +1,74 @@
|
||||
import { VOICES } from '../lib/constants'
|
||||
|
||||
export default function SettingsDrawer({ isOpen, onClose, settings, onUpdate }) {
|
||||
if (!isOpen) return null
|
||||
|
||||
return (
|
||||
<>
|
||||
<div className="fixed inset-0 bg-black/50 z-40" onClick={onClose} />
|
||||
<div className="fixed right-0 top-0 bottom-0 w-80 bg-gray-900 border-l border-gray-800 z-50 flex flex-col">
|
||||
<div className="flex items-center justify-between px-4 py-3 border-b border-gray-800">
|
||||
<h2 className="text-sm font-medium text-gray-200">Settings</h2>
|
||||
<button onClick={onClose} className="text-gray-500 hover:text-gray-300">
|
||||
<svg className="w-5 h-5" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={2}>
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M6 18L18 6M6 6l12 12" />
|
||||
</svg>
|
||||
</button>
|
||||
</div>
|
||||
<div className="flex-1 overflow-y-auto p-4 space-y-5">
|
||||
{/* Voice */}
|
||||
<div>
|
||||
<label className="block text-xs font-medium text-gray-400 mb-1.5">Voice</label>
|
||||
<select
|
||||
value={settings.voice}
|
||||
onChange={(e) => onUpdate('voice', e.target.value)}
|
||||
className="w-full bg-gray-800 text-gray-200 text-sm rounded-lg px-3 py-2 border border-gray-700 focus:outline-none focus:border-indigo-500"
|
||||
>
|
||||
{VOICES.map((v) => (
|
||||
<option key={v.id} value={v.id}>{v.label}</option>
|
||||
))}
|
||||
</select>
|
||||
</div>
|
||||
|
||||
{/* Auto TTS */}
|
||||
<div className="flex items-center justify-between">
|
||||
<div>
|
||||
<div className="text-sm text-gray-200">Auto-speak responses</div>
|
||||
<div className="text-xs text-gray-500">Speak assistant replies aloud</div>
|
||||
</div>
|
||||
<button
|
||||
onClick={() => onUpdate('autoTts', !settings.autoTts)}
|
||||
className={`relative w-10 h-6 rounded-full transition-colors ${
|
||||
settings.autoTts ? 'bg-indigo-600' : 'bg-gray-700'
|
||||
}`}
|
||||
>
|
||||
<span
|
||||
className={`absolute top-0.5 left-0.5 w-5 h-5 rounded-full bg-white transition-transform ${
|
||||
settings.autoTts ? 'translate-x-4' : ''
|
||||
}`}
|
||||
/>
|
||||
</button>
|
||||
</div>
|
||||
|
||||
{/* STT Mode */}
|
||||
<div>
|
||||
<label className="block text-xs font-medium text-gray-400 mb-1.5">Speech recognition</label>
|
||||
<select
|
||||
value={settings.sttMode}
|
||||
onChange={(e) => onUpdate('sttMode', e.target.value)}
|
||||
className="w-full bg-gray-800 text-gray-200 text-sm rounded-lg px-3 py-2 border border-gray-700 focus:outline-none focus:border-indigo-500"
|
||||
>
|
||||
<option value="bridge">Wyoming STT (local)</option>
|
||||
<option value="webspeech">Web Speech API (browser)</option>
|
||||
</select>
|
||||
<p className="text-xs text-gray-500 mt-1">
|
||||
{settings.sttMode === 'bridge'
|
||||
? 'Uses Whisper via the local bridge server'
|
||||
: 'Uses browser built-in speech recognition'}
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</>
|
||||
)
|
||||
}
|
||||
11
homeai-desktop/src/components/StatusIndicator.jsx
Normal file
11
homeai-desktop/src/components/StatusIndicator.jsx
Normal file
@@ -0,0 +1,11 @@
|
||||
export default function StatusIndicator({ isOnline }) {
|
||||
if (isOnline === null) {
|
||||
return <span className="inline-block w-2.5 h-2.5 rounded-full bg-gray-500 animate-pulse" title="Checking..." />
|
||||
}
|
||||
return (
|
||||
<span
|
||||
className={`inline-block w-2.5 h-2.5 rounded-full ${isOnline ? 'bg-emerald-400' : 'bg-red-400'}`}
|
||||
title={isOnline ? 'Bridge online' : 'Bridge offline'}
|
||||
/>
|
||||
)
|
||||
}
|
||||
14
homeai-desktop/src/components/ThinkingIndicator.jsx
Normal file
14
homeai-desktop/src/components/ThinkingIndicator.jsx
Normal file
@@ -0,0 +1,14 @@
|
||||
export default function ThinkingIndicator() {
|
||||
return (
|
||||
<div className="flex items-start gap-3 px-4 py-3">
|
||||
<div className="w-8 h-8 rounded-full bg-indigo-600/20 flex items-center justify-center shrink-0">
|
||||
<span className="text-indigo-400 text-sm">AI</span>
|
||||
</div>
|
||||
<div className="flex items-center gap-1 pt-2.5">
|
||||
<span className="w-2 h-2 rounded-full bg-gray-400 animate-[bounce_1.4s_ease-in-out_infinite]" />
|
||||
<span className="w-2 h-2 rounded-full bg-gray-400 animate-[bounce_1.4s_ease-in-out_0.2s_infinite]" />
|
||||
<span className="w-2 h-2 rounded-full bg-gray-400 animate-[bounce_1.4s_ease-in-out_0.4s_infinite]" />
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
32
homeai-desktop/src/components/VoiceButton.jsx
Normal file
32
homeai-desktop/src/components/VoiceButton.jsx
Normal file
@@ -0,0 +1,32 @@
|
||||
export default function VoiceButton({ isRecording, isTranscribing, onToggle, disabled }) {
|
||||
const handleClick = () => {
|
||||
if (disabled || isTranscribing) return
|
||||
onToggle()
|
||||
}
|
||||
|
||||
return (
|
||||
<button
|
||||
onClick={handleClick}
|
||||
disabled={disabled || isTranscribing}
|
||||
className={`w-10 h-10 rounded-full flex items-center justify-center transition-all shrink-0 ${
|
||||
isRecording
|
||||
? 'bg-red-500 text-white shadow-[0_0_0_4px_rgba(239,68,68,0.3)] animate-pulse'
|
||||
: isTranscribing
|
||||
? 'bg-gray-700 text-gray-400 cursor-wait'
|
||||
: 'bg-gray-800 text-gray-400 hover:bg-gray-700 hover:text-gray-200'
|
||||
}`}
|
||||
title={isRecording ? 'Stop recording' : isTranscribing ? 'Transcribing...' : 'Start recording (Space)'}
|
||||
>
|
||||
{isTranscribing ? (
|
||||
<svg className="w-5 h-5 animate-spin" fill="none" viewBox="0 0 24 24">
|
||||
<circle className="opacity-25" cx="12" cy="12" r="10" stroke="currentColor" strokeWidth="4" />
|
||||
<path className="opacity-75" fill="currentColor" d="M4 12a8 8 0 018-8V0C5.373 0 0 5.373 0 12h4z" />
|
||||
</svg>
|
||||
) : (
|
||||
<svg className="w-5 h-5" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={2}>
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M12 18.75a6 6 0 006-6v-1.5m-6 7.5a6 6 0 01-6-6v-1.5m6 7.5v3.75m-3.75 0h7.5M12 15.75a3 3 0 01-3-3V4.5a3 3 0 116 0v8.25a3 3 0 01-3 3z" />
|
||||
</svg>
|
||||
)}
|
||||
</button>
|
||||
)
|
||||
}
|
||||
18
homeai-desktop/src/hooks/useBridgeHealth.js
Normal file
18
homeai-desktop/src/hooks/useBridgeHealth.js
Normal file
@@ -0,0 +1,18 @@
|
||||
import { useState, useEffect, useRef } from 'react'
|
||||
import { healthCheck } from '../lib/api'
|
||||
|
||||
export function useBridgeHealth() {
|
||||
const [isOnline, setIsOnline] = useState(null)
|
||||
const intervalRef = useRef(null)
|
||||
|
||||
useEffect(() => {
|
||||
const check = async () => {
|
||||
setIsOnline(await healthCheck())
|
||||
}
|
||||
check()
|
||||
intervalRef.current = setInterval(check, 15000)
|
||||
return () => clearInterval(intervalRef.current)
|
||||
}, [])
|
||||
|
||||
return isOnline
|
||||
}
|
||||
45
homeai-desktop/src/hooks/useChat.js
Normal file
45
homeai-desktop/src/hooks/useChat.js
Normal file
@@ -0,0 +1,45 @@
|
||||
import { useState, useCallback } from 'react'
|
||||
import { sendMessage } from '../lib/api'
|
||||
|
||||
export function useChat() {
|
||||
const [messages, setMessages] = useState([])
|
||||
const [isLoading, setIsLoading] = useState(false)
|
||||
|
||||
const send = useCallback(async (text) => {
|
||||
if (!text.trim() || isLoading) return null
|
||||
|
||||
const userMsg = { id: Date.now(), role: 'user', content: text.trim(), timestamp: new Date() }
|
||||
setMessages((prev) => [...prev, userMsg])
|
||||
setIsLoading(true)
|
||||
|
||||
try {
|
||||
const response = await sendMessage(text.trim())
|
||||
const assistantMsg = {
|
||||
id: Date.now() + 1,
|
||||
role: 'assistant',
|
||||
content: response,
|
||||
timestamp: new Date(),
|
||||
}
|
||||
setMessages((prev) => [...prev, assistantMsg])
|
||||
return response
|
||||
} catch (err) {
|
||||
const errorMsg = {
|
||||
id: Date.now() + 1,
|
||||
role: 'assistant',
|
||||
content: `Error: ${err.message}`,
|
||||
timestamp: new Date(),
|
||||
isError: true,
|
||||
}
|
||||
setMessages((prev) => [...prev, errorMsg])
|
||||
return null
|
||||
} finally {
|
||||
setIsLoading(false)
|
||||
}
|
||||
}, [isLoading])
|
||||
|
||||
const clearHistory = useCallback(() => {
|
||||
setMessages([])
|
||||
}, [])
|
||||
|
||||
return { messages, isLoading, send, clearHistory }
|
||||
}
|
||||
27
homeai-desktop/src/hooks/useSettings.js
Normal file
27
homeai-desktop/src/hooks/useSettings.js
Normal file
@@ -0,0 +1,27 @@
|
||||
import { useState, useCallback } from 'react'
|
||||
import { DEFAULT_SETTINGS } from '../lib/constants'
|
||||
|
||||
const STORAGE_KEY = 'homeai_desktop_settings'
|
||||
|
||||
function loadSettings() {
|
||||
try {
|
||||
const stored = localStorage.getItem(STORAGE_KEY)
|
||||
return stored ? { ...DEFAULT_SETTINGS, ...JSON.parse(stored) } : { ...DEFAULT_SETTINGS }
|
||||
} catch {
|
||||
return { ...DEFAULT_SETTINGS }
|
||||
}
|
||||
}
|
||||
|
||||
export function useSettings() {
|
||||
const [settings, setSettings] = useState(loadSettings)
|
||||
|
||||
const updateSetting = useCallback((key, value) => {
|
||||
setSettings((prev) => {
|
||||
const next = { ...prev, [key]: value }
|
||||
localStorage.setItem(STORAGE_KEY, JSON.stringify(next))
|
||||
return next
|
||||
})
|
||||
}, [])
|
||||
|
||||
return { settings, updateSetting }
|
||||
}
|
||||
56
homeai-desktop/src/hooks/useTtsPlayback.js
Normal file
56
homeai-desktop/src/hooks/useTtsPlayback.js
Normal file
@@ -0,0 +1,56 @@
|
||||
import { useState, useRef, useCallback } from 'react'
|
||||
import { synthesize } from '../lib/api'
|
||||
|
||||
export function useTtsPlayback(voice) {
|
||||
const [isPlaying, setIsPlaying] = useState(false)
|
||||
const audioCtxRef = useRef(null)
|
||||
const sourceRef = useRef(null)
|
||||
|
||||
const getAudioContext = () => {
|
||||
if (!audioCtxRef.current || audioCtxRef.current.state === 'closed') {
|
||||
audioCtxRef.current = new AudioContext()
|
||||
}
|
||||
return audioCtxRef.current
|
||||
}
|
||||
|
||||
const speak = useCallback(async (text) => {
|
||||
if (!text) return
|
||||
|
||||
// Stop any current playback
|
||||
if (sourceRef.current) {
|
||||
try { sourceRef.current.stop() } catch {}
|
||||
}
|
||||
|
||||
setIsPlaying(true)
|
||||
try {
|
||||
const audioData = await synthesize(text, voice)
|
||||
const ctx = getAudioContext()
|
||||
if (ctx.state === 'suspended') await ctx.resume()
|
||||
|
||||
const audioBuffer = await ctx.decodeAudioData(audioData)
|
||||
const source = ctx.createBufferSource()
|
||||
source.buffer = audioBuffer
|
||||
source.connect(ctx.destination)
|
||||
sourceRef.current = source
|
||||
|
||||
source.onended = () => {
|
||||
setIsPlaying(false)
|
||||
sourceRef.current = null
|
||||
}
|
||||
source.start()
|
||||
} catch (err) {
|
||||
console.error('TTS playback error:', err)
|
||||
setIsPlaying(false)
|
||||
}
|
||||
}, [voice])
|
||||
|
||||
const stop = useCallback(() => {
|
||||
if (sourceRef.current) {
|
||||
try { sourceRef.current.stop() } catch {}
|
||||
sourceRef.current = null
|
||||
}
|
||||
setIsPlaying(false)
|
||||
}, [])
|
||||
|
||||
return { isPlaying, speak, stop }
|
||||
}
|
||||
93
homeai-desktop/src/hooks/useVoiceInput.js
Normal file
93
homeai-desktop/src/hooks/useVoiceInput.js
Normal file
@@ -0,0 +1,93 @@
|
||||
import { useState, useRef, useCallback } from 'react'
|
||||
import { createRecorder } from '../lib/audio'
|
||||
import { transcribe } from '../lib/api'
|
||||
|
||||
export function useVoiceInput(sttMode = 'bridge') {
|
||||
const [isRecording, setIsRecording] = useState(false)
|
||||
const [isTranscribing, setIsTranscribing] = useState(false)
|
||||
const recorderRef = useRef(null)
|
||||
const webSpeechRef = useRef(null)
|
||||
|
||||
const startRecording = useCallback(async () => {
|
||||
if (isRecording) return
|
||||
|
||||
if (sttMode === 'webspeech' && 'webkitSpeechRecognition' in window) {
|
||||
return startWebSpeech()
|
||||
}
|
||||
|
||||
try {
|
||||
const recorder = createRecorder()
|
||||
recorderRef.current = recorder
|
||||
await recorder.start()
|
||||
setIsRecording(true)
|
||||
} catch (err) {
|
||||
console.error('Mic access error:', err)
|
||||
}
|
||||
}, [isRecording, sttMode])
|
||||
|
||||
const stopRecording = useCallback(async () => {
|
||||
if (!isRecording) return null
|
||||
|
||||
if (sttMode === 'webspeech' && webSpeechRef.current) {
|
||||
return stopWebSpeech()
|
||||
}
|
||||
|
||||
setIsRecording(false)
|
||||
setIsTranscribing(true)
|
||||
|
||||
try {
|
||||
const wavBlob = await recorderRef.current.stop()
|
||||
recorderRef.current = null
|
||||
const text = await transcribe(wavBlob)
|
||||
return text
|
||||
} catch (err) {
|
||||
console.error('Transcription error:', err)
|
||||
return null
|
||||
} finally {
|
||||
setIsTranscribing(false)
|
||||
}
|
||||
}, [isRecording, sttMode])
|
||||
|
||||
// Web Speech API fallback
|
||||
function startWebSpeech() {
|
||||
return new Promise((resolve) => {
|
||||
const SpeechRecognition = window.webkitSpeechRecognition || window.SpeechRecognition
|
||||
const recognition = new SpeechRecognition()
|
||||
recognition.continuous = false
|
||||
recognition.interimResults = false
|
||||
recognition.lang = 'en-US'
|
||||
webSpeechRef.current = { recognition, resolve: null }
|
||||
recognition.start()
|
||||
setIsRecording(true)
|
||||
resolve()
|
||||
})
|
||||
}
|
||||
|
||||
function stopWebSpeech() {
|
||||
return new Promise((resolve) => {
|
||||
const { recognition } = webSpeechRef.current
|
||||
recognition.onresult = (e) => {
|
||||
const text = e.results[0]?.[0]?.transcript || ''
|
||||
setIsRecording(false)
|
||||
webSpeechRef.current = null
|
||||
resolve(text)
|
||||
}
|
||||
recognition.onerror = () => {
|
||||
setIsRecording(false)
|
||||
webSpeechRef.current = null
|
||||
resolve(null)
|
||||
}
|
||||
recognition.onend = () => {
|
||||
// If no result event fired, resolve with null
|
||||
setIsRecording(false)
|
||||
if (webSpeechRef.current) {
|
||||
webSpeechRef.current = null
|
||||
resolve(null)
|
||||
}
|
||||
}
|
||||
recognition.stop()
|
||||
})
|
||||
}
|
||||
|
||||
return { isRecording, isTranscribing, startRecording, stopRecording }
|
||||
}
|
||||
1
homeai-desktop/src/index.css
Normal file
1
homeai-desktop/src/index.css
Normal file
@@ -0,0 +1 @@
|
||||
@import "tailwindcss";
|
||||
44
homeai-desktop/src/lib/api.js
Normal file
44
homeai-desktop/src/lib/api.js
Normal file
@@ -0,0 +1,44 @@
|
||||
export async function sendMessage(text) {
|
||||
const res = await fetch('/api/agent/message', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ message: text, agent: 'main' }),
|
||||
})
|
||||
if (!res.ok) {
|
||||
const err = await res.json().catch(() => ({ error: 'Request failed' }))
|
||||
throw new Error(err.error || `HTTP ${res.status}`)
|
||||
}
|
||||
const data = await res.json()
|
||||
return data.response
|
||||
}
|
||||
|
||||
export async function synthesize(text, voice) {
|
||||
const res = await fetch('/api/tts', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ text, voice }),
|
||||
})
|
||||
if (!res.ok) throw new Error('TTS failed')
|
||||
return await res.arrayBuffer()
|
||||
}
|
||||
|
||||
export async function transcribe(wavBlob) {
|
||||
const res = await fetch('/api/stt', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'audio/wav' },
|
||||
body: wavBlob,
|
||||
})
|
||||
if (!res.ok) throw new Error('STT failed')
|
||||
const data = await res.json()
|
||||
return data.text
|
||||
}
|
||||
|
||||
export async function healthCheck() {
|
||||
try {
|
||||
const res = await fetch('/api/health', { signal: AbortSignal.timeout(5000) })
|
||||
const data = await res.json()
|
||||
return data.status === 'online'
|
||||
} catch {
|
||||
return false
|
||||
}
|
||||
}
|
||||
103
homeai-desktop/src/lib/audio.js
Normal file
103
homeai-desktop/src/lib/audio.js
Normal file
@@ -0,0 +1,103 @@
|
||||
const TARGET_RATE = 16000
|
||||
|
||||
/**
|
||||
* Record audio from the microphone, returning a WAV blob when stopped.
|
||||
* Returns { start, stop } — call start() to begin, stop() resolves with a Blob.
|
||||
*/
|
||||
export function createRecorder() {
|
||||
let audioCtx
|
||||
let source
|
||||
let processor
|
||||
let stream
|
||||
let samples = []
|
||||
|
||||
async function start() {
|
||||
samples = []
|
||||
stream = await navigator.mediaDevices.getUserMedia({
|
||||
audio: { channelCount: 1, sampleRate: TARGET_RATE },
|
||||
})
|
||||
audioCtx = new AudioContext({ sampleRate: TARGET_RATE })
|
||||
source = audioCtx.createMediaStreamSource(stream)
|
||||
|
||||
// ScriptProcessorNode captures raw Float32 PCM
|
||||
processor = audioCtx.createScriptProcessor(4096, 1, 1)
|
||||
processor.onaudioprocess = (e) => {
|
||||
const input = e.inputBuffer.getChannelData(0)
|
||||
samples.push(new Float32Array(input))
|
||||
}
|
||||
source.connect(processor)
|
||||
processor.connect(audioCtx.destination)
|
||||
}
|
||||
|
||||
async function stop() {
|
||||
// Stop everything
|
||||
processor.disconnect()
|
||||
source.disconnect()
|
||||
stream.getTracks().forEach((t) => t.stop())
|
||||
await audioCtx.close()
|
||||
|
||||
// Merge all sample chunks
|
||||
const totalLength = samples.reduce((acc, s) => acc + s.length, 0)
|
||||
const merged = new Float32Array(totalLength)
|
||||
let offset = 0
|
||||
for (const chunk of samples) {
|
||||
merged.set(chunk, offset)
|
||||
offset += chunk.length
|
||||
}
|
||||
|
||||
// Resample if the actual sample rate differs from target
|
||||
const resampled = audioCtx.sampleRate !== TARGET_RATE
|
||||
? resample(merged, audioCtx.sampleRate, TARGET_RATE)
|
||||
: merged
|
||||
|
||||
// Convert to 16-bit PCM WAV
|
||||
return encodeWav(resampled, TARGET_RATE)
|
||||
}
|
||||
|
||||
return { start, stop }
|
||||
}
|
||||
|
||||
function resample(samples, fromRate, toRate) {
|
||||
const ratio = fromRate / toRate
|
||||
const newLength = Math.round(samples.length / ratio)
|
||||
const result = new Float32Array(newLength)
|
||||
for (let i = 0; i < newLength; i++) {
|
||||
result[i] = samples[Math.round(i * ratio)]
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
function encodeWav(samples, sampleRate) {
|
||||
const numSamples = samples.length
|
||||
const buffer = new ArrayBuffer(44 + numSamples * 2)
|
||||
const view = new DataView(buffer)
|
||||
|
||||
// WAV header
|
||||
writeString(view, 0, 'RIFF')
|
||||
view.setUint32(4, 36 + numSamples * 2, true)
|
||||
writeString(view, 8, 'WAVE')
|
||||
writeString(view, 12, 'fmt ')
|
||||
view.setUint32(16, 16, true) // chunk size
|
||||
view.setUint16(20, 1, true) // PCM
|
||||
view.setUint16(22, 1, true) // mono
|
||||
view.setUint32(24, sampleRate, true)
|
||||
view.setUint32(28, sampleRate * 2, true) // byte rate
|
||||
view.setUint16(32, 2, true) // block align
|
||||
view.setUint16(34, 16, true) // bits per sample
|
||||
writeString(view, 36, 'data')
|
||||
view.setUint32(40, numSamples * 2, true)
|
||||
|
||||
// PCM data — clamp Float32 to Int16
|
||||
for (let i = 0; i < numSamples; i++) {
|
||||
const s = Math.max(-1, Math.min(1, samples[i]))
|
||||
view.setInt16(44 + i * 2, s < 0 ? s * 0x8000 : s * 0x7fff, true)
|
||||
}
|
||||
|
||||
return new Blob([buffer], { type: 'audio/wav' })
|
||||
}
|
||||
|
||||
function writeString(view, offset, str) {
|
||||
for (let i = 0; i < str.length; i++) {
|
||||
view.setUint8(offset + i, str.charCodeAt(i))
|
||||
}
|
||||
}
|
||||
37
homeai-desktop/src/lib/constants.js
Normal file
37
homeai-desktop/src/lib/constants.js
Normal file
@@ -0,0 +1,37 @@
|
||||
export const DEFAULT_VOICE = 'af_heart'
|
||||
|
||||
export const VOICES = [
|
||||
{ id: 'af_heart', label: 'Heart (F, US)' },
|
||||
{ id: 'af_alloy', label: 'Alloy (F, US)' },
|
||||
{ id: 'af_aoede', label: 'Aoede (F, US)' },
|
||||
{ id: 'af_bella', label: 'Bella (F, US)' },
|
||||
{ id: 'af_jessica', label: 'Jessica (F, US)' },
|
||||
{ id: 'af_kore', label: 'Kore (F, US)' },
|
||||
{ id: 'af_nicole', label: 'Nicole (F, US)' },
|
||||
{ id: 'af_nova', label: 'Nova (F, US)' },
|
||||
{ id: 'af_river', label: 'River (F, US)' },
|
||||
{ id: 'af_sarah', label: 'Sarah (F, US)' },
|
||||
{ id: 'af_sky', label: 'Sky (F, US)' },
|
||||
{ id: 'am_adam', label: 'Adam (M, US)' },
|
||||
{ id: 'am_echo', label: 'Echo (M, US)' },
|
||||
{ id: 'am_eric', label: 'Eric (M, US)' },
|
||||
{ id: 'am_fenrir', label: 'Fenrir (M, US)' },
|
||||
{ id: 'am_liam', label: 'Liam (M, US)' },
|
||||
{ id: 'am_michael', label: 'Michael (M, US)' },
|
||||
{ id: 'am_onyx', label: 'Onyx (M, US)' },
|
||||
{ id: 'am_puck', label: 'Puck (M, US)' },
|
||||
{ id: 'bf_alice', label: 'Alice (F, UK)' },
|
||||
{ id: 'bf_emma', label: 'Emma (F, UK)' },
|
||||
{ id: 'bf_isabella', label: 'Isabella (F, UK)' },
|
||||
{ id: 'bf_lily', label: 'Lily (F, UK)' },
|
||||
{ id: 'bm_daniel', label: 'Daniel (M, UK)' },
|
||||
{ id: 'bm_fable', label: 'Fable (M, UK)' },
|
||||
{ id: 'bm_george', label: 'George (M, UK)' },
|
||||
{ id: 'bm_lewis', label: 'Lewis (M, UK)' },
|
||||
]
|
||||
|
||||
export const DEFAULT_SETTINGS = {
|
||||
voice: DEFAULT_VOICE,
|
||||
autoTts: true,
|
||||
sttMode: 'bridge', // 'bridge' or 'webspeech'
|
||||
}
|
||||
10
homeai-desktop/src/main.jsx
Normal file
10
homeai-desktop/src/main.jsx
Normal file
@@ -0,0 +1,10 @@
|
||||
import { StrictMode } from 'react'
|
||||
import { createRoot } from 'react-dom/client'
|
||||
import './index.css'
|
||||
import App from './App.jsx'
|
||||
|
||||
createRoot(document.getElementById('root')).render(
|
||||
<StrictMode>
|
||||
<App />
|
||||
</StrictMode>,
|
||||
)
|
||||
102
homeai-desktop/vite.config.js
Normal file
102
homeai-desktop/vite.config.js
Normal file
@@ -0,0 +1,102 @@
|
||||
import { defineConfig } from 'vite'
|
||||
import react from '@vitejs/plugin-react'
|
||||
import tailwindcss from '@tailwindcss/vite'
|
||||
|
||||
function bridgeProxyPlugin() {
|
||||
return {
|
||||
name: 'bridge-proxy',
|
||||
configureServer(server) {
|
||||
// Proxy a request to the OpenClaw bridge
|
||||
const proxyRequest = (targetPath) => async (req, res) => {
|
||||
if (req.method === 'OPTIONS') {
|
||||
res.writeHead(204, {
|
||||
'Access-Control-Allow-Origin': '*',
|
||||
'Access-Control-Allow-Methods': 'POST, GET, OPTIONS',
|
||||
'Access-Control-Allow-Headers': 'Content-Type',
|
||||
})
|
||||
res.end()
|
||||
return
|
||||
}
|
||||
|
||||
try {
|
||||
const { default: http } = await import('http')
|
||||
const chunks = []
|
||||
for await (const chunk of req) chunks.push(chunk)
|
||||
const body = Buffer.concat(chunks)
|
||||
|
||||
await new Promise((resolve, reject) => {
|
||||
const proxyReq = http.request(
|
||||
`http://localhost:8081${targetPath}`,
|
||||
{
|
||||
method: req.method,
|
||||
headers: {
|
||||
'Content-Type': req.headers['content-type'] || 'application/json',
|
||||
'Content-Length': body.length,
|
||||
},
|
||||
timeout: 120000,
|
||||
},
|
||||
(proxyRes) => {
|
||||
res.writeHead(proxyRes.statusCode, {
|
||||
'Content-Type': proxyRes.headers['content-type'] || 'application/json',
|
||||
'Access-Control-Allow-Origin': '*',
|
||||
})
|
||||
proxyRes.pipe(res)
|
||||
proxyRes.on('end', resolve)
|
||||
proxyRes.on('error', resolve)
|
||||
}
|
||||
)
|
||||
proxyReq.on('error', reject)
|
||||
proxyReq.on('timeout', () => {
|
||||
proxyReq.destroy()
|
||||
reject(new Error('timeout'))
|
||||
})
|
||||
proxyReq.write(body)
|
||||
proxyReq.end()
|
||||
})
|
||||
} catch {
|
||||
if (!res.headersSent) {
|
||||
res.writeHead(502, { 'Content-Type': 'application/json' })
|
||||
res.end(JSON.stringify({ error: 'Bridge unreachable' }))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
server.middlewares.use('/api/agent/message', proxyRequest('/api/agent/message'))
|
||||
server.middlewares.use('/api/tts', proxyRequest('/api/tts'))
|
||||
server.middlewares.use('/api/stt', proxyRequest('/api/stt'))
|
||||
|
||||
// Health check — direct to bridge
|
||||
server.middlewares.use('/api/health', async (req, res) => {
|
||||
try {
|
||||
const { default: http } = await import('http')
|
||||
const start = Date.now()
|
||||
await new Promise((resolve, reject) => {
|
||||
const reqObj = http.get('http://localhost:8081/', { timeout: 5000 }, (resp) => {
|
||||
resp.resume()
|
||||
resolve()
|
||||
})
|
||||
reqObj.on('error', reject)
|
||||
reqObj.on('timeout', () => { reqObj.destroy(); reject(new Error('timeout')) })
|
||||
})
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' })
|
||||
res.end(JSON.stringify({ status: 'online', responseTime: Date.now() - start }))
|
||||
} catch {
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' })
|
||||
res.end(JSON.stringify({ status: 'offline', responseTime: null }))
|
||||
}
|
||||
})
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
export default defineConfig({
|
||||
plugins: [
|
||||
bridgeProxyPlugin(),
|
||||
tailwindcss(),
|
||||
react(),
|
||||
],
|
||||
server: {
|
||||
host: '0.0.0.0',
|
||||
port: 5174,
|
||||
},
|
||||
})
|
||||
@@ -6,7 +6,7 @@
|
||||
|
||||
## Goal
|
||||
|
||||
Flash ESP32-S3-BOX-3 units with ESPHome. Each unit acts as a dumb room satellite: always-on mic, local wake word detection, audio playback, and an LVGL animated face showing assistant state. All intelligence stays on the Mac Mini.
|
||||
Flash ESP32-S3-BOX-3 units with ESPHome. Each unit acts as a dumb room satellite: always-on mic, on-device wake word detection, audio playback, and a display showing assistant state via static PNG face illustrations. All intelligence stays on the Mac Mini.
|
||||
|
||||
---
|
||||
|
||||
@@ -17,11 +17,12 @@ Flash ESP32-S3-BOX-3 units with ESPHome. Each unit acts as a dumb room satellite
|
||||
| SoC | ESP32-S3 (dual-core Xtensa, 240MHz) |
|
||||
| RAM | 512KB SRAM + 16MB PSRAM |
|
||||
| Flash | 16MB |
|
||||
| Display | 2.4" IPS LCD, 320×240, touchscreen |
|
||||
| Mic | Dual microphone array |
|
||||
| Speaker | Built-in 1W speaker |
|
||||
| Connectivity | WiFi 802.11b/g/n, BT 5.0 |
|
||||
| USB | USB-C (programming + power) |
|
||||
| Display | 2.4" IPS LCD, 320×240, touchscreen (ILI9xxx, model S3BOX) |
|
||||
| Audio ADC | ES7210 (dual mic array, 16kHz 16-bit) |
|
||||
| Audio DAC | ES8311 (speaker output, 48kHz 16-bit) |
|
||||
| Speaker | Built-in 1W |
|
||||
| Connectivity | WiFi 802.11b/g/n (2.4GHz only), BT 5.0 |
|
||||
| USB | USB-C (programming + power, native USB JTAG serial) |
|
||||
|
||||
---
|
||||
|
||||
@@ -29,273 +30,86 @@ Flash ESP32-S3-BOX-3 units with ESPHome. Each unit acts as a dumb room satellite
|
||||
|
||||
```
|
||||
ESP32-S3-BOX-3
|
||||
├── microWakeWord (on-device, always listening)
|
||||
│ └── triggers Wyoming Satellite on wake detection
|
||||
├── Wyoming Satellite
|
||||
│ ├── streams mic audio → Mac Mini Wyoming STT (port 10300)
|
||||
│ └── receives TTS audio ← Mac Mini Wyoming TTS (port 10301)
|
||||
├── LVGL Display
|
||||
│ └── animated face, driven by HA entity state
|
||||
├── micro_wake_word (on-device, always listening)
|
||||
│ └── "hey_jarvis" — triggers voice_assistant on wake detection
|
||||
├── voice_assistant (ESPHome component)
|
||||
│ ├── connects to Home Assistant via ESPHome API
|
||||
│ ├── HA routes audio → Mac Mini Wyoming STT (10.0.0.101:10300)
|
||||
│ ├── HA routes text → OpenClaw conversation agent (10.0.0.101:8081)
|
||||
│ └── HA routes response → Mac Mini Wyoming TTS (10.0.0.101:10301)
|
||||
├── Display (ili9xxx, model S3BOX, 320×240)
|
||||
│ └── static PNG faces per state (idle, listening, thinking, replying, error)
|
||||
└── ESPHome OTA
|
||||
└── firmware updates over WiFi
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Pin Map (ESP32-S3-BOX-3)
|
||||
|
||||
| Function | Pin(s) | Notes |
|
||||
|---|---|---|
|
||||
| I2S LRCLK | GPIO45 | strapping pin — warning ignored |
|
||||
| I2S BCLK | GPIO17 | |
|
||||
| I2S MCLK | GPIO2 | |
|
||||
| I2S DIN (mic) | GPIO16 | ES7210 ADC input |
|
||||
| I2S DOUT (speaker) | GPIO15 | ES8311 DAC output |
|
||||
| Speaker enable | GPIO46 | strapping pin — warning ignored |
|
||||
| I2C SCL | GPIO18 | audio codec control bus |
|
||||
| I2C SDA | GPIO8 | audio codec control bus |
|
||||
| SPI CLK (display) | GPIO7 | |
|
||||
| SPI MOSI (display) | GPIO6 | |
|
||||
| Display CS | GPIO5 | |
|
||||
| Display DC | GPIO4 | |
|
||||
| Display Reset | GPIO48 | inverted |
|
||||
| Backlight | GPIO47 | LEDC PWM |
|
||||
| Left top button | GPIO0 | strapping pin — mute toggle / factory reset |
|
||||
|
||||
---
|
||||
|
||||
## ESPHome Configuration
|
||||
|
||||
### Base Config Template
|
||||
|
||||
`esphome/base.yaml` — shared across all units:
|
||||
### Platform & Framework
|
||||
|
||||
```yaml
|
||||
esphome:
|
||||
name: homeai-${room}
|
||||
friendly_name: "HomeAI ${room_display}"
|
||||
platform: esp32
|
||||
board: esp32-s3-box-3
|
||||
esp32:
|
||||
board: esp32s3box
|
||||
flash_size: 16MB
|
||||
cpu_frequency: 240MHz
|
||||
framework:
|
||||
type: esp-idf
|
||||
sdkconfig_options:
|
||||
CONFIG_ESP32S3_DEFAULT_CPU_FREQ_240: "y"
|
||||
CONFIG_ESP32S3_DATA_CACHE_64KB: "y"
|
||||
CONFIG_ESP32S3_DATA_CACHE_LINE_64B: "y"
|
||||
|
||||
wifi:
|
||||
ssid: !secret wifi_ssid
|
||||
password: !secret wifi_password
|
||||
ap:
|
||||
ssid: "HomeAI Fallback"
|
||||
|
||||
api:
|
||||
encryption:
|
||||
key: !secret api_key
|
||||
|
||||
ota:
|
||||
password: !secret ota_password
|
||||
|
||||
logger:
|
||||
level: INFO
|
||||
psram:
|
||||
mode: octal
|
||||
speed: 80MHz
|
||||
```
|
||||
|
||||
### Room-Specific Config
|
||||
### Audio Stack
|
||||
|
||||
`esphome/s3-box-living-room.yaml`:
|
||||
Uses `i2s_audio` platform with external ADC/DAC codec chips:
|
||||
|
||||
```yaml
|
||||
substitutions:
|
||||
room: living-room
|
||||
room_display: "Living Room"
|
||||
mac_mini_ip: "192.168.1.x" # or Tailscale IP
|
||||
- **Microphone**: ES7210 ADC via I2S, 16kHz 16-bit mono
|
||||
- **Speaker**: ES8311 DAC via I2S, 48kHz 16-bit mono (left channel)
|
||||
- **Media player**: wraps speaker with volume control (min 50%, max 85%)
|
||||
|
||||
packages:
|
||||
base: !include base.yaml
|
||||
voice: !include voice.yaml
|
||||
display: !include display.yaml
|
||||
```
|
||||
### Wake Word
|
||||
|
||||
One file per room, only the substitutions change.
|
||||
On-device `micro_wake_word` component with `hey_jarvis` model. Can optionally be switched to Home Assistant streaming wake word via a selector entity.
|
||||
|
||||
### Voice / Wyoming Satellite — `esphome/voice.yaml`
|
||||
### Display
|
||||
|
||||
```yaml
|
||||
microphone:
|
||||
- platform: esp_adf
|
||||
id: mic
|
||||
`ili9xxx` platform with model `S3BOX`. Uses `update_interval: never` — display updates are triggered by scripts on voice assistant state changes. Static 320×240 PNG images for each state are compiled into firmware.
|
||||
|
||||
speaker:
|
||||
- platform: esp_adf
|
||||
id: spk
|
||||
### Voice Assistant
|
||||
|
||||
micro_wake_word:
|
||||
model: hey_jarvis # or custom model path
|
||||
on_wake_word_detected:
|
||||
- voice_assistant.start:
|
||||
|
||||
voice_assistant:
|
||||
microphone: mic
|
||||
speaker: spk
|
||||
noise_suppression_level: 2
|
||||
auto_gain: 31dBFS
|
||||
volume_multiplier: 2.0
|
||||
|
||||
on_listening:
|
||||
- display.page.show: page_listening
|
||||
- script.execute: animate_face_listening
|
||||
|
||||
on_stt_vad_end:
|
||||
- display.page.show: page_thinking
|
||||
- script.execute: animate_face_thinking
|
||||
|
||||
on_tts_start:
|
||||
- display.page.show: page_speaking
|
||||
- script.execute: animate_face_speaking
|
||||
|
||||
on_end:
|
||||
- display.page.show: page_idle
|
||||
- script.execute: animate_face_idle
|
||||
|
||||
on_error:
|
||||
- display.page.show: page_error
|
||||
- script.execute: animate_face_error
|
||||
```
|
||||
|
||||
**Note:** ESPHome's `voice_assistant` component connects to HA, which routes to Wyoming STT/TTS on the Mac Mini. This is the standard ESPHome → HA → Wyoming path.
|
||||
|
||||
### LVGL Display — `esphome/display.yaml`
|
||||
|
||||
```yaml
|
||||
display:
|
||||
- platform: ili9xxx
|
||||
model: ILI9341
|
||||
id: lcd
|
||||
cs_pin: GPIO5
|
||||
dc_pin: GPIO4
|
||||
reset_pin: GPIO48
|
||||
|
||||
touchscreen:
|
||||
- platform: tt21100
|
||||
id: touch
|
||||
|
||||
lvgl:
|
||||
displays:
|
||||
- lcd
|
||||
touchscreens:
|
||||
- touch
|
||||
|
||||
# Face widget — centered on screen
|
||||
widgets:
|
||||
- obj:
|
||||
id: face_container
|
||||
width: 320
|
||||
height: 240
|
||||
bg_color: 0x000000
|
||||
children:
|
||||
# Eyes (two circles)
|
||||
- obj:
|
||||
id: eye_left
|
||||
x: 90
|
||||
y: 90
|
||||
width: 50
|
||||
height: 50
|
||||
radius: 25
|
||||
bg_color: 0xFFFFFF
|
||||
- obj:
|
||||
id: eye_right
|
||||
x: 180
|
||||
y: 90
|
||||
width: 50
|
||||
height: 50
|
||||
radius: 25
|
||||
bg_color: 0xFFFFFF
|
||||
# Mouth (line/arc)
|
||||
- arc:
|
||||
id: mouth
|
||||
x: 110
|
||||
y: 160
|
||||
width: 100
|
||||
height: 40
|
||||
start_angle: 180
|
||||
end_angle: 360
|
||||
arc_color: 0xFFFFFF
|
||||
|
||||
pages:
|
||||
- id: page_idle
|
||||
- id: page_listening
|
||||
- id: page_thinking
|
||||
- id: page_speaking
|
||||
- id: page_error
|
||||
```
|
||||
|
||||
### LVGL Face State Animations — `esphome/animations.yaml`
|
||||
|
||||
```yaml
|
||||
script:
|
||||
- id: animate_face_idle
|
||||
then:
|
||||
- lvgl.widget.modify:
|
||||
id: eye_left
|
||||
height: 50 # normal open
|
||||
- lvgl.widget.modify:
|
||||
id: eye_right
|
||||
height: 50
|
||||
- lvgl.widget.modify:
|
||||
id: mouth
|
||||
arc_color: 0xFFFFFF
|
||||
|
||||
- id: animate_face_listening
|
||||
then:
|
||||
- lvgl.widget.modify:
|
||||
id: eye_left
|
||||
height: 60 # wider eyes
|
||||
- lvgl.widget.modify:
|
||||
id: eye_right
|
||||
height: 60
|
||||
- lvgl.widget.modify:
|
||||
id: mouth
|
||||
arc_color: 0x00BFFF # blue tint
|
||||
|
||||
- id: animate_face_thinking
|
||||
then:
|
||||
- lvgl.widget.modify:
|
||||
id: eye_left
|
||||
height: 20 # squinting
|
||||
- lvgl.widget.modify:
|
||||
id: eye_right
|
||||
height: 20
|
||||
|
||||
- id: animate_face_speaking
|
||||
then:
|
||||
- lvgl.widget.modify:
|
||||
id: mouth
|
||||
arc_color: 0x00FF88 # green speaking indicator
|
||||
|
||||
- id: animate_face_error
|
||||
then:
|
||||
- lvgl.widget.modify:
|
||||
id: eye_left
|
||||
bg_color: 0xFF2200 # red eyes
|
||||
- lvgl.widget.modify:
|
||||
id: eye_right
|
||||
bg_color: 0xFF2200
|
||||
```
|
||||
|
||||
> **Note:** True lip-sync animation (mouth moving with audio) is complex on ESP32. Phase 1: static states. Phase 2: amplitude-driven mouth height using speaker volume feedback.
|
||||
|
||||
---
|
||||
|
||||
## Secrets File
|
||||
|
||||
`esphome/secrets.yaml` (gitignored):
|
||||
|
||||
```yaml
|
||||
wifi_ssid: "YourNetwork"
|
||||
wifi_password: "YourPassword"
|
||||
api_key: "<32-byte base64 key>"
|
||||
ota_password: "YourOTAPassword"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Flash & Deployment Workflow
|
||||
|
||||
```bash
|
||||
# Install ESPHome
|
||||
pip install esphome
|
||||
|
||||
# Compile + flash via USB (first time)
|
||||
esphome run esphome/s3-box-living-room.yaml
|
||||
|
||||
# OTA update (subsequent)
|
||||
esphome upload esphome/s3-box-living-room.yaml --device <device-ip>
|
||||
|
||||
# View logs
|
||||
esphome logs esphome/s3-box-living-room.yaml
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Home Assistant Integration
|
||||
|
||||
After flashing:
|
||||
1. HA discovers ESP32 automatically via mDNS
|
||||
2. Add device in HA → Settings → Devices
|
||||
3. Assign Wyoming voice assistant pipeline to the device
|
||||
4. Set up room-specific automations (e.g., "Living Room" light control from that satellite)
|
||||
ESPHome's `voice_assistant` component connects to HA via the ESPHome native API (not directly to Wyoming). HA orchestrates the pipeline:
|
||||
1. Audio → Wyoming STT (Mac Mini) → text
|
||||
2. Text → OpenClaw conversation agent → response
|
||||
3. Response → Wyoming TTS (Mac Mini) → audio back to ESP32
|
||||
|
||||
---
|
||||
|
||||
@@ -303,43 +117,71 @@ After flashing:
|
||||
|
||||
```
|
||||
homeai-esp32/
|
||||
├── PLAN.md
|
||||
├── setup.sh # env check + flash/ota/logs commands
|
||||
└── esphome/
|
||||
├── base.yaml
|
||||
├── voice.yaml
|
||||
├── display.yaml
|
||||
├── animations.yaml
|
||||
├── s3-box-living-room.yaml
|
||||
├── s3-box-bedroom.yaml # template, fill in when hardware available
|
||||
├── s3-box-kitchen.yaml # template
|
||||
└── secrets.yaml # gitignored
|
||||
├── secrets.yaml # gitignored — WiFi + API key
|
||||
├── homeai-living-room.yaml # first unit (full config)
|
||||
├── homeai-bedroom.yaml # future: copy + change substitutions
|
||||
├── homeai-kitchen.yaml # future: copy + change substitutions
|
||||
└── illustrations/ # 320×240 PNG face images
|
||||
├── idle.png
|
||||
├── loading.png
|
||||
├── listening.png
|
||||
├── thinking.png
|
||||
├── replying.png
|
||||
├── error.png
|
||||
└── timer_finished.png
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Wake Word Decisions
|
||||
## ESPHome Environment
|
||||
|
||||
```bash
|
||||
# Dedicated venv (Python 3.12) — do NOT share with voice/whisper venvs
|
||||
~/homeai-esphome-env/bin/esphome version # ESPHome 2026.2.4+
|
||||
|
||||
# Quick commands
|
||||
cd ~/gitea/homeai/homeai-esp32
|
||||
~/homeai-esphome-env/bin/esphome run esphome/homeai-living-room.yaml # compile + flash
|
||||
~/homeai-esphome-env/bin/esphome logs esphome/homeai-living-room.yaml # stream logs
|
||||
|
||||
# Or use the setup script
|
||||
./setup.sh flash # compile + USB flash
|
||||
./setup.sh ota # compile + OTA update
|
||||
./setup.sh logs # stream device logs
|
||||
./setup.sh validate # check YAML without compiling
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Wake Word Options
|
||||
|
||||
| Option | Latency | Privacy | Effort |
|
||||
|---|---|---|---|
|
||||
| `hey_jarvis` (built-in microWakeWord) | ~200ms | On-device | Zero |
|
||||
| `hey_jarvis` (built-in micro_wake_word) | ~200ms | On-device | Zero |
|
||||
| Custom word (trained model) | ~200ms | On-device | High — requires 50+ recordings |
|
||||
| Mac Mini openWakeWord (stream audio) | ~500ms | On Mac | Medium |
|
||||
| HA streaming wake word | ~500ms | On Mac Mini | Medium — stream all audio |
|
||||
|
||||
**Recommendation:** Start with `hey_jarvis`. Train a custom word (character's name) once character name is finalised.
|
||||
**Current**: `hey_jarvis` on-device. Train a custom word (character's name) once finalised.
|
||||
|
||||
---
|
||||
|
||||
## Implementation Steps
|
||||
|
||||
- [ ] Install ESPHome: `pip install esphome`
|
||||
- [ ] Write `esphome/secrets.yaml` (gitignored)
|
||||
- [ ] Write `base.yaml`, `voice.yaml`, `display.yaml`, `animations.yaml`
|
||||
- [ ] Write `s3-box-living-room.yaml` for first unit
|
||||
- [ ] Flash first unit via USB: `esphome run s3-box-living-room.yaml`
|
||||
- [ ] Verify unit appears in HA device list
|
||||
- [ ] Assign Wyoming voice pipeline to unit in HA
|
||||
- [ ] Test: speak wake word → transcription → LLM response → spoken reply
|
||||
- [ ] Test: LVGL face cycles through idle → listening → thinking → speaking
|
||||
- [ ] Verify OTA update works: change LVGL color, deploy wirelessly
|
||||
- [x] Install ESPHome in `~/homeai-esphome-env` (Python 3.12)
|
||||
- [x] Write `esphome/secrets.yaml` (gitignored)
|
||||
- [x] Write `homeai-living-room.yaml` (based on official S3-BOX-3 reference config)
|
||||
- [x] Generate placeholder face illustrations (7 PNGs, 320×240)
|
||||
- [x] Write `setup.sh` with flash/ota/logs/validate commands
|
||||
- [x] Write `deploy.sh` with OTA deploy, image management, multi-unit support
|
||||
- [x] Flash first unit via USB (living room)
|
||||
- [x] Verify unit appears in HA device list
|
||||
- [x] Assign Wyoming voice pipeline to unit in HA
|
||||
- [x] Test: speak wake word → transcription → LLM response → spoken reply
|
||||
- [x] Test: display cycles through idle → listening → thinking → replying
|
||||
- [x] Verify OTA update works: change config, deploy wirelessly
|
||||
- [ ] Write config templates for remaining rooms (bedroom, kitchen)
|
||||
- [ ] Flash remaining units, verify each works independently
|
||||
- [ ] Document final MAC address → room name mapping
|
||||
@@ -351,7 +193,17 @@ homeai-esp32/
|
||||
- [ ] Wake word "hey jarvis" triggers pipeline reliably from 3m distance
|
||||
- [ ] STT transcription accuracy >90% for clear speech in quiet room
|
||||
- [ ] TTS audio plays clearly through ESP32 speaker
|
||||
- [ ] LVGL face shows correct state for idle / listening / thinking / speaking / error
|
||||
- [ ] Display shows correct state for idle / listening / thinking / replying / error / muted
|
||||
- [ ] OTA firmware updates work without USB cable
|
||||
- [ ] Unit reconnects automatically after WiFi drop
|
||||
- [ ] Unit survives power cycle and resumes normal operation
|
||||
|
||||
---
|
||||
|
||||
## Known Constraints
|
||||
|
||||
- **Memory**: voice_assistant + micro_wake_word + display is near the limit. Do NOT add Bluetooth or LVGL widgets — they will cause crashes.
|
||||
- **WiFi**: 2.4GHz only. 5GHz networks are not supported.
|
||||
- **Speaker**: 1W built-in. Volume capped at 85% to avoid distortion.
|
||||
- **Display**: Static PNGs compiled into firmware. To change images, reflash via OTA (~1-2 min).
|
||||
- **First compile**: Downloads ESP-IDF toolchain (~500MB), takes 5-10 minutes. Incremental builds are 1-2 minutes.
|
||||
|
||||
244
homeai-esp32/deploy.sh
Executable file
244
homeai-esp32/deploy.sh
Executable file
@@ -0,0 +1,244 @@
|
||||
#!/usr/bin/env bash
|
||||
# homeai-esp32/deploy.sh — Quick OTA deploy for ESP32-S3-BOX-3 satellites
|
||||
#
|
||||
# Usage:
|
||||
# ./deploy.sh — deploy config + images to living room (default)
|
||||
# ./deploy.sh bedroom — deploy to bedroom unit
|
||||
# ./deploy.sh --images-only — deploy existing PNGs from illustrations/ (no regen)
|
||||
# ./deploy.sh --regen-images — regenerate placeholder PNGs then deploy
|
||||
# ./deploy.sh --validate — validate config without deploying
|
||||
# ./deploy.sh --all — deploy to all configured units
|
||||
#
|
||||
# Images are compiled into firmware, so any PNG changes require a reflash.
|
||||
# To use custom images: drop 320x240 PNGs into esphome/illustrations/ then ./deploy.sh
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
ESPHOME_DIR="${SCRIPT_DIR}/esphome"
|
||||
ESPHOME_VENV="${HOME}/homeai-esphome-env"
|
||||
ESPHOME="${ESPHOME_VENV}/bin/esphome"
|
||||
PYTHON="${ESPHOME_VENV}/bin/python3"
|
||||
ILLUSTRATIONS_DIR="${ESPHOME_DIR}/illustrations"
|
||||
|
||||
# Colors
|
||||
RED='\033[0;31m'
|
||||
GREEN='\033[0;32m'
|
||||
YELLOW='\033[1;33m'
|
||||
BLUE='\033[0;34m'
|
||||
CYAN='\033[0;36m'
|
||||
NC='\033[0m'
|
||||
|
||||
log_info() { echo -e "${BLUE}[INFO]${NC} $*"; }
|
||||
log_ok() { echo -e "${GREEN}[OK]${NC} $*"; }
|
||||
log_warn() { echo -e "${YELLOW}[WARN]${NC} $*"; }
|
||||
log_error() { echo -e "${RED}[ERROR]${NC} $*"; exit 1; }
|
||||
log_step() { echo -e "${CYAN}[STEP]${NC} $*"; }
|
||||
|
||||
# ─── Available units ──────────────────────────────────────────────────────────
|
||||
|
||||
UNIT_NAMES=(living-room bedroom kitchen)
|
||||
DEFAULT_UNIT="living-room"
|
||||
|
||||
unit_config() {
|
||||
case "$1" in
|
||||
living-room) echo "homeai-living-room.yaml" ;;
|
||||
bedroom) echo "homeai-bedroom.yaml" ;;
|
||||
kitchen) echo "homeai-kitchen.yaml" ;;
|
||||
*) echo "" ;;
|
||||
esac
|
||||
}
|
||||
|
||||
unit_list() {
|
||||
echo "${UNIT_NAMES[*]}"
|
||||
}
|
||||
|
||||
# ─── Face image generator ────────────────────────────────────────────────────
|
||||
|
||||
generate_faces() {
|
||||
log_step "Generating face illustrations (320x240 PNG)..."
|
||||
"${PYTHON}" << 'PYEOF'
|
||||
from PIL import Image, ImageDraw
|
||||
import os
|
||||
|
||||
WIDTH, HEIGHT = 320, 240
|
||||
OUT = os.environ.get("ILLUSTRATIONS_DIR", "esphome/illustrations")
|
||||
|
||||
def draw_face(draw, eye_color, mouth_color, eye_height=40, eye_y=80, mouth_style="smile"):
|
||||
ex1, ey1 = 95, eye_y
|
||||
draw.ellipse([ex1-25, ey1-eye_height//2, ex1+25, ey1+eye_height//2], fill=eye_color)
|
||||
ex2, ey2 = 225, eye_y
|
||||
draw.ellipse([ex2-25, ey2-eye_height//2, ex2+25, ey2+eye_height//2], fill=eye_color)
|
||||
if mouth_style == "smile":
|
||||
draw.arc([110, 140, 210, 200], start=0, end=180, fill=mouth_color, width=3)
|
||||
elif mouth_style == "open":
|
||||
draw.ellipse([135, 150, 185, 190], fill=mouth_color)
|
||||
elif mouth_style == "flat":
|
||||
draw.line([120, 170, 200, 170], fill=mouth_color, width=3)
|
||||
elif mouth_style == "frown":
|
||||
draw.arc([110, 160, 210, 220], start=180, end=360, fill=mouth_color, width=3)
|
||||
|
||||
states = {
|
||||
"idle": {"eye_color": "#FFFFFF", "mouth_color": "#FFFFFF", "eye_height": 40, "mouth_style": "smile"},
|
||||
"loading": {"eye_color": "#6366F1", "mouth_color": "#6366F1", "eye_height": 30, "mouth_style": "flat"},
|
||||
"listening": {"eye_color": "#00BFFF", "mouth_color": "#00BFFF", "eye_height": 50, "mouth_style": "open"},
|
||||
"thinking": {"eye_color": "#A78BFA", "mouth_color": "#A78BFA", "eye_height": 20, "mouth_style": "flat"},
|
||||
"replying": {"eye_color": "#10B981", "mouth_color": "#10B981", "eye_height": 40, "mouth_style": "open"},
|
||||
"error": {"eye_color": "#EF4444", "mouth_color": "#EF4444", "eye_height": 40, "mouth_style": "frown"},
|
||||
"timer_finished": {"eye_color": "#F59E0B", "mouth_color": "#F59E0B", "eye_height": 50, "mouth_style": "smile"},
|
||||
}
|
||||
|
||||
os.makedirs(OUT, exist_ok=True)
|
||||
for name, p in states.items():
|
||||
img = Image.new("RGBA", (WIDTH, HEIGHT), (0, 0, 0, 255))
|
||||
draw = ImageDraw.Draw(img)
|
||||
draw_face(draw, p["eye_color"], p["mouth_color"], p["eye_height"], mouth_style=p["mouth_style"])
|
||||
img.save(f"{OUT}/{name}.png")
|
||||
print(f" {name}.png")
|
||||
PYEOF
|
||||
log_ok "Generated 7 face illustrations"
|
||||
}
|
||||
|
||||
# ─── Check existing images ───────────────────────────────────────────────────
|
||||
|
||||
REQUIRED_IMAGES=(idle loading listening thinking replying error timer_finished)
|
||||
|
||||
check_images() {
|
||||
local missing=()
|
||||
for name in "${REQUIRED_IMAGES[@]}"; do
|
||||
if [[ ! -f "${ILLUSTRATIONS_DIR}/${name}.png" ]]; then
|
||||
missing+=("${name}.png")
|
||||
fi
|
||||
done
|
||||
|
||||
if [[ ${#missing[@]} -gt 0 ]]; then
|
||||
log_error "Missing illustrations: ${missing[*]}
|
||||
Place 320x240 PNGs in ${ILLUSTRATIONS_DIR}/ or use --regen-images to generate placeholders."
|
||||
fi
|
||||
|
||||
log_ok "All ${#REQUIRED_IMAGES[@]} illustrations present in illustrations/"
|
||||
for name in "${REQUIRED_IMAGES[@]}"; do
|
||||
local size
|
||||
size=$(wc -c < "${ILLUSTRATIONS_DIR}/${name}.png" | tr -d ' ')
|
||||
echo -e " ${name}.png (${size} bytes)"
|
||||
done
|
||||
}
|
||||
|
||||
# ─── Deploy to a single unit ─────────────────────────────────────────────────
|
||||
|
||||
deploy_unit() {
|
||||
local unit_name="$1"
|
||||
local config
|
||||
config="$(unit_config "$unit_name")"
|
||||
|
||||
if [[ -z "$config" ]]; then
|
||||
log_error "Unknown unit: ${unit_name}. Available: $(unit_list)"
|
||||
fi
|
||||
|
||||
local config_path="${ESPHOME_DIR}/${config}"
|
||||
if [[ ! -f "$config_path" ]]; then
|
||||
log_error "Config not found: ${config_path}"
|
||||
fi
|
||||
|
||||
log_step "Validating ${config}..."
|
||||
cd "${ESPHOME_DIR}"
|
||||
"${ESPHOME}" config "${config}" > /dev/null
|
||||
log_ok "Config valid"
|
||||
|
||||
log_step "Compiling + OTA deploying ${config}..."
|
||||
"${ESPHOME}" run "${config}" --device OTA 2>&1
|
||||
log_ok "Deployed to ${unit_name}"
|
||||
}
|
||||
|
||||
# ─── Main ─────────────────────────────────────────────────────────────────────
|
||||
|
||||
IMAGES_ONLY=false
|
||||
REGEN_IMAGES=false
|
||||
VALIDATE_ONLY=false
|
||||
DEPLOY_ALL=false
|
||||
TARGET="${DEFAULT_UNIT}"
|
||||
|
||||
while [[ $# -gt 0 ]]; do
|
||||
case "$1" in
|
||||
--images-only) IMAGES_ONLY=true; shift ;;
|
||||
--regen-images) REGEN_IMAGES=true; shift ;;
|
||||
--validate) VALIDATE_ONLY=true; shift ;;
|
||||
--all) DEPLOY_ALL=true; shift ;;
|
||||
--help|-h)
|
||||
echo "Usage: $0 [unit-name] [--images-only] [--regen-images] [--validate] [--all]"
|
||||
echo ""
|
||||
echo "Units: $(unit_list)"
|
||||
echo ""
|
||||
echo "Options:"
|
||||
echo " --images-only Deploy existing PNGs from illustrations/ (for custom images)"
|
||||
echo " --regen-images Regenerate placeholder face PNGs then deploy"
|
||||
echo " --validate Validate config without deploying"
|
||||
echo " --all Deploy to all configured units"
|
||||
echo ""
|
||||
echo "Examples:"
|
||||
echo " $0 # deploy config to living-room"
|
||||
echo " $0 bedroom # deploy to bedroom"
|
||||
echo " $0 --images-only # deploy with current images (custom or generated)"
|
||||
echo " $0 --regen-images # regenerate placeholder faces + deploy"
|
||||
echo " $0 --all # deploy to all units"
|
||||
echo ""
|
||||
echo "Custom images: drop 320x240 PNGs into esphome/illustrations/"
|
||||
echo "Required files: ${REQUIRED_IMAGES[*]}"
|
||||
exit 0
|
||||
;;
|
||||
*)
|
||||
if [[ -n "$(unit_config "$1")" ]]; then
|
||||
TARGET="$1"
|
||||
else
|
||||
log_error "Unknown option or unit: $1. Use --help for usage."
|
||||
fi
|
||||
shift
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
# Check ESPHome
|
||||
if [[ ! -x "${ESPHOME}" ]]; then
|
||||
log_error "ESPHome not found at ${ESPHOME}. Run setup.sh first."
|
||||
fi
|
||||
|
||||
# Regenerate placeholder images if requested
|
||||
if $REGEN_IMAGES; then
|
||||
export ILLUSTRATIONS_DIR
|
||||
generate_faces
|
||||
fi
|
||||
|
||||
# Check existing images if deploying with --images-only (or always before deploy)
|
||||
if $IMAGES_ONLY; then
|
||||
check_images
|
||||
fi
|
||||
|
||||
# Validate only
|
||||
if $VALIDATE_ONLY; then
|
||||
cd "${ESPHOME_DIR}"
|
||||
for unit_name in "${UNIT_NAMES[@]}"; do
|
||||
config="$(unit_config "$unit_name")"
|
||||
if [[ -f "${config}" ]]; then
|
||||
log_step "Validating ${config}..."
|
||||
"${ESPHOME}" config "${config}" > /dev/null && log_ok "${config} valid" || log_warn "${config} invalid"
|
||||
fi
|
||||
done
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# Deploy
|
||||
if $DEPLOY_ALL; then
|
||||
for unit_name in "${UNIT_NAMES[@]}"; do
|
||||
config="$(unit_config "$unit_name")"
|
||||
if [[ -f "${ESPHOME_DIR}/${config}" ]]; then
|
||||
deploy_unit "$unit_name"
|
||||
else
|
||||
log_warn "Skipping ${unit_name} — ${config} not found"
|
||||
fi
|
||||
done
|
||||
else
|
||||
deploy_unit "$TARGET"
|
||||
fi
|
||||
|
||||
echo ""
|
||||
log_ok "Deploy complete!"
|
||||
5
homeai-esp32/esphome/.gitignore
vendored
Normal file
5
homeai-esp32/esphome/.gitignore
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
# Gitignore settings for ESPHome
|
||||
# This is an example and may include too much for your use-case.
|
||||
# You can modify this file to suit your needs.
|
||||
/.esphome/
|
||||
/secrets.yaml
|
||||
865
homeai-esp32/esphome/homeai-living-room.yaml
Normal file
865
homeai-esp32/esphome/homeai-living-room.yaml
Normal file
@@ -0,0 +1,865 @@
|
||||
---
|
||||
# HomeAI Living Room Satellite — ESP32-S3-BOX-3
|
||||
# Based on official ESPHome voice assistant config
|
||||
# https://github.com/esphome/wake-word-voice-assistants
|
||||
|
||||
substitutions:
|
||||
name: homeai-living-room
|
||||
friendly_name: HomeAI Living Room
|
||||
|
||||
# Face illustrations — compiled into firmware (320x240 PNG)
|
||||
loading_illustration_file: illustrations/loading.png
|
||||
idle_illustration_file: illustrations/idle.png
|
||||
listening_illustration_file: illustrations/listening.png
|
||||
thinking_illustration_file: illustrations/thinking.png
|
||||
replying_illustration_file: illustrations/replying.png
|
||||
error_illustration_file: illustrations/error.png
|
||||
timer_finished_illustration_file: illustrations/timer_finished.png
|
||||
|
||||
# Dark background for all states (matches HomeAI dashboard theme)
|
||||
loading_illustration_background_color: "000000"
|
||||
idle_illustration_background_color: "000000"
|
||||
listening_illustration_background_color: "000000"
|
||||
thinking_illustration_background_color: "000000"
|
||||
replying_illustration_background_color: "000000"
|
||||
error_illustration_background_color: "000000"
|
||||
|
||||
voice_assist_idle_phase_id: "1"
|
||||
voice_assist_listening_phase_id: "2"
|
||||
voice_assist_thinking_phase_id: "3"
|
||||
voice_assist_replying_phase_id: "4"
|
||||
voice_assist_not_ready_phase_id: "10"
|
||||
voice_assist_error_phase_id: "11"
|
||||
voice_assist_muted_phase_id: "12"
|
||||
voice_assist_timer_finished_phase_id: "20"
|
||||
|
||||
font_glyphsets: "GF_Latin_Core"
|
||||
font_family: Figtree
|
||||
|
||||
esphome:
|
||||
name: ${name}
|
||||
friendly_name: ${friendly_name}
|
||||
min_version: 2025.5.0
|
||||
name_add_mac_suffix: false
|
||||
on_boot:
|
||||
priority: 600
|
||||
then:
|
||||
- script.execute: draw_display
|
||||
- delay: 30s
|
||||
- if:
|
||||
condition:
|
||||
lambda: return id(init_in_progress);
|
||||
then:
|
||||
- lambda: id(init_in_progress) = false;
|
||||
- script.execute: draw_display
|
||||
|
||||
esp32:
|
||||
board: esp32s3box
|
||||
flash_size: 16MB
|
||||
cpu_frequency: 240MHz
|
||||
framework:
|
||||
type: esp-idf
|
||||
sdkconfig_options:
|
||||
CONFIG_ESP32S3_DEFAULT_CPU_FREQ_240: "y"
|
||||
CONFIG_ESP32S3_DATA_CACHE_64KB: "y"
|
||||
CONFIG_ESP32S3_DATA_CACHE_LINE_64B: "y"
|
||||
|
||||
psram:
|
||||
mode: octal
|
||||
speed: 80MHz
|
||||
|
||||
wifi:
|
||||
ssid: !secret wifi_ssid
|
||||
password: !secret wifi_password
|
||||
ap:
|
||||
ssid: "HomeAI Fallback"
|
||||
on_connect:
|
||||
- script.execute: draw_display
|
||||
on_disconnect:
|
||||
- script.execute: draw_display
|
||||
|
||||
captive_portal:
|
||||
|
||||
api:
|
||||
encryption:
|
||||
key: !secret api_key
|
||||
# Prevent device from rebooting if HA connection drops temporarily
|
||||
reboot_timeout: 0s
|
||||
on_client_connected:
|
||||
- script.execute: draw_display
|
||||
on_client_disconnected:
|
||||
# Debounce: wait 5s before showing "HA not found" to avoid flicker on brief drops
|
||||
- delay: 5s
|
||||
- if:
|
||||
condition:
|
||||
not:
|
||||
api.connected:
|
||||
then:
|
||||
- script.execute: draw_display
|
||||
|
||||
ota:
|
||||
- platform: esphome
|
||||
id: ota_esphome
|
||||
|
||||
logger:
|
||||
hardware_uart: USB_SERIAL_JTAG
|
||||
|
||||
button:
|
||||
- platform: factory_reset
|
||||
id: factory_reset_btn
|
||||
internal: true
|
||||
|
||||
binary_sensor:
|
||||
- platform: gpio
|
||||
pin:
|
||||
number: GPIO0
|
||||
ignore_strapping_warning: true
|
||||
mode: INPUT_PULLUP
|
||||
inverted: true
|
||||
id: left_top_button
|
||||
internal: true
|
||||
on_multi_click:
|
||||
# Short press: dismiss timer / toggle mute
|
||||
- timing:
|
||||
- ON for at least 50ms
|
||||
- OFF for at least 50ms
|
||||
then:
|
||||
- if:
|
||||
condition:
|
||||
switch.is_on: timer_ringing
|
||||
then:
|
||||
- switch.turn_off: timer_ringing
|
||||
else:
|
||||
- switch.toggle: mute
|
||||
# Long press (10s): factory reset
|
||||
- timing:
|
||||
- ON for at least 10s
|
||||
then:
|
||||
- button.press: factory_reset_btn
|
||||
|
||||
# --- Display backlight ---
|
||||
|
||||
output:
|
||||
- platform: ledc
|
||||
pin: GPIO47
|
||||
id: backlight_output
|
||||
|
||||
light:
|
||||
- platform: monochromatic
|
||||
id: led
|
||||
name: Screen
|
||||
icon: "mdi:television"
|
||||
entity_category: config
|
||||
output: backlight_output
|
||||
restore_mode: RESTORE_DEFAULT_ON
|
||||
default_transition_length: 250ms
|
||||
|
||||
# --- Audio hardware ---
|
||||
|
||||
i2c:
|
||||
scl: GPIO18
|
||||
sda: GPIO8
|
||||
|
||||
i2s_audio:
|
||||
- id: i2s_audio_bus
|
||||
i2s_lrclk_pin:
|
||||
number: GPIO45
|
||||
ignore_strapping_warning: true
|
||||
i2s_bclk_pin: GPIO17
|
||||
i2s_mclk_pin: GPIO2
|
||||
|
||||
audio_adc:
|
||||
- platform: es7210
|
||||
id: es7210_adc
|
||||
bits_per_sample: 16bit
|
||||
sample_rate: 16000
|
||||
|
||||
audio_dac:
|
||||
- platform: es8311
|
||||
id: es8311_dac
|
||||
bits_per_sample: 16bit
|
||||
sample_rate: 48000
|
||||
|
||||
microphone:
|
||||
- platform: i2s_audio
|
||||
id: box_mic
|
||||
sample_rate: 16000
|
||||
i2s_din_pin: GPIO16
|
||||
bits_per_sample: 16bit
|
||||
adc_type: external
|
||||
|
||||
speaker:
|
||||
- platform: i2s_audio
|
||||
id: box_speaker
|
||||
i2s_dout_pin: GPIO15
|
||||
dac_type: external
|
||||
sample_rate: 48000
|
||||
bits_per_sample: 16bit
|
||||
channel: left
|
||||
audio_dac: es8311_dac
|
||||
buffer_duration: 100ms
|
||||
|
||||
media_player:
|
||||
- platform: speaker
|
||||
name: None
|
||||
id: speaker_media_player
|
||||
volume_min: 0.5
|
||||
volume_max: 0.85
|
||||
announcement_pipeline:
|
||||
speaker: box_speaker
|
||||
format: FLAC
|
||||
sample_rate: 48000
|
||||
num_channels: 1
|
||||
files:
|
||||
- id: timer_finished_sound
|
||||
file: https://github.com/esphome/home-assistant-voice-pe/raw/dev/sounds/timer_finished.flac
|
||||
on_announcement:
|
||||
- if:
|
||||
condition:
|
||||
- microphone.is_capturing:
|
||||
then:
|
||||
- script.execute: stop_wake_word
|
||||
- if:
|
||||
condition:
|
||||
- lambda: return id(wake_word_engine_location).current_option() == "In Home Assistant";
|
||||
then:
|
||||
- wait_until:
|
||||
- not:
|
||||
voice_assistant.is_running:
|
||||
- if:
|
||||
condition:
|
||||
not:
|
||||
voice_assistant.is_running:
|
||||
then:
|
||||
- lambda: id(voice_assistant_phase) = ${voice_assist_muted_phase_id};
|
||||
- script.execute: draw_display
|
||||
on_idle:
|
||||
- if:
|
||||
condition:
|
||||
not:
|
||||
voice_assistant.is_running:
|
||||
then:
|
||||
- script.execute: start_wake_word
|
||||
- script.execute: set_idle_or_mute_phase
|
||||
- script.execute: draw_display
|
||||
|
||||
# --- Wake word (on-device) ---
|
||||
|
||||
micro_wake_word:
|
||||
id: mww
|
||||
models:
|
||||
- hey_jarvis
|
||||
on_wake_word_detected:
|
||||
- voice_assistant.start:
|
||||
wake_word: !lambda return wake_word;
|
||||
|
||||
# --- Voice assistant ---
|
||||
|
||||
voice_assistant:
|
||||
id: va
|
||||
microphone: box_mic
|
||||
media_player: speaker_media_player
|
||||
micro_wake_word: mww
|
||||
noise_suppression_level: 2
|
||||
auto_gain: 31dBFS
|
||||
volume_multiplier: 2.0
|
||||
on_listening:
|
||||
- lambda: id(voice_assistant_phase) = ${voice_assist_listening_phase_id};
|
||||
- text_sensor.template.publish:
|
||||
id: text_request
|
||||
state: "..."
|
||||
- text_sensor.template.publish:
|
||||
id: text_response
|
||||
state: "..."
|
||||
- script.execute: draw_display
|
||||
on_stt_vad_end:
|
||||
- lambda: id(voice_assistant_phase) = ${voice_assist_thinking_phase_id};
|
||||
- script.execute: draw_display
|
||||
on_stt_end:
|
||||
- text_sensor.template.publish:
|
||||
id: text_request
|
||||
state: !lambda return x;
|
||||
- script.execute: draw_display
|
||||
on_tts_start:
|
||||
- text_sensor.template.publish:
|
||||
id: text_response
|
||||
state: !lambda return x;
|
||||
- lambda: id(voice_assistant_phase) = ${voice_assist_replying_phase_id};
|
||||
- script.execute: draw_display
|
||||
on_end:
|
||||
- wait_until:
|
||||
condition:
|
||||
- media_player.is_announcing:
|
||||
timeout: 0.5s
|
||||
- wait_until:
|
||||
- and:
|
||||
- not:
|
||||
media_player.is_announcing:
|
||||
- not:
|
||||
speaker.is_playing:
|
||||
- if:
|
||||
condition:
|
||||
- lambda: return id(wake_word_engine_location).current_option() == "On device";
|
||||
then:
|
||||
- lambda: id(va).set_use_wake_word(false);
|
||||
- micro_wake_word.start:
|
||||
- script.execute: set_idle_or_mute_phase
|
||||
- script.execute: draw_display
|
||||
- text_sensor.template.publish:
|
||||
id: text_request
|
||||
state: ""
|
||||
- text_sensor.template.publish:
|
||||
id: text_response
|
||||
state: ""
|
||||
on_error:
|
||||
- if:
|
||||
condition:
|
||||
lambda: return !id(init_in_progress);
|
||||
then:
|
||||
- lambda: id(voice_assistant_phase) = ${voice_assist_error_phase_id};
|
||||
- script.execute: draw_display
|
||||
- delay: 1s
|
||||
- if:
|
||||
condition:
|
||||
switch.is_off: mute
|
||||
then:
|
||||
- lambda: id(voice_assistant_phase) = ${voice_assist_idle_phase_id};
|
||||
else:
|
||||
- lambda: id(voice_assistant_phase) = ${voice_assist_muted_phase_id};
|
||||
- script.execute: draw_display
|
||||
on_client_connected:
|
||||
- lambda: id(init_in_progress) = false;
|
||||
- script.execute: start_wake_word
|
||||
- script.execute: set_idle_or_mute_phase
|
||||
- script.execute: draw_display
|
||||
on_client_disconnected:
|
||||
- script.execute: stop_wake_word
|
||||
- lambda: id(voice_assistant_phase) = ${voice_assist_not_ready_phase_id};
|
||||
- script.execute: draw_display
|
||||
on_timer_started:
|
||||
- script.execute: draw_display
|
||||
on_timer_cancelled:
|
||||
- script.execute: draw_display
|
||||
on_timer_updated:
|
||||
- script.execute: draw_display
|
||||
on_timer_tick:
|
||||
- script.execute: draw_display
|
||||
on_timer_finished:
|
||||
- switch.turn_on: timer_ringing
|
||||
- wait_until:
|
||||
media_player.is_announcing:
|
||||
- lambda: id(voice_assistant_phase) = ${voice_assist_timer_finished_phase_id};
|
||||
- script.execute: draw_display
|
||||
|
||||
# --- Scripts ---
|
||||
|
||||
script:
|
||||
- id: draw_display
|
||||
then:
|
||||
- if:
|
||||
condition:
|
||||
lambda: return !id(init_in_progress);
|
||||
then:
|
||||
- if:
|
||||
condition:
|
||||
wifi.connected:
|
||||
then:
|
||||
- if:
|
||||
condition:
|
||||
api.connected:
|
||||
then:
|
||||
- lambda: |
|
||||
switch(id(voice_assistant_phase)) {
|
||||
case ${voice_assist_listening_phase_id}:
|
||||
id(s3_box_lcd).show_page(listening_page);
|
||||
id(s3_box_lcd).update();
|
||||
break;
|
||||
case ${voice_assist_thinking_phase_id}:
|
||||
id(s3_box_lcd).show_page(thinking_page);
|
||||
id(s3_box_lcd).update();
|
||||
break;
|
||||
case ${voice_assist_replying_phase_id}:
|
||||
id(s3_box_lcd).show_page(replying_page);
|
||||
id(s3_box_lcd).update();
|
||||
break;
|
||||
case ${voice_assist_error_phase_id}:
|
||||
id(s3_box_lcd).show_page(error_page);
|
||||
id(s3_box_lcd).update();
|
||||
break;
|
||||
case ${voice_assist_muted_phase_id}:
|
||||
id(s3_box_lcd).show_page(muted_page);
|
||||
id(s3_box_lcd).update();
|
||||
break;
|
||||
case ${voice_assist_not_ready_phase_id}:
|
||||
id(s3_box_lcd).show_page(no_ha_page);
|
||||
id(s3_box_lcd).update();
|
||||
break;
|
||||
case ${voice_assist_timer_finished_phase_id}:
|
||||
id(s3_box_lcd).show_page(timer_finished_page);
|
||||
id(s3_box_lcd).update();
|
||||
break;
|
||||
default:
|
||||
id(s3_box_lcd).show_page(idle_page);
|
||||
id(s3_box_lcd).update();
|
||||
}
|
||||
else:
|
||||
- display.page.show: no_ha_page
|
||||
- component.update: s3_box_lcd
|
||||
else:
|
||||
- display.page.show: no_wifi_page
|
||||
- component.update: s3_box_lcd
|
||||
else:
|
||||
- display.page.show: initializing_page
|
||||
- component.update: s3_box_lcd
|
||||
|
||||
- id: fetch_first_active_timer
|
||||
then:
|
||||
- lambda: |
|
||||
const auto &timers = id(va).get_timers();
|
||||
auto output_timer = timers.begin()->second;
|
||||
for (const auto &timer : timers) {
|
||||
if (timer.second.is_active && timer.second.seconds_left <= output_timer.seconds_left) {
|
||||
output_timer = timer.second;
|
||||
}
|
||||
}
|
||||
id(global_first_active_timer) = output_timer;
|
||||
|
||||
- id: check_if_timers_active
|
||||
then:
|
||||
- lambda: |
|
||||
const auto &timers = id(va).get_timers();
|
||||
bool output = false;
|
||||
for (const auto &timer : timers) {
|
||||
if (timer.second.is_active) { output = true; }
|
||||
}
|
||||
id(global_is_timer_active) = output;
|
||||
|
||||
- id: fetch_first_timer
|
||||
then:
|
||||
- lambda: |
|
||||
const auto &timers = id(va).get_timers();
|
||||
auto output_timer = timers.begin()->second;
|
||||
for (const auto &timer : timers) {
|
||||
if (timer.second.seconds_left <= output_timer.seconds_left) {
|
||||
output_timer = timer.second;
|
||||
}
|
||||
}
|
||||
id(global_first_timer) = output_timer;
|
||||
|
||||
- id: check_if_timers
|
||||
then:
|
||||
- lambda: |
|
||||
const auto &timers = id(va).get_timers();
|
||||
bool output = false;
|
||||
for (const auto &timer : timers) {
|
||||
if (timer.second.is_active) { output = true; }
|
||||
}
|
||||
id(global_is_timer) = output;
|
||||
|
||||
- id: draw_timer_timeline
|
||||
then:
|
||||
- lambda: |
|
||||
id(check_if_timers_active).execute();
|
||||
id(check_if_timers).execute();
|
||||
if (id(global_is_timer_active)){
|
||||
id(fetch_first_active_timer).execute();
|
||||
int active_pixels = round( 320 * id(global_first_active_timer).seconds_left / max(id(global_first_active_timer).total_seconds, static_cast<uint32_t>(1)) );
|
||||
if (active_pixels > 0){
|
||||
id(s3_box_lcd).filled_rectangle(0, 225, 320, 15, Color::WHITE);
|
||||
id(s3_box_lcd).filled_rectangle(0, 226, active_pixels, 13, id(active_timer_color));
|
||||
}
|
||||
} else if (id(global_is_timer)){
|
||||
id(fetch_first_timer).execute();
|
||||
int active_pixels = round( 320 * id(global_first_timer).seconds_left / max(id(global_first_timer).total_seconds, static_cast<uint32_t>(1)));
|
||||
if (active_pixels > 0){
|
||||
id(s3_box_lcd).filled_rectangle(0, 225, 320, 15, Color::WHITE);
|
||||
id(s3_box_lcd).filled_rectangle(0, 226, active_pixels, 13, id(paused_timer_color));
|
||||
}
|
||||
}
|
||||
|
||||
- id: draw_active_timer_widget
|
||||
then:
|
||||
- lambda: |
|
||||
id(check_if_timers_active).execute();
|
||||
if (id(global_is_timer_active)){
|
||||
id(s3_box_lcd).filled_rectangle(80, 40, 160, 50, Color::WHITE);
|
||||
id(s3_box_lcd).rectangle(80, 40, 160, 50, Color::BLACK);
|
||||
id(fetch_first_active_timer).execute();
|
||||
int hours_left = floor(id(global_first_active_timer).seconds_left / 3600);
|
||||
int minutes_left = floor((id(global_first_active_timer).seconds_left - hours_left * 3600) / 60);
|
||||
int seconds_left = id(global_first_active_timer).seconds_left - hours_left * 3600 - minutes_left * 60;
|
||||
auto display_hours = (hours_left < 10 ? "0" : "") + std::to_string(hours_left);
|
||||
auto display_minute = (minutes_left < 10 ? "0" : "") + std::to_string(minutes_left);
|
||||
auto display_seconds = (seconds_left < 10 ? "0" : "") + std::to_string(seconds_left);
|
||||
std::string display_string = "";
|
||||
if (hours_left > 0) {
|
||||
display_string = display_hours + ":" + display_minute;
|
||||
} else {
|
||||
display_string = display_minute + ":" + display_seconds;
|
||||
}
|
||||
id(s3_box_lcd).printf(120, 47, id(font_timer), Color::BLACK, "%s", display_string.c_str());
|
||||
}
|
||||
|
||||
- id: start_wake_word
|
||||
then:
|
||||
- if:
|
||||
condition:
|
||||
and:
|
||||
- not:
|
||||
- voice_assistant.is_running:
|
||||
- lambda: return id(wake_word_engine_location).current_option() == "On device";
|
||||
then:
|
||||
- lambda: id(va).set_use_wake_word(false);
|
||||
- micro_wake_word.start:
|
||||
- if:
|
||||
condition:
|
||||
and:
|
||||
- not:
|
||||
- voice_assistant.is_running:
|
||||
- lambda: return id(wake_word_engine_location).current_option() == "In Home Assistant";
|
||||
then:
|
||||
- lambda: id(va).set_use_wake_word(true);
|
||||
- voice_assistant.start_continuous:
|
||||
|
||||
- id: stop_wake_word
|
||||
then:
|
||||
- if:
|
||||
condition:
|
||||
lambda: return id(wake_word_engine_location).current_option() == "In Home Assistant";
|
||||
then:
|
||||
- lambda: id(va).set_use_wake_word(false);
|
||||
- voice_assistant.stop:
|
||||
- if:
|
||||
condition:
|
||||
lambda: return id(wake_word_engine_location).current_option() == "On device";
|
||||
then:
|
||||
- micro_wake_word.stop:
|
||||
|
||||
- id: set_idle_or_mute_phase
|
||||
then:
|
||||
- if:
|
||||
condition:
|
||||
switch.is_off: mute
|
||||
then:
|
||||
- lambda: id(voice_assistant_phase) = ${voice_assist_idle_phase_id};
|
||||
else:
|
||||
- lambda: id(voice_assistant_phase) = ${voice_assist_muted_phase_id};
|
||||
|
||||
# --- Switches ---
|
||||
|
||||
switch:
|
||||
- platform: gpio
|
||||
name: Speaker Enable
|
||||
pin:
|
||||
number: GPIO46
|
||||
ignore_strapping_warning: true
|
||||
restore_mode: RESTORE_DEFAULT_ON
|
||||
entity_category: config
|
||||
disabled_by_default: true
|
||||
- platform: template
|
||||
name: Mute
|
||||
id: mute
|
||||
icon: "mdi:microphone-off"
|
||||
optimistic: true
|
||||
restore_mode: RESTORE_DEFAULT_OFF
|
||||
entity_category: config
|
||||
on_turn_off:
|
||||
- microphone.unmute:
|
||||
- lambda: id(voice_assistant_phase) = ${voice_assist_idle_phase_id};
|
||||
- script.execute: draw_display
|
||||
on_turn_on:
|
||||
- microphone.mute:
|
||||
- lambda: id(voice_assistant_phase) = ${voice_assist_muted_phase_id};
|
||||
- script.execute: draw_display
|
||||
- platform: template
|
||||
id: timer_ringing
|
||||
optimistic: true
|
||||
internal: true
|
||||
restore_mode: ALWAYS_OFF
|
||||
on_turn_off:
|
||||
- lambda: |-
|
||||
id(speaker_media_player)
|
||||
->make_call()
|
||||
.set_command(media_player::MediaPlayerCommand::MEDIA_PLAYER_COMMAND_REPEAT_OFF)
|
||||
.set_announcement(true)
|
||||
.perform();
|
||||
id(speaker_media_player)->set_playlist_delay_ms(speaker::AudioPipelineType::ANNOUNCEMENT, 0);
|
||||
- media_player.stop:
|
||||
announcement: true
|
||||
on_turn_on:
|
||||
- lambda: |-
|
||||
id(speaker_media_player)
|
||||
->make_call()
|
||||
.set_command(media_player::MediaPlayerCommand::MEDIA_PLAYER_COMMAND_REPEAT_ONE)
|
||||
.set_announcement(true)
|
||||
.perform();
|
||||
id(speaker_media_player)->set_playlist_delay_ms(speaker::AudioPipelineType::ANNOUNCEMENT, 1000);
|
||||
- media_player.speaker.play_on_device_media_file:
|
||||
media_file: timer_finished_sound
|
||||
announcement: true
|
||||
- delay: 15min
|
||||
- switch.turn_off: timer_ringing
|
||||
|
||||
# --- Wake word engine location selector ---
|
||||
|
||||
select:
|
||||
- platform: template
|
||||
entity_category: config
|
||||
name: Wake word engine location
|
||||
id: wake_word_engine_location
|
||||
icon: "mdi:account-voice"
|
||||
optimistic: true
|
||||
restore_value: true
|
||||
options:
|
||||
- In Home Assistant
|
||||
- On device
|
||||
initial_option: On device
|
||||
on_value:
|
||||
- if:
|
||||
condition:
|
||||
lambda: return !id(init_in_progress);
|
||||
then:
|
||||
- wait_until:
|
||||
lambda: return id(voice_assistant_phase) == ${voice_assist_muted_phase_id} || id(voice_assistant_phase) == ${voice_assist_idle_phase_id};
|
||||
- if:
|
||||
condition:
|
||||
lambda: return x == "In Home Assistant";
|
||||
then:
|
||||
- micro_wake_word.stop
|
||||
- delay: 500ms
|
||||
- if:
|
||||
condition:
|
||||
switch.is_off: mute
|
||||
then:
|
||||
- lambda: id(va).set_use_wake_word(true);
|
||||
- voice_assistant.start_continuous:
|
||||
- if:
|
||||
condition:
|
||||
lambda: return x == "On device";
|
||||
then:
|
||||
- lambda: id(va).set_use_wake_word(false);
|
||||
- voice_assistant.stop
|
||||
- delay: 500ms
|
||||
- if:
|
||||
condition:
|
||||
switch.is_off: mute
|
||||
then:
|
||||
- micro_wake_word.start
|
||||
|
||||
# --- Global variables ---
|
||||
|
||||
globals:
|
||||
- id: init_in_progress
|
||||
type: bool
|
||||
restore_value: false
|
||||
initial_value: "true"
|
||||
- id: voice_assistant_phase
|
||||
type: int
|
||||
restore_value: false
|
||||
initial_value: ${voice_assist_not_ready_phase_id}
|
||||
- id: global_first_active_timer
|
||||
type: voice_assistant::Timer
|
||||
restore_value: false
|
||||
- id: global_is_timer_active
|
||||
type: bool
|
||||
restore_value: false
|
||||
- id: global_first_timer
|
||||
type: voice_assistant::Timer
|
||||
restore_value: false
|
||||
- id: global_is_timer
|
||||
type: bool
|
||||
restore_value: false
|
||||
|
||||
# --- Display images ---
|
||||
|
||||
image:
|
||||
- file: ${error_illustration_file}
|
||||
id: casita_error
|
||||
resize: 320x240
|
||||
type: RGB
|
||||
transparency: alpha_channel
|
||||
- file: ${idle_illustration_file}
|
||||
id: casita_idle
|
||||
resize: 320x240
|
||||
type: RGB
|
||||
transparency: alpha_channel
|
||||
- file: ${listening_illustration_file}
|
||||
id: casita_listening
|
||||
resize: 320x240
|
||||
type: RGB
|
||||
transparency: alpha_channel
|
||||
- file: ${thinking_illustration_file}
|
||||
id: casita_thinking
|
||||
resize: 320x240
|
||||
type: RGB
|
||||
transparency: alpha_channel
|
||||
- file: ${replying_illustration_file}
|
||||
id: casita_replying
|
||||
resize: 320x240
|
||||
type: RGB
|
||||
transparency: alpha_channel
|
||||
- file: ${timer_finished_illustration_file}
|
||||
id: casita_timer_finished
|
||||
resize: 320x240
|
||||
type: RGB
|
||||
transparency: alpha_channel
|
||||
- file: ${loading_illustration_file}
|
||||
id: casita_initializing
|
||||
resize: 320x240
|
||||
type: RGB
|
||||
transparency: alpha_channel
|
||||
- file: https://github.com/esphome/wake-word-voice-assistants/raw/main/error_box_illustrations/error-no-wifi.png
|
||||
id: error_no_wifi
|
||||
resize: 320x240
|
||||
type: RGB
|
||||
transparency: alpha_channel
|
||||
- file: https://github.com/esphome/wake-word-voice-assistants/raw/main/error_box_illustrations/error-no-ha.png
|
||||
id: error_no_ha
|
||||
resize: 320x240
|
||||
type: RGB
|
||||
transparency: alpha_channel
|
||||
|
||||
# --- Fonts ---
|
||||
|
||||
font:
|
||||
- file:
|
||||
type: gfonts
|
||||
family: ${font_family}
|
||||
weight: 300
|
||||
italic: true
|
||||
id: font_request
|
||||
size: 15
|
||||
glyphsets:
|
||||
- ${font_glyphsets}
|
||||
- file:
|
||||
type: gfonts
|
||||
family: ${font_family}
|
||||
weight: 300
|
||||
id: font_response
|
||||
size: 15
|
||||
glyphsets:
|
||||
- ${font_glyphsets}
|
||||
- file:
|
||||
type: gfonts
|
||||
family: ${font_family}
|
||||
weight: 300
|
||||
id: font_timer
|
||||
size: 30
|
||||
glyphsets:
|
||||
- ${font_glyphsets}
|
||||
|
||||
# --- Text sensors (request/response display) ---
|
||||
|
||||
text_sensor:
|
||||
- id: text_request
|
||||
platform: template
|
||||
on_value:
|
||||
lambda: |-
|
||||
if(id(text_request).state.length()>32) {
|
||||
std::string name = id(text_request).state.c_str();
|
||||
std::string truncated = esphome::str_truncate(name.c_str(),31);
|
||||
id(text_request).state = (truncated+"...").c_str();
|
||||
}
|
||||
- id: text_response
|
||||
platform: template
|
||||
on_value:
|
||||
lambda: |-
|
||||
if(id(text_response).state.length()>32) {
|
||||
std::string name = id(text_response).state.c_str();
|
||||
std::string truncated = esphome::str_truncate(name.c_str(),31);
|
||||
id(text_response).state = (truncated+"...").c_str();
|
||||
}
|
||||
|
||||
# --- Colors ---
|
||||
|
||||
color:
|
||||
- id: idle_color
|
||||
hex: ${idle_illustration_background_color}
|
||||
- id: listening_color
|
||||
hex: ${listening_illustration_background_color}
|
||||
- id: thinking_color
|
||||
hex: ${thinking_illustration_background_color}
|
||||
- id: replying_color
|
||||
hex: ${replying_illustration_background_color}
|
||||
- id: loading_color
|
||||
hex: ${loading_illustration_background_color}
|
||||
- id: error_color
|
||||
hex: ${error_illustration_background_color}
|
||||
- id: active_timer_color
|
||||
hex: "26ed3a"
|
||||
- id: paused_timer_color
|
||||
hex: "3b89e3"
|
||||
|
||||
# --- SPI + Display ---
|
||||
|
||||
spi:
|
||||
- id: spi_bus
|
||||
clk_pin: 7
|
||||
mosi_pin: 6
|
||||
|
||||
display:
|
||||
- platform: ili9xxx
|
||||
id: s3_box_lcd
|
||||
model: S3BOX
|
||||
invert_colors: false
|
||||
data_rate: 40MHz
|
||||
cs_pin: 5
|
||||
dc_pin: 4
|
||||
reset_pin:
|
||||
number: 48
|
||||
inverted: true
|
||||
update_interval: never
|
||||
pages:
|
||||
- id: idle_page
|
||||
lambda: |-
|
||||
it.fill(id(idle_color));
|
||||
it.image((it.get_width() / 2), (it.get_height() / 2), id(casita_idle), ImageAlign::CENTER);
|
||||
id(draw_timer_timeline).execute();
|
||||
id(draw_active_timer_widget).execute();
|
||||
- id: listening_page
|
||||
lambda: |-
|
||||
it.fill(id(listening_color));
|
||||
it.image((it.get_width() / 2), (it.get_height() / 2), id(casita_listening), ImageAlign::CENTER);
|
||||
id(draw_timer_timeline).execute();
|
||||
- id: thinking_page
|
||||
lambda: |-
|
||||
it.fill(id(thinking_color));
|
||||
it.image((it.get_width() / 2), (it.get_height() / 2), id(casita_thinking), ImageAlign::CENTER);
|
||||
it.filled_rectangle(20, 20, 280, 30, Color::WHITE);
|
||||
it.rectangle(20, 20, 280, 30, Color::BLACK);
|
||||
it.printf(30, 25, id(font_request), Color::BLACK, "%s", id(text_request).state.c_str());
|
||||
id(draw_timer_timeline).execute();
|
||||
- id: replying_page
|
||||
lambda: |-
|
||||
it.fill(id(replying_color));
|
||||
it.image((it.get_width() / 2), (it.get_height() / 2), id(casita_replying), ImageAlign::CENTER);
|
||||
it.filled_rectangle(20, 20, 280, 30, Color::WHITE);
|
||||
it.rectangle(20, 20, 280, 30, Color::BLACK);
|
||||
it.filled_rectangle(20, 190, 280, 30, Color::WHITE);
|
||||
it.rectangle(20, 190, 280, 30, Color::BLACK);
|
||||
it.printf(30, 25, id(font_request), Color::BLACK, "%s", id(text_request).state.c_str());
|
||||
it.printf(30, 195, id(font_response), Color::BLACK, "%s", id(text_response).state.c_str());
|
||||
id(draw_timer_timeline).execute();
|
||||
- id: timer_finished_page
|
||||
lambda: |-
|
||||
it.fill(id(idle_color));
|
||||
it.image((it.get_width() / 2), (it.get_height() / 2), id(casita_timer_finished), ImageAlign::CENTER);
|
||||
- id: error_page
|
||||
lambda: |-
|
||||
it.fill(id(error_color));
|
||||
it.image((it.get_width() / 2), (it.get_height() / 2), id(casita_error), ImageAlign::CENTER);
|
||||
- id: no_ha_page
|
||||
lambda: |-
|
||||
it.image((it.get_width() / 2), (it.get_height() / 2), id(error_no_ha), ImageAlign::CENTER);
|
||||
- id: no_wifi_page
|
||||
lambda: |-
|
||||
it.image((it.get_width() / 2), (it.get_height() / 2), id(error_no_wifi), ImageAlign::CENTER);
|
||||
- id: initializing_page
|
||||
lambda: |-
|
||||
it.fill(id(loading_color));
|
||||
it.image((it.get_width() / 2), (it.get_height() / 2), id(casita_initializing), ImageAlign::CENTER);
|
||||
- id: muted_page
|
||||
lambda: |-
|
||||
it.fill(Color::BLACK);
|
||||
id(draw_timer_timeline).execute();
|
||||
id(draw_active_timer_widget).execute();
|
||||
BIN
homeai-esp32/esphome/illustrations/error.png
Normal file
BIN
homeai-esp32/esphome/illustrations/error.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 76 KiB |
BIN
homeai-esp32/esphome/illustrations/idle.png
Normal file
BIN
homeai-esp32/esphome/illustrations/idle.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 90 KiB |
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user