feat: add homeai-desktop web assistant with LAN access
Adds the desktop web assistant app (Vite + React) with OpenClaw bridge proxy and exposes it on the local network (host: 0.0.0.0, port 5174). Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
2
homeai-desktop/.gitignore
vendored
Normal file
2
homeai-desktop/.gitignore
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
node_modules/
|
||||
dist/
|
||||
15
homeai-desktop/index.html
Normal file
15
homeai-desktop/index.html
Normal file
@@ -0,0 +1,15 @@
|
||||
<!doctype html>
|
||||
<html lang="en" class="dark">
|
||||
<head>
|
||||
<meta charset="UTF-8" />
|
||||
<link rel="icon" type="image/svg+xml" href="/icon.svg" />
|
||||
<link rel="manifest" href="/manifest.json" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
<meta name="theme-color" content="#030712" />
|
||||
<title>HomeAI Assistant</title>
|
||||
</head>
|
||||
<body class="bg-gray-950 text-gray-100">
|
||||
<div id="root"></div>
|
||||
<script type="module" src="/src/main.jsx"></script>
|
||||
</body>
|
||||
</html>
|
||||
31
homeai-desktop/launchd/com.homeai.desktop-assistant.plist
Normal file
31
homeai-desktop/launchd/com.homeai.desktop-assistant.plist
Normal file
@@ -0,0 +1,31 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>Label</key>
|
||||
<string>com.homeai.desktop-assistant</string>
|
||||
<key>ProgramArguments</key>
|
||||
<array>
|
||||
<string>/opt/homebrew/bin/npx</string>
|
||||
<string>vite</string>
|
||||
<string>--host</string>
|
||||
<string>--port</string>
|
||||
<string>5174</string>
|
||||
</array>
|
||||
<key>WorkingDirectory</key>
|
||||
<string>/Users/aodhan/gitea/homeai/homeai-desktop</string>
|
||||
<key>RunAtLoad</key>
|
||||
<true/>
|
||||
<key>KeepAlive</key>
|
||||
<true/>
|
||||
<key>StandardOutPath</key>
|
||||
<string>/tmp/homeai-desktop-assistant.log</string>
|
||||
<key>StandardErrorPath</key>
|
||||
<string>/tmp/homeai-desktop-assistant-error.log</string>
|
||||
<key>EnvironmentVariables</key>
|
||||
<dict>
|
||||
<key>PATH</key>
|
||||
<string>/opt/homebrew/bin:/usr/local/bin:/usr/bin:/bin</string>
|
||||
</dict>
|
||||
</dict>
|
||||
</plist>
|
||||
2117
homeai-desktop/package-lock.json
generated
Normal file
2117
homeai-desktop/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
24
homeai-desktop/package.json
Normal file
24
homeai-desktop/package.json
Normal file
@@ -0,0 +1,24 @@
|
||||
{
|
||||
"name": "homeai-desktop",
|
||||
"private": true,
|
||||
"version": "0.0.0",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "vite",
|
||||
"build": "vite build",
|
||||
"preview": "vite preview"
|
||||
},
|
||||
"dependencies": {
|
||||
"@tailwindcss/vite": "^4.2.1",
|
||||
"react": "^19.2.0",
|
||||
"react-dom": "^19.2.0",
|
||||
"tailwindcss": "^4.2.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@vitejs/plugin-react": "^5.1.1",
|
||||
"vite": "^8.0.0-beta.13"
|
||||
},
|
||||
"overrides": {
|
||||
"vite": "^8.0.0-beta.13"
|
||||
}
|
||||
}
|
||||
9
homeai-desktop/public/icon.svg
Normal file
9
homeai-desktop/public/icon.svg
Normal file
@@ -0,0 +1,9 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 64 64">
|
||||
<rect width="64" height="64" rx="14" fill="#030712"/>
|
||||
<circle cx="32" cy="28" r="12" fill="none" stroke="#818cf8" stroke-width="2.5"/>
|
||||
<path d="M26 26c0-3.3 2.7-6 6-6s6 2.7 6 6" fill="none" stroke="#818cf8" stroke-width="2" stroke-linecap="round"/>
|
||||
<rect x="30" y="40" width="4" height="8" rx="2" fill="#818cf8"/>
|
||||
<path d="M24 52h16" stroke="#818cf8" stroke-width="2.5" stroke-linecap="round"/>
|
||||
<circle cx="29" cy="27" r="1.5" fill="#34d399"/>
|
||||
<circle cx="35" cy="27" r="1.5" fill="#34d399"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 575 B |
16
homeai-desktop/public/manifest.json
Normal file
16
homeai-desktop/public/manifest.json
Normal file
@@ -0,0 +1,16 @@
|
||||
{
|
||||
"name": "HomeAI Assistant",
|
||||
"short_name": "HomeAI",
|
||||
"description": "Desktop AI assistant powered by local LLMs",
|
||||
"start_url": "/",
|
||||
"display": "standalone",
|
||||
"background_color": "#030712",
|
||||
"theme_color": "#030712",
|
||||
"icons": [
|
||||
{
|
||||
"src": "/icon.svg",
|
||||
"sizes": "any",
|
||||
"type": "image/svg+xml"
|
||||
}
|
||||
]
|
||||
}
|
||||
115
homeai-desktop/src/App.jsx
Normal file
115
homeai-desktop/src/App.jsx
Normal file
@@ -0,0 +1,115 @@
|
||||
import { useState, useEffect, useCallback } from 'react'
|
||||
import ChatPanel from './components/ChatPanel'
|
||||
import InputBar from './components/InputBar'
|
||||
import StatusIndicator from './components/StatusIndicator'
|
||||
import SettingsDrawer from './components/SettingsDrawer'
|
||||
import { useSettings } from './hooks/useSettings'
|
||||
import { useBridgeHealth } from './hooks/useBridgeHealth'
|
||||
import { useChat } from './hooks/useChat'
|
||||
import { useTtsPlayback } from './hooks/useTtsPlayback'
|
||||
import { useVoiceInput } from './hooks/useVoiceInput'
|
||||
|
||||
export default function App() {
|
||||
const { settings, updateSetting } = useSettings()
|
||||
const isOnline = useBridgeHealth()
|
||||
const { messages, isLoading, send, clearHistory } = useChat()
|
||||
const { isPlaying, speak, stop } = useTtsPlayback(settings.voice)
|
||||
const { isRecording, isTranscribing, startRecording, stopRecording } = useVoiceInput(settings.sttMode)
|
||||
const [settingsOpen, setSettingsOpen] = useState(false)
|
||||
|
||||
// Send a message and optionally speak the response
|
||||
const handleSend = useCallback(async (text) => {
|
||||
const response = await send(text)
|
||||
if (response && settings.autoTts) {
|
||||
speak(response)
|
||||
}
|
||||
}, [send, settings.autoTts, speak])
|
||||
|
||||
// Toggle voice recording
|
||||
const handleVoiceToggle = useCallback(async () => {
|
||||
if (isRecording) {
|
||||
const text = await stopRecording()
|
||||
if (text) {
|
||||
handleSend(text)
|
||||
}
|
||||
} else {
|
||||
startRecording()
|
||||
}
|
||||
}, [isRecording, stopRecording, startRecording, handleSend])
|
||||
|
||||
// Space bar push-to-talk when input not focused
|
||||
useEffect(() => {
|
||||
const handleKeyDown = (e) => {
|
||||
if (e.code === 'Space' && e.target.tagName !== 'TEXTAREA' && e.target.tagName !== 'INPUT') {
|
||||
e.preventDefault()
|
||||
handleVoiceToggle()
|
||||
}
|
||||
}
|
||||
window.addEventListener('keydown', handleKeyDown)
|
||||
return () => window.removeEventListener('keydown', handleKeyDown)
|
||||
}, [handleVoiceToggle])
|
||||
|
||||
return (
|
||||
<div className="h-screen flex flex-col bg-gray-950">
|
||||
{/* Status bar */}
|
||||
<header className="flex items-center justify-between px-4 py-2 border-b border-gray-800/50">
|
||||
<div className="flex items-center gap-2">
|
||||
<StatusIndicator isOnline={isOnline} />
|
||||
<span className="text-xs text-gray-500">
|
||||
{isOnline === null ? 'Connecting...' : isOnline ? 'Connected' : 'Offline'}
|
||||
</span>
|
||||
</div>
|
||||
<div className="flex items-center gap-2">
|
||||
{messages.length > 0 && (
|
||||
<button
|
||||
onClick={clearHistory}
|
||||
className="text-xs text-gray-500 hover:text-gray-300 transition-colors px-2 py-1"
|
||||
title="Clear conversation"
|
||||
>
|
||||
Clear
|
||||
</button>
|
||||
)}
|
||||
{isPlaying && (
|
||||
<button
|
||||
onClick={stop}
|
||||
className="text-xs text-indigo-400 hover:text-indigo-300 transition-colors px-2 py-1"
|
||||
title="Stop speaking"
|
||||
>
|
||||
Stop audio
|
||||
</button>
|
||||
)}
|
||||
<button
|
||||
onClick={() => setSettingsOpen(true)}
|
||||
className="text-gray-500 hover:text-gray-300 transition-colors p-1"
|
||||
title="Settings"
|
||||
>
|
||||
<svg className="w-5 h-5" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={1.5}>
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M9.594 3.94c.09-.542.56-.94 1.11-.94h2.593c.55 0 1.02.398 1.11.94l.213 1.281c.063.374.313.686.645.87.074.04.147.083.22.127.325.196.72.257 1.075.124l1.217-.456a1.125 1.125 0 011.37.49l1.296 2.247a1.125 1.125 0 01-.26 1.431l-1.003.827c-.293.241-.438.613-.43.992a7.723 7.723 0 010 .255c-.008.378.137.75.43.991l1.004.827c.424.35.534.955.26 1.43l-1.298 2.247a1.125 1.125 0 01-1.369.491l-1.217-.456c-.355-.133-.75-.072-1.076.124a6.47 6.47 0 01-.22.128c-.331.183-.581.495-.644.869l-.213 1.281c-.09.543-.56.941-1.11.941h-2.594c-.55 0-1.019-.398-1.11-.94l-.213-1.281c-.062-.374-.312-.686-.644-.87a6.52 6.52 0 01-.22-.127c-.325-.196-.72-.257-1.076-.124l-1.217.456a1.125 1.125 0 01-1.369-.49l-1.297-2.247a1.125 1.125 0 01.26-1.431l1.004-.827c.292-.24.437-.613.43-.991a6.932 6.932 0 010-.255c.007-.38-.138-.751-.43-.992l-1.004-.827a1.125 1.125 0 01-.26-1.43l1.297-2.247a1.125 1.125 0 011.37-.491l1.216.456c.356.133.751.072 1.076-.124.072-.044.146-.086.22-.128.332-.183.582-.495.644-.869l.214-1.28z" />
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M15 12a3 3 0 11-6 0 3 3 0 016 0z" />
|
||||
</svg>
|
||||
</button>
|
||||
</div>
|
||||
</header>
|
||||
|
||||
{/* Chat area */}
|
||||
<ChatPanel messages={messages} isLoading={isLoading} onReplay={speak} />
|
||||
|
||||
{/* Input */}
|
||||
<InputBar
|
||||
onSend={handleSend}
|
||||
onVoiceToggle={handleVoiceToggle}
|
||||
isLoading={isLoading}
|
||||
isRecording={isRecording}
|
||||
isTranscribing={isTranscribing}
|
||||
/>
|
||||
|
||||
{/* Settings drawer */}
|
||||
<SettingsDrawer
|
||||
isOpen={settingsOpen}
|
||||
onClose={() => setSettingsOpen(false)}
|
||||
settings={settings}
|
||||
onUpdate={updateSetting}
|
||||
/>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
35
homeai-desktop/src/components/ChatPanel.jsx
Normal file
35
homeai-desktop/src/components/ChatPanel.jsx
Normal file
@@ -0,0 +1,35 @@
|
||||
import { useEffect, useRef } from 'react'
|
||||
import MessageBubble from './MessageBubble'
|
||||
import ThinkingIndicator from './ThinkingIndicator'
|
||||
|
||||
export default function ChatPanel({ messages, isLoading, onReplay }) {
|
||||
const bottomRef = useRef(null)
|
||||
|
||||
useEffect(() => {
|
||||
bottomRef.current?.scrollIntoView({ behavior: 'smooth' })
|
||||
}, [messages, isLoading])
|
||||
|
||||
if (messages.length === 0 && !isLoading) {
|
||||
return (
|
||||
<div className="flex-1 flex items-center justify-center">
|
||||
<div className="text-center">
|
||||
<div className="w-16 h-16 rounded-full bg-indigo-600/20 flex items-center justify-center mx-auto mb-4">
|
||||
<span className="text-indigo-400 text-2xl">AI</span>
|
||||
</div>
|
||||
<h2 className="text-xl font-medium text-gray-200 mb-2">Hi, I'm Aria</h2>
|
||||
<p className="text-gray-500 text-sm">Type a message or press the mic to talk</p>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="flex-1 overflow-y-auto py-4">
|
||||
{messages.map((msg) => (
|
||||
<MessageBubble key={msg.id} message={msg} onReplay={onReplay} />
|
||||
))}
|
||||
{isLoading && <ThinkingIndicator />}
|
||||
<div ref={bottomRef} />
|
||||
</div>
|
||||
)
|
||||
}
|
||||
53
homeai-desktop/src/components/InputBar.jsx
Normal file
53
homeai-desktop/src/components/InputBar.jsx
Normal file
@@ -0,0 +1,53 @@
|
||||
import { useState, useRef } from 'react'
|
||||
import VoiceButton from './VoiceButton'
|
||||
|
||||
export default function InputBar({ onSend, onVoiceToggle, isLoading, isRecording, isTranscribing }) {
|
||||
const [text, setText] = useState('')
|
||||
const inputRef = useRef(null)
|
||||
|
||||
const handleSubmit = (e) => {
|
||||
e.preventDefault()
|
||||
if (!text.trim() || isLoading) return
|
||||
onSend(text)
|
||||
setText('')
|
||||
}
|
||||
|
||||
const handleKeyDown = (e) => {
|
||||
if (e.key === 'Enter' && !e.shiftKey) {
|
||||
e.preventDefault()
|
||||
handleSubmit(e)
|
||||
}
|
||||
}
|
||||
|
||||
return (
|
||||
<form onSubmit={handleSubmit} className="border-t border-gray-800 bg-gray-950 px-4 py-3">
|
||||
<div className="flex items-end gap-2 max-w-3xl mx-auto">
|
||||
<VoiceButton
|
||||
isRecording={isRecording}
|
||||
isTranscribing={isTranscribing}
|
||||
onToggle={onVoiceToggle}
|
||||
disabled={isLoading}
|
||||
/>
|
||||
<textarea
|
||||
ref={inputRef}
|
||||
value={text}
|
||||
onChange={(e) => setText(e.target.value)}
|
||||
onKeyDown={handleKeyDown}
|
||||
placeholder="Type a message..."
|
||||
rows={1}
|
||||
className="flex-1 bg-gray-800 text-gray-100 rounded-xl px-4 py-2.5 text-sm resize-none placeholder-gray-500 focus:outline-none focus:ring-1 focus:ring-indigo-500 min-h-[42px] max-h-32"
|
||||
disabled={isLoading}
|
||||
/>
|
||||
<button
|
||||
type="submit"
|
||||
disabled={!text.trim() || isLoading}
|
||||
className="w-10 h-10 rounded-full bg-indigo-600 text-white flex items-center justify-center shrink-0 hover:bg-indigo-500 disabled:opacity-40 disabled:hover:bg-indigo-600 transition-colors"
|
||||
>
|
||||
<svg className="w-5 h-5" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={2}>
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M6 12L3.269 3.126A59.768 59.768 0 0121.485 12 59.77 59.77 0 013.27 20.876L5.999 12zm0 0h7.5" />
|
||||
</svg>
|
||||
</button>
|
||||
</div>
|
||||
</form>
|
||||
)
|
||||
}
|
||||
39
homeai-desktop/src/components/MessageBubble.jsx
Normal file
39
homeai-desktop/src/components/MessageBubble.jsx
Normal file
@@ -0,0 +1,39 @@
|
||||
export default function MessageBubble({ message, onReplay }) {
|
||||
const isUser = message.role === 'user'
|
||||
|
||||
return (
|
||||
<div className={`flex ${isUser ? 'justify-end' : 'justify-start'} px-4 py-1.5`}>
|
||||
<div className={`flex items-start gap-3 max-w-[80%] ${isUser ? 'flex-row-reverse' : ''}`}>
|
||||
{!isUser && (
|
||||
<div className="w-8 h-8 rounded-full bg-indigo-600/20 flex items-center justify-center shrink-0 mt-0.5">
|
||||
<span className="text-indigo-400 text-sm">AI</span>
|
||||
</div>
|
||||
)}
|
||||
<div>
|
||||
<div
|
||||
className={`rounded-2xl px-4 py-2.5 text-sm leading-relaxed whitespace-pre-wrap ${
|
||||
isUser
|
||||
? 'bg-indigo-600 text-white'
|
||||
: message.isError
|
||||
? 'bg-red-900/40 text-red-200 border border-red-800/50'
|
||||
: 'bg-gray-800 text-gray-100'
|
||||
}`}
|
||||
>
|
||||
{message.content}
|
||||
</div>
|
||||
{!isUser && !message.isError && onReplay && (
|
||||
<button
|
||||
onClick={() => onReplay(message.content)}
|
||||
className="mt-1 ml-1 text-gray-500 hover:text-indigo-400 transition-colors"
|
||||
title="Replay audio"
|
||||
>
|
||||
<svg className="w-4 h-4" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={2}>
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M15.536 8.464a5 5 0 010 7.072M17.95 6.05a8 8 0 010 11.9M6.5 9H4a1 1 0 00-1 1v4a1 1 0 001 1h2.5l4 4V5l-4 4z" />
|
||||
</svg>
|
||||
</button>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
74
homeai-desktop/src/components/SettingsDrawer.jsx
Normal file
74
homeai-desktop/src/components/SettingsDrawer.jsx
Normal file
@@ -0,0 +1,74 @@
|
||||
import { VOICES } from '../lib/constants'
|
||||
|
||||
export default function SettingsDrawer({ isOpen, onClose, settings, onUpdate }) {
|
||||
if (!isOpen) return null
|
||||
|
||||
return (
|
||||
<>
|
||||
<div className="fixed inset-0 bg-black/50 z-40" onClick={onClose} />
|
||||
<div className="fixed right-0 top-0 bottom-0 w-80 bg-gray-900 border-l border-gray-800 z-50 flex flex-col">
|
||||
<div className="flex items-center justify-between px-4 py-3 border-b border-gray-800">
|
||||
<h2 className="text-sm font-medium text-gray-200">Settings</h2>
|
||||
<button onClick={onClose} className="text-gray-500 hover:text-gray-300">
|
||||
<svg className="w-5 h-5" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={2}>
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M6 18L18 6M6 6l12 12" />
|
||||
</svg>
|
||||
</button>
|
||||
</div>
|
||||
<div className="flex-1 overflow-y-auto p-4 space-y-5">
|
||||
{/* Voice */}
|
||||
<div>
|
||||
<label className="block text-xs font-medium text-gray-400 mb-1.5">Voice</label>
|
||||
<select
|
||||
value={settings.voice}
|
||||
onChange={(e) => onUpdate('voice', e.target.value)}
|
||||
className="w-full bg-gray-800 text-gray-200 text-sm rounded-lg px-3 py-2 border border-gray-700 focus:outline-none focus:border-indigo-500"
|
||||
>
|
||||
{VOICES.map((v) => (
|
||||
<option key={v.id} value={v.id}>{v.label}</option>
|
||||
))}
|
||||
</select>
|
||||
</div>
|
||||
|
||||
{/* Auto TTS */}
|
||||
<div className="flex items-center justify-between">
|
||||
<div>
|
||||
<div className="text-sm text-gray-200">Auto-speak responses</div>
|
||||
<div className="text-xs text-gray-500">Speak assistant replies aloud</div>
|
||||
</div>
|
||||
<button
|
||||
onClick={() => onUpdate('autoTts', !settings.autoTts)}
|
||||
className={`relative w-10 h-6 rounded-full transition-colors ${
|
||||
settings.autoTts ? 'bg-indigo-600' : 'bg-gray-700'
|
||||
}`}
|
||||
>
|
||||
<span
|
||||
className={`absolute top-0.5 left-0.5 w-5 h-5 rounded-full bg-white transition-transform ${
|
||||
settings.autoTts ? 'translate-x-4' : ''
|
||||
}`}
|
||||
/>
|
||||
</button>
|
||||
</div>
|
||||
|
||||
{/* STT Mode */}
|
||||
<div>
|
||||
<label className="block text-xs font-medium text-gray-400 mb-1.5">Speech recognition</label>
|
||||
<select
|
||||
value={settings.sttMode}
|
||||
onChange={(e) => onUpdate('sttMode', e.target.value)}
|
||||
className="w-full bg-gray-800 text-gray-200 text-sm rounded-lg px-3 py-2 border border-gray-700 focus:outline-none focus:border-indigo-500"
|
||||
>
|
||||
<option value="bridge">Wyoming STT (local)</option>
|
||||
<option value="webspeech">Web Speech API (browser)</option>
|
||||
</select>
|
||||
<p className="text-xs text-gray-500 mt-1">
|
||||
{settings.sttMode === 'bridge'
|
||||
? 'Uses Whisper via the local bridge server'
|
||||
: 'Uses browser built-in speech recognition'}
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</>
|
||||
)
|
||||
}
|
||||
11
homeai-desktop/src/components/StatusIndicator.jsx
Normal file
11
homeai-desktop/src/components/StatusIndicator.jsx
Normal file
@@ -0,0 +1,11 @@
|
||||
export default function StatusIndicator({ isOnline }) {
|
||||
if (isOnline === null) {
|
||||
return <span className="inline-block w-2.5 h-2.5 rounded-full bg-gray-500 animate-pulse" title="Checking..." />
|
||||
}
|
||||
return (
|
||||
<span
|
||||
className={`inline-block w-2.5 h-2.5 rounded-full ${isOnline ? 'bg-emerald-400' : 'bg-red-400'}`}
|
||||
title={isOnline ? 'Bridge online' : 'Bridge offline'}
|
||||
/>
|
||||
)
|
||||
}
|
||||
14
homeai-desktop/src/components/ThinkingIndicator.jsx
Normal file
14
homeai-desktop/src/components/ThinkingIndicator.jsx
Normal file
@@ -0,0 +1,14 @@
|
||||
export default function ThinkingIndicator() {
|
||||
return (
|
||||
<div className="flex items-start gap-3 px-4 py-3">
|
||||
<div className="w-8 h-8 rounded-full bg-indigo-600/20 flex items-center justify-center shrink-0">
|
||||
<span className="text-indigo-400 text-sm">AI</span>
|
||||
</div>
|
||||
<div className="flex items-center gap-1 pt-2.5">
|
||||
<span className="w-2 h-2 rounded-full bg-gray-400 animate-[bounce_1.4s_ease-in-out_infinite]" />
|
||||
<span className="w-2 h-2 rounded-full bg-gray-400 animate-[bounce_1.4s_ease-in-out_0.2s_infinite]" />
|
||||
<span className="w-2 h-2 rounded-full bg-gray-400 animate-[bounce_1.4s_ease-in-out_0.4s_infinite]" />
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
32
homeai-desktop/src/components/VoiceButton.jsx
Normal file
32
homeai-desktop/src/components/VoiceButton.jsx
Normal file
@@ -0,0 +1,32 @@
|
||||
export default function VoiceButton({ isRecording, isTranscribing, onToggle, disabled }) {
|
||||
const handleClick = () => {
|
||||
if (disabled || isTranscribing) return
|
||||
onToggle()
|
||||
}
|
||||
|
||||
return (
|
||||
<button
|
||||
onClick={handleClick}
|
||||
disabled={disabled || isTranscribing}
|
||||
className={`w-10 h-10 rounded-full flex items-center justify-center transition-all shrink-0 ${
|
||||
isRecording
|
||||
? 'bg-red-500 text-white shadow-[0_0_0_4px_rgba(239,68,68,0.3)] animate-pulse'
|
||||
: isTranscribing
|
||||
? 'bg-gray-700 text-gray-400 cursor-wait'
|
||||
: 'bg-gray-800 text-gray-400 hover:bg-gray-700 hover:text-gray-200'
|
||||
}`}
|
||||
title={isRecording ? 'Stop recording' : isTranscribing ? 'Transcribing...' : 'Start recording (Space)'}
|
||||
>
|
||||
{isTranscribing ? (
|
||||
<svg className="w-5 h-5 animate-spin" fill="none" viewBox="0 0 24 24">
|
||||
<circle className="opacity-25" cx="12" cy="12" r="10" stroke="currentColor" strokeWidth="4" />
|
||||
<path className="opacity-75" fill="currentColor" d="M4 12a8 8 0 018-8V0C5.373 0 0 5.373 0 12h4z" />
|
||||
</svg>
|
||||
) : (
|
||||
<svg className="w-5 h-5" fill="none" viewBox="0 0 24 24" stroke="currentColor" strokeWidth={2}>
|
||||
<path strokeLinecap="round" strokeLinejoin="round" d="M12 18.75a6 6 0 006-6v-1.5m-6 7.5a6 6 0 01-6-6v-1.5m6 7.5v3.75m-3.75 0h7.5M12 15.75a3 3 0 01-3-3V4.5a3 3 0 116 0v8.25a3 3 0 01-3 3z" />
|
||||
</svg>
|
||||
)}
|
||||
</button>
|
||||
)
|
||||
}
|
||||
18
homeai-desktop/src/hooks/useBridgeHealth.js
Normal file
18
homeai-desktop/src/hooks/useBridgeHealth.js
Normal file
@@ -0,0 +1,18 @@
|
||||
import { useState, useEffect, useRef } from 'react'
|
||||
import { healthCheck } from '../lib/api'
|
||||
|
||||
/**
 * Polls the bridge health endpoint every 15 seconds.
 * Returns null while the first check is in flight, then true/false.
 */
export function useBridgeHealth() {
  const [isOnline, setIsOnline] = useState(null)

  useEffect(() => {
    const probe = async () => {
      setIsOnline(await healthCheck())
    }
    // Check immediately, then on a fixed interval.
    probe()
    const timer = setInterval(probe, 15000)
    return () => clearInterval(timer)
  }, [])

  return isOnline
}
|
||||
45
homeai-desktop/src/hooks/useChat.js
Normal file
45
homeai-desktop/src/hooks/useChat.js
Normal file
@@ -0,0 +1,45 @@
|
||||
import { useState, useCallback } from 'react'
|
||||
import { sendMessage } from '../lib/api'
|
||||
|
||||
/**
 * Chat state: message list, loading flag, send(text) and clearHistory().
 * send() resolves with the assistant's reply text, or null on error /
 * empty input; failures are surfaced as error-flagged assistant messages.
 */
export function useChat() {
  const [messages, setMessages] = useState([])
  const [isLoading, setIsLoading] = useState(false)

  const send = useCallback(async (text) => {
    const trimmed = text.trim()
    if (!trimmed || isLoading) return null

    const append = (msg) => setMessages((prev) => [...prev, msg])

    append({ id: Date.now(), role: 'user', content: trimmed, timestamp: new Date() })
    setIsLoading(true)

    try {
      const response = await sendMessage(trimmed)
      append({
        id: Date.now() + 1,
        role: 'assistant',
        content: response,
        timestamp: new Date(),
      })
      return response
    } catch (err) {
      append({
        id: Date.now() + 1,
        role: 'assistant',
        content: `Error: ${err.message}`,
        timestamp: new Date(),
        isError: true,
      })
      return null
    } finally {
      setIsLoading(false)
    }
  }, [isLoading])

  const clearHistory = useCallback(() => {
    setMessages([])
  }, [])

  return { messages, isLoading, send, clearHistory }
}
|
||||
27
homeai-desktop/src/hooks/useSettings.js
Normal file
27
homeai-desktop/src/hooks/useSettings.js
Normal file
@@ -0,0 +1,27 @@
|
||||
import { useState, useCallback } from 'react'
|
||||
import { DEFAULT_SETTINGS } from '../lib/constants'
|
||||
|
||||
const STORAGE_KEY = 'homeai_desktop_settings'
|
||||
|
||||
// Read persisted settings, merged over defaults. Any storage or parse
// failure falls back to a fresh copy of the defaults.
function loadSettings() {
  try {
    const raw = localStorage.getItem(STORAGE_KEY)
    if (!raw) return { ...DEFAULT_SETTINGS }
    return { ...DEFAULT_SETTINGS, ...JSON.parse(raw) }
  } catch {
    return { ...DEFAULT_SETTINGS }
  }
}
|
||||
|
||||
/**
 * User settings persisted to localStorage.
 * Returns the current settings object plus updateSetting(key, value).
 */
export function useSettings() {
  const [settings, setSettings] = useState(loadSettings)

  const updateSetting = useCallback((key, value) => {
    setSettings((prev) => {
      const next = { ...prev, [key]: value }
      // localStorage.setItem can throw (private browsing, quota exceeded);
      // never let a persistence failure break the state update itself.
      try {
        localStorage.setItem(STORAGE_KEY, JSON.stringify(next))
      } catch (err) {
        console.error('Failed to persist settings:', err)
      }
      return next
    })
  }, [])

  return { settings, updateSetting }
}
|
||||
56
homeai-desktop/src/hooks/useTtsPlayback.js
Normal file
56
homeai-desktop/src/hooks/useTtsPlayback.js
Normal file
@@ -0,0 +1,56 @@
|
||||
import { useState, useRef, useCallback } from 'react'
|
||||
import { synthesize } from '../lib/api'
|
||||
|
||||
/**
 * Plays synthesized speech through the Web Audio API.
 *
 * @param {string} voice - Voice id forwarded to the TTS backend.
 * @returns {{ isPlaying: boolean, speak: (text: string) => Promise<void>, stop: () => void }}
 */
export function useTtsPlayback(voice) {
  const [isPlaying, setIsPlaying] = useState(false)
  const audioCtxRef = useRef(null)
  const sourceRef = useRef(null)

  // Lazily (re)create the AudioContext; a closed context cannot be reused.
  const getAudioContext = () => {
    if (!audioCtxRef.current || audioCtxRef.current.state === 'closed') {
      audioCtxRef.current = new AudioContext()
    }
    return audioCtxRef.current
  }

  // Stop and discard the current source, detaching its onended handler
  // first. Without the detach, stopping an old source fires its onended,
  // which would reset isPlaying/sourceRef while a NEW playback is active.
  const discardCurrentSource = () => {
    const current = sourceRef.current
    if (!current) return
    current.onended = null
    try { current.stop() } catch {}
    sourceRef.current = null
  }

  const speak = useCallback(async (text) => {
    if (!text) return

    discardCurrentSource()
    setIsPlaying(true)
    try {
      const audioData = await synthesize(text, voice)
      const ctx = getAudioContext()
      if (ctx.state === 'suspended') await ctx.resume()

      const audioBuffer = await ctx.decodeAudioData(audioData)
      const source = ctx.createBufferSource()
      source.buffer = audioBuffer
      source.connect(ctx.destination)
      sourceRef.current = source

      source.onended = () => {
        // Only clear state if this source is still the active one.
        if (sourceRef.current === source) {
          sourceRef.current = null
          setIsPlaying(false)
        }
      }
      source.start()
    } catch (err) {
      console.error('TTS playback error:', err)
      setIsPlaying(false)
    }
  }, [voice])

  const stop = useCallback(() => {
    discardCurrentSource()
    setIsPlaying(false)
  }, [])

  return { isPlaying, speak, stop }
}
|
||||
93
homeai-desktop/src/hooks/useVoiceInput.js
Normal file
93
homeai-desktop/src/hooks/useVoiceInput.js
Normal file
@@ -0,0 +1,93 @@
|
||||
import { useState, useRef, useCallback } from 'react'
|
||||
import { createRecorder } from '../lib/audio'
|
||||
import { transcribe } from '../lib/api'
|
||||
|
||||
/**
 * Voice input via either the local bridge (WAV recording + Whisper STT)
 * or the browser's Web Speech API.
 *
 * @param {'bridge'|'webspeech'} sttMode - Which recognition backend to use.
 * @returns {{ isRecording, isTranscribing, startRecording, stopRecording }}
 *   stopRecording resolves with the transcribed text, or null.
 */
export function useVoiceInput(sttMode = 'bridge') {
  const [isRecording, setIsRecording] = useState(false)
  const [isTranscribing, setIsTranscribing] = useState(false)
  const recorderRef = useRef(null)
  const webSpeechRef = useRef(null)

  // Feature-detect both the prefixed (Chrome/Safari) and the standard
  // name — the original check only looked for the webkit prefix even
  // though the implementation also supports window.SpeechRecognition.
  const webSpeechAvailable = () =>
    'webkitSpeechRecognition' in window || 'SpeechRecognition' in window

  const startRecording = useCallback(async () => {
    if (isRecording) return

    if (sttMode === 'webspeech' && webSpeechAvailable()) {
      return startWebSpeech()
    }

    try {
      const recorder = createRecorder()
      recorderRef.current = recorder
      await recorder.start()
      setIsRecording(true)
    } catch (err) {
      // Discard the half-initialised recorder so stopRecording can't use it.
      recorderRef.current = null
      console.error('Mic access error:', err)
    }
  }, [isRecording, sttMode])

  const stopRecording = useCallback(async () => {
    if (!isRecording) return null

    if (sttMode === 'webspeech' && webSpeechRef.current) {
      return stopWebSpeech()
    }

    setIsRecording(false)

    // Guard: the recorder may be missing if start() failed or state drifted.
    const recorder = recorderRef.current
    if (!recorder) return null

    setIsTranscribing(true)
    try {
      const wavBlob = await recorder.stop()
      recorderRef.current = null
      const text = await transcribe(wavBlob)
      return text
    } catch (err) {
      console.error('Transcription error:', err)
      return null
    } finally {
      setIsTranscribing(false)
    }
  }, [isRecording, sttMode])

  // Web Speech API backend: start listening immediately.
  function startWebSpeech() {
    return new Promise((resolve) => {
      const SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition
      const recognition = new SpeechRecognition()
      recognition.continuous = false
      recognition.interimResults = false
      recognition.lang = 'en-US'
      webSpeechRef.current = { recognition }
      recognition.start()
      setIsRecording(true)
      resolve()
    })
  }

  // Web Speech API backend: stop and resolve with the recognised text.
  function stopWebSpeech() {
    return new Promise((resolve) => {
      const { recognition } = webSpeechRef.current
      recognition.onresult = (e) => {
        const text = e.results[0]?.[0]?.transcript || ''
        setIsRecording(false)
        webSpeechRef.current = null
        resolve(text)
      }
      recognition.onerror = () => {
        setIsRecording(false)
        webSpeechRef.current = null
        resolve(null)
      }
      recognition.onend = () => {
        // If neither onresult nor onerror fired, resolve with null.
        setIsRecording(false)
        if (webSpeechRef.current) {
          webSpeechRef.current = null
          resolve(null)
        }
      }
      recognition.stop()
    })
  }

  return { isRecording, isTranscribing, startRecording, stopRecording }
}
|
||||
1
homeai-desktop/src/index.css
Normal file
1
homeai-desktop/src/index.css
Normal file
@@ -0,0 +1 @@
|
||||
@import "tailwindcss";
|
||||
44
homeai-desktop/src/lib/api.js
Normal file
44
homeai-desktop/src/lib/api.js
Normal file
@@ -0,0 +1,44 @@
|
||||
/**
 * POST a chat message to the bridge agent endpoint.
 * Resolves with the assistant's reply text; throws an Error carrying the
 * server-reported message (or the HTTP status) on failure.
 */
export async function sendMessage(text) {
  const payload = { message: text, agent: 'main' }
  const res = await fetch('/api/agent/message', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(payload),
  })

  if (res.ok) {
    const data = await res.json()
    return data.response
  }

  const err = await res.json().catch(() => ({ error: 'Request failed' }))
  throw new Error(err.error || `HTTP ${res.status}`)
}
|
||||
|
||||
/**
 * Request TTS audio for `text` using the given voice.
 * Resolves with the raw audio bytes as an ArrayBuffer; throws on failure.
 */
export async function synthesize(text, voice) {
  const res = await fetch('/api/tts', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ text, voice }),
  })
  if (!res.ok) {
    throw new Error('TTS failed')
  }
  const audio = await res.arrayBuffer()
  return audio
}
|
||||
|
||||
/**
 * Send recorded WAV audio to the bridge STT endpoint.
 * Resolves with the transcribed text; throws on failure.
 */
export async function transcribe(wavBlob) {
  const res = await fetch('/api/stt', {
    method: 'POST',
    headers: { 'Content-Type': 'audio/wav' },
    body: wavBlob,
  })
  if (!res.ok) {
    throw new Error('STT failed')
  }
  const { text } = await res.json()
  return text
}
|
||||
|
||||
/**
 * Probe the bridge health endpoint with a 5s timeout.
 * Resolves true only when the bridge reports status "online";
 * any network error, timeout or bad payload resolves false.
 */
export async function healthCheck() {
  try {
    const signal = AbortSignal.timeout(5000)
    const res = await fetch('/api/health', { signal })
    const { status } = await res.json()
    return status === 'online'
  } catch {
    return false
  }
}
|
||||
103
homeai-desktop/src/lib/audio.js
Normal file
103
homeai-desktop/src/lib/audio.js
Normal file
@@ -0,0 +1,103 @@
|
||||
const TARGET_RATE = 16000
|
||||
|
||||
/**
|
||||
* Record audio from the microphone, returning a WAV blob when stopped.
|
||||
* Returns { start, stop } — call start() to begin, stop() resolves with a Blob.
|
||||
*/
|
||||
/**
 * Record mono microphone audio, returning a 16 kHz 16-bit WAV blob.
 * Returns { start, stop } — call start() to begin; stop() resolves with
 * the encoded Blob.
 *
 * NOTE: uses the deprecated ScriptProcessorNode to capture raw Float32
 * PCM; an AudioWorklet would be the modern replacement.
 */
export function createRecorder() {
  let audioCtx
  let source
  let processor
  let stream
  let samples = []

  async function start() {
    samples = []
    stream = await navigator.mediaDevices.getUserMedia({
      audio: { channelCount: 1, sampleRate: TARGET_RATE },
    })
    try {
      audioCtx = new AudioContext({ sampleRate: TARGET_RATE })
      source = audioCtx.createMediaStreamSource(stream)

      // ScriptProcessorNode delivers raw Float32 PCM chunks; copy each
      // buffer because the engine reuses it between callbacks.
      processor = audioCtx.createScriptProcessor(4096, 1, 1)
      processor.onaudioprocess = (e) => {
        const input = e.inputBuffer.getChannelData(0)
        samples.push(new Float32Array(input))
      }
      source.connect(processor)
      processor.connect(audioCtx.destination)
    } catch (err) {
      // Don't leave the mic stream (and its indicator light) running if
      // audio-graph setup fails after getUserMedia succeeded.
      stream.getTracks().forEach((t) => t.stop())
      throw err
    }
  }

  async function stop() {
    // Tear down defensively: any of these may be undefined if start()
    // failed partway or stop() is called before a successful start().
    processor?.disconnect()
    source?.disconnect()
    stream?.getTracks().forEach((t) => t.stop())
    if (audioCtx && audioCtx.state !== 'closed') {
      await audioCtx.close()
    }

    // Merge all captured chunks into one contiguous buffer.
    const totalLength = samples.reduce((acc, s) => acc + s.length, 0)
    const merged = new Float32Array(totalLength)
    let offset = 0
    for (const chunk of samples) {
      merged.set(chunk, offset)
      offset += chunk.length
    }

    // Resample if the context didn't honour the requested sample rate.
    const resampled = audioCtx && audioCtx.sampleRate !== TARGET_RATE
      ? resample(merged, audioCtx.sampleRate, TARGET_RATE)
      : merged

    // Convert to 16-bit PCM WAV.
    return encodeWav(resampled, TARGET_RATE)
  }

  return { start, stop }
}
|
||||
|
||||
/**
 * Nearest-neighbour resample of Float32 PCM samples.
 * @param {Float32Array} samples - Input PCM in [-1, 1].
 * @param {number} fromRate - Source sample rate (Hz).
 * @param {number} toRate - Target sample rate (Hz).
 * @returns {Float32Array} Resampled PCM (the input itself when rates match).
 */
function resample(samples, fromRate, toRate) {
  // Nothing to do when rates already match.
  if (fromRate === toRate) return samples

  const ratio = fromRate / toRate
  const newLength = Math.round(samples.length / ratio)
  const result = new Float32Array(newLength)
  // Clamp the source index: Math.round(i * ratio) can land one past the
  // final sample (e.g. when upsampling), which would read undefined and
  // write NaN into the output.
  const last = samples.length - 1
  for (let i = 0; i < newLength; i++) {
    result[i] = samples[Math.min(Math.round(i * ratio), last)]
  }
  return result
}
|
||||
|
||||
/**
 * Encode mono Float32 PCM as a 16-bit WAV blob.
 * @param {Float32Array} samples - PCM samples in [-1, 1].
 * @param {number} sampleRate - Sample rate recorded in the WAV header.
 * @returns {Blob} An audio/wav blob (44-byte header + 16-bit LE data).
 */
function encodeWav(samples, sampleRate) {
  const BYTES_PER_SAMPLE = 2
  const HEADER_SIZE = 44
  const dataSize = samples.length * BYTES_PER_SAMPLE
  const view = new DataView(new ArrayBuffer(HEADER_SIZE + dataSize))

  // RIFF container header.
  writeString(view, 0, 'RIFF')
  view.setUint32(4, 36 + dataSize, true)
  writeString(view, 8, 'WAVE')

  // "fmt " sub-chunk: PCM, mono, 16-bit.
  writeString(view, 12, 'fmt ')
  view.setUint32(16, 16, true) // fmt chunk size
  view.setUint16(20, 1, true) // audio format: PCM
  view.setUint16(22, 1, true) // channels: mono
  view.setUint32(24, sampleRate, true)
  view.setUint32(28, sampleRate * BYTES_PER_SAMPLE, true) // byte rate
  view.setUint16(32, BYTES_PER_SAMPLE, true) // block align
  view.setUint16(34, 16, true) // bits per sample

  // "data" sub-chunk header.
  writeString(view, 36, 'data')
  view.setUint32(40, dataSize, true)

  // PCM payload — clamp each Float32 sample into signed 16-bit range.
  samples.forEach((sample, i) => {
    const clamped = Math.max(-1, Math.min(1, sample))
    const scaled = clamped < 0 ? clamped * 0x8000 : clamped * 0x7fff
    view.setInt16(HEADER_SIZE + i * BYTES_PER_SAMPLE, scaled, true)
  })

  return new Blob([view.buffer], { type: 'audio/wav' })
}

/**
 * Write an ASCII string into a DataView byte-by-byte at the given offset.
 */
function writeString(view, offset, str) {
  Array.from(str).forEach((ch, i) => {
    view.setUint8(offset + i, ch.charCodeAt(0))
  })
}
|
||||
37
homeai-desktop/src/lib/constants.js
Normal file
37
homeai-desktop/src/lib/constants.js
Normal file
@@ -0,0 +1,37 @@
|
||||
// Default Kokoro voice used when the user hasn't chosen one.
export const DEFAULT_VOICE = 'af_heart'

// Voice id prefixes encode accent (a = US, b = UK) and gender (f / m);
// the display label is derived from the id.
const ACCENTS = { a: 'US', b: 'UK' }

const VOICE_IDS = [
  'af_heart', 'af_alloy', 'af_aoede', 'af_bella', 'af_jessica', 'af_kore',
  'af_nicole', 'af_nova', 'af_river', 'af_sarah', 'af_sky',
  'am_adam', 'am_echo', 'am_eric', 'am_fenrir', 'am_liam', 'am_michael',
  'am_onyx', 'am_puck',
  'bf_alice', 'bf_emma', 'bf_isabella', 'bf_lily',
  'bm_daniel', 'bm_fable', 'bm_george', 'bm_lewis',
]

// All selectable voices as { id, label } pairs, e.g. 'af_heart' -> 'Heart (F, US)'.
export const VOICES = VOICE_IDS.map((id) => {
  const [prefix, name] = id.split('_')
  const gender = prefix[1].toUpperCase()
  const accent = ACCENTS[prefix[0]]
  const label = `${name[0].toUpperCase()}${name.slice(1)} (${gender}, ${accent})`
  return { id, label }
})

// Initial settings persisted for the assistant UI.
export const DEFAULT_SETTINGS = {
  voice: DEFAULT_VOICE,
  autoTts: true,
  sttMode: 'bridge', // 'bridge' or 'webspeech'
}
|
||||
10
homeai-desktop/src/main.jsx
Normal file
10
homeai-desktop/src/main.jsx
Normal file
@@ -0,0 +1,10 @@
|
||||
// Application entry point: mounts the React app into the #root element
// declared in index.html.
import { StrictMode } from 'react'
import { createRoot } from 'react-dom/client'
import './index.css'
import App from './App.jsx'

// StrictMode enables React's extra development-time checks (double-invoked
// renders/effects); it has no effect in production builds.
createRoot(document.getElementById('root')).render(
  <StrictMode>
    <App />
  </StrictMode>,
)
|
||||
102
homeai-desktop/vite.config.js
Normal file
102
homeai-desktop/vite.config.js
Normal file
@@ -0,0 +1,102 @@
|
||||
import { defineConfig } from 'vite'
|
||||
import react from '@vitejs/plugin-react'
|
||||
import tailwindcss from '@tailwindcss/vite'
|
||||
|
||||
/**
 * Vite dev-server plugin that proxies the app's /api/* endpoints to the
 * OpenClaw bridge on localhost:8081, adding permissive CORS headers so
 * devices on the LAN can reach the assistant through the dev server.
 */
function bridgeProxyPlugin() {
  return {
    name: 'bridge-proxy',
    configureServer(server) {
      // Proxy a request to the OpenClaw bridge.
      // Returns a connect-style middleware bound to a fixed bridge path
      // (the full target path is hardcoded because connect strips the
      // mount prefix from req.url).
      const proxyRequest = (targetPath) => async (req, res) => {
        // Answer CORS preflight locally — never forwarded to the bridge.
        if (req.method === 'OPTIONS') {
          res.writeHead(204, {
            'Access-Control-Allow-Origin': '*',
            'Access-Control-Allow-Methods': 'POST, GET, OPTIONS',
            'Access-Control-Allow-Headers': 'Content-Type',
          })
          res.end()
          return
        }

        try {
          const { default: http } = await import('http')
          // Buffer the full request body before forwarding so we can set
          // an accurate Content-Length on the upstream request.
          const chunks = []
          for await (const chunk of req) chunks.push(chunk)
          const body = Buffer.concat(chunks)

          await new Promise((resolve, reject) => {
            const proxyReq = http.request(
              `http://localhost:8081${targetPath}`,
              {
                method: req.method,
                headers: {
                  'Content-Type': req.headers['content-type'] || 'application/json',
                  'Content-Length': body.length,
                },
                // Generous timeout: agent replies (and TTS) can be slow.
                timeout: 120000,
              },
              (proxyRes) => {
                // Mirror the bridge's status/content type and re-add CORS.
                res.writeHead(proxyRes.statusCode, {
                  'Content-Type': proxyRes.headers['content-type'] || 'application/json',
                  'Access-Control-Allow-Origin': '*',
                })
                proxyRes.pipe(res)
                proxyRes.on('end', resolve)
                // Resolve (not reject) on a mid-stream error: headers are
                // already sent, so the 502 fallback below can't apply.
                proxyRes.on('error', resolve)
              }
            )
            proxyReq.on('error', reject)
            proxyReq.on('timeout', () => {
              proxyReq.destroy()
              reject(new Error('timeout'))
            })
            proxyReq.write(body)
            proxyReq.end()
          })
        } catch {
          // Bridge unreachable or timed out before any response was
          // relayed — report 502 only if nothing has been written yet.
          if (!res.headersSent) {
            res.writeHead(502, { 'Content-Type': 'application/json' })
            res.end(JSON.stringify({ error: 'Bridge unreachable' }))
          }
        }
      }

      server.middlewares.use('/api/agent/message', proxyRequest('/api/agent/message'))
      server.middlewares.use('/api/tts', proxyRequest('/api/tts'))
      server.middlewares.use('/api/stt', proxyRequest('/api/stt'))

      // Health check — direct to bridge. Always responds 200; liveness is
      // conveyed in the JSON `status` field so the client can't confuse a
      // dead bridge with a dead dev server.
      server.middlewares.use('/api/health', async (req, res) => {
        try {
          const { default: http } = await import('http')
          const start = Date.now()
          await new Promise((resolve, reject) => {
            const reqObj = http.get('http://localhost:8081/', { timeout: 5000 }, (resp) => {
              // Drain the response; only reachability matters here.
              resp.resume()
              resolve()
            })
            reqObj.on('error', reject)
            reqObj.on('timeout', () => { reqObj.destroy(); reject(new Error('timeout')) })
          })
          res.writeHead(200, { 'Content-Type': 'application/json' })
          res.end(JSON.stringify({ status: 'online', responseTime: Date.now() - start }))
        } catch {
          res.writeHead(200, { 'Content-Type': 'application/json' })
          res.end(JSON.stringify({ status: 'offline', responseTime: null }))
        }
      })
    },
  }
}
|
||||
|
||||
// Dev-server port; the launchd job and LAN clients expect this value.
const DEV_PORT = 5174

// Vite configuration: bridge proxy + Tailwind + React, exposed on the LAN.
export default defineConfig({
  plugins: [bridgeProxyPlugin(), tailwindcss(), react()],
  server: {
    // Bind all interfaces so other devices on the network can connect.
    host: '0.0.0.0',
    port: DEV_PORT,
  },
})
|
||||
Reference in New Issue
Block a user