// Microphone recorder utility for the desktop web assistant app (Vite + React):
// captures mono audio from the microphone and encodes it as a 16 kHz,
// 16-bit PCM WAV blob suitable for speech-to-text upload.
// Target output sample rate (Hz) for the encoded WAV — 16 kHz mono is the
// common rate for speech-to-text pipelines.
const TARGET_RATE = 16000

/**
 * Record audio from the microphone, returning a WAV blob when stopped.
 * Returns { start, stop } — call start() to begin, stop() resolves with a Blob.
 *
 * NOTE: uses the deprecated ScriptProcessorNode to capture raw PCM; it still
 * works in current browsers, but an AudioWorklet port is the long-term fix.
 *
 * @returns {{ start: () => Promise<void>, stop: () => Promise<Blob> }}
 */
export function createRecorder() {
  let audioCtx = null
  let source = null
  let processor = null
  let stream = null
  let samples = []

  async function start() {
    if (audioCtx) throw new Error('Recorder already started')
    samples = []
    stream = await navigator.mediaDevices.getUserMedia({
      audio: { channelCount: 1, sampleRate: TARGET_RATE },
    })
    // Browsers may ignore the requested sampleRate; stop() resamples if the
    // context actually runs at a different rate.
    audioCtx = new AudioContext({ sampleRate: TARGET_RATE })
    source = audioCtx.createMediaStreamSource(stream)

    // ScriptProcessorNode captures raw Float32 PCM
    processor = audioCtx.createScriptProcessor(4096, 1, 1)
    processor.onaudioprocess = (e) => {
      const input = e.inputBuffer.getChannelData(0)
      // Copy the chunk — the underlying buffer is reused by the audio thread.
      samples.push(new Float32Array(input))
    }
    source.connect(processor)
    // The processor must be connected to destination to keep firing; its
    // output buffer is left silent, so nothing is audibly played back.
    processor.connect(audioCtx.destination)
  }

  async function stop() {
    if (!audioCtx) throw new Error('Recorder was not started')

    // Tear down the graph. Drop the callback FIRST so no late audio-thread
    // callbacks push chunks while we are merging below.
    processor.onaudioprocess = null
    processor.disconnect()
    source.disconnect()
    stream.getTracks().forEach((t) => t.stop())
    const actualRate = audioCtx.sampleRate // capture before close()
    await audioCtx.close()
    // Release references so the graph can be GC'd and double-stop is caught.
    audioCtx = null
    source = null
    processor = null
    stream = null

    // Merge all sample chunks into one contiguous buffer.
    const totalLength = samples.reduce((acc, s) => acc + s.length, 0)
    const merged = new Float32Array(totalLength)
    let offset = 0
    for (const chunk of samples) {
      merged.set(chunk, offset)
      offset += chunk.length
    }

    // Resample if the actual sample rate differs from target.
    const resampled = actualRate !== TARGET_RATE
      ? resample(merged, actualRate, TARGET_RATE)
      : merged

    // Convert to 16-bit PCM WAV.
    return encodeWav(resampled, TARGET_RATE)
  }

  return { start, stop }
}
/**
 * Resample a mono Float32 PCM buffer from one sample rate to another using
 * linear interpolation.
 *
 * Fixes the previous nearest-index approach, whose Math.round(i * ratio)
 * could index one past the end of `samples` when upsampling (ratio < 1),
 * writing NaN into the result.
 *
 * @param {Float32Array} samples - input PCM samples
 * @param {number} fromRate - source sample rate in Hz
 * @param {number} toRate - target sample rate in Hz
 * @returns {Float32Array} resampled buffer
 */
function resample(samples, fromRate, toRate) {
  // Identity / empty input: nothing to interpolate.
  if (fromRate === toRate || samples.length === 0) return samples
  const ratio = fromRate / toRate
  const newLength = Math.round(samples.length / ratio)
  const result = new Float32Array(newLength)
  const last = samples.length - 1
  for (let i = 0; i < newLength; i++) {
    // Clamp so the final output samples never read past the buffer end.
    const pos = Math.min(i * ratio, last)
    const idx = Math.floor(pos)
    const frac = pos - idx
    const next = Math.min(idx + 1, last)
    // Linear interpolation between the two neighbouring source samples.
    result[i] = samples[idx] + (samples[next] - samples[idx]) * frac
  }
  return result
}
|
|
|
|
/**
 * Encode mono Float32 PCM samples as a 16-bit little-endian WAV blob.
 *
 * @param {Float32Array} samples - PCM samples, expected in [-1, 1]
 * @param {number} sampleRate - sample rate in Hz, written into the header
 * @returns {Blob} audio/wav blob (44-byte RIFF header followed by PCM data)
 */
function encodeWav(samples, sampleRate) {
  const BYTES_PER_SAMPLE = 2
  const dataSize = samples.length * BYTES_PER_SAMPLE
  const buffer = new ArrayBuffer(44 + dataSize)
  const view = new DataView(buffer)

  // Write an ASCII chunk tag into the header at the given byte offset.
  const tag = (offset, text) => {
    for (let i = 0; i < text.length; i++) {
      view.setUint8(offset + i, text.charCodeAt(i))
    }
  }

  // RIFF/WAVE header (44 bytes, PCM format).
  tag(0, 'RIFF')
  view.setUint32(4, 36 + dataSize, true) // file size minus 8-byte RIFF intro
  tag(8, 'WAVE')
  tag(12, 'fmt ')
  view.setUint32(16, 16, true) // fmt chunk size
  view.setUint16(20, 1, true) // audio format: PCM
  view.setUint16(22, 1, true) // channels: mono
  view.setUint32(24, sampleRate, true)
  view.setUint32(28, sampleRate * BYTES_PER_SAMPLE, true) // byte rate
  view.setUint16(32, BYTES_PER_SAMPLE, true) // block align
  view.setUint16(34, 16, true) // bits per sample
  tag(36, 'data')
  view.setUint32(40, dataSize, true)

  // PCM payload — clamp each float into [-1, 1], scale to signed 16-bit.
  let writeOffset = 44
  for (const sample of samples) {
    const clamped = Math.max(-1, Math.min(1, sample))
    view.setInt16(writeOffset, clamped < 0 ? clamped * 0x8000 : clamped * 0x7fff, true)
    writeOffset += BYTES_PER_SAMPLE
  }

  return new Blob([buffer], { type: 'audio/wav' })
}
/**
 * Write an ASCII string byte-by-byte into a DataView at the given offset.
 *
 * @param {DataView} view - destination view
 * @param {number} offset - byte offset to start writing at
 * @param {string} str - ASCII string to write (one byte per character)
 */
function writeString(view, offset, str) {
  let pos = offset
  for (const ch of str) {
    view.setUint8(pos, ch.charCodeAt(0))
    pos += 1
  }
}