'use client';
import { useRef, useState, useEffect, useCallback } from 'react';
import { useStore } from '@/lib/store';
import { api } from '@/lib/api';
import type { Message } from '@/lib/api';

/** Maximum voice-recording length before auto-stop, in seconds. */
const MAX_RECORDING_SECONDS = 60;

/** Minimal shape of the sentinel returned by `navigator.wakeLock.request()`. */
type WakeLockSentinelLike = { release: () => Promise<void> };

/**
 * Chat message composer: a textarea (Enter sends, Shift+Enter inserts a
 * newline) plus a push-to-talk mic button. Voice recordings are capped at
 * MAX_RECORDING_SECONDS, transcribed via `api.transcribe`, and auto-sent.
 */
export default function MessageInput() {
  const {
    currentId, setCurrentId, addMessage, setIsLoading, isLoading, setConversations,
  } = useStore();

  const [text, setText] = useState('');
  const [recording, setRecording] = useState(false);
  const [transcribing, setTranscribing] = useState(false);
  const [countdown, setCountdown] = useState(0);

  // NOTE(review): generic type arguments were stripped from the original by
  // extraction; these are restored from how each ref is used below.
  const textareaRef = useRef<HTMLTextAreaElement | null>(null);
  const mediaRecorderRef = useRef<MediaRecorder | null>(null);
  const chunksRef = useRef<Blob[]>([]);
  const streamRef = useRef<MediaStream | null>(null);
  const wakeLockRef = useRef<WakeLockSentinelLike | null>(null);
  const countdownRef = useRef<ReturnType<typeof setInterval> | null>(null);
  const mimeTypeRef = useRef('audio/webm');

  // Refocus the composer whenever the active conversation changes.
  useEffect(() => { textareaRef.current?.focus(); }, [currentId]);

  // FIX: release hardware resources if the component unmounts mid-recording.
  // Previously the countdown interval, recorder, mic tracks, and wake lock
  // all leaked (the mic indicator would stay on after navigation).
  useEffect(() => {
    return () => {
      if (countdownRef.current) clearInterval(countdownRef.current);
      wakeLockRef.current?.release().catch(() => {});
      if (mediaRecorderRef.current?.state === 'recording') mediaRecorderRef.current.stop();
      streamRef.current?.getTracks().forEach(t => t.stop());
    };
  }, []);

  /** Grow the textarea with its content, capped at 160px. */
  function autoResize() {
    const el = textareaRef.current;
    if (!el) return;
    el.style.height = 'auto';
    el.style.height = Math.min(el.scrollHeight, 160) + 'px';
  }

  /**
   * Send `messageOverride` (used by voice transcription) or the current text.
   * Lazily creates a conversation on first send, appends the user message
   * optimistically, then appends the assistant reply (or an error placeholder).
   */
  const send = useCallback(async (messageOverride?: string) => {
    const message = (messageOverride ?? text).trim();
    if (!message || isLoading) return;
    setText('');
    if (textareaRef.current) textareaRef.current.style.height = 'auto';

    let convId = currentId;
    if (!convId) {
      const conv = await api.newConversation();
      convId = conv.id;
      setCurrentId(convId);
    }

    const userMsg: Message = {
      role: 'user',
      content: message,
      sources: [],
      timestamp: new Date().toISOString(),
    };
    addMessage(userMsg);
    setIsLoading(true);
    try {
      const data = await api.sendMessage(message, convId);
      setCurrentId(data.conversation_id);
      addMessage({
        role: 'assistant',
        content: data.response,
        sources: data.sources || [],
        timestamp: new Date().toISOString(),
      });
      // Refresh the conversation list (titles/ordering may change server-side).
      const updated = await api.getConversations();
      setConversations(updated);
    } catch {
      addMessage({
        role: 'assistant',
        content: 'Error — please try again.',
        sources: [],
        timestamp: new Date().toISOString(),
      });
    } finally {
      setIsLoading(false);
      textareaRef.current?.focus();
    }
  }, [text, isLoading, currentId, addMessage, setCurrentId, setIsLoading, setConversations]);

  /** Best-effort: keep the screen awake while recording. */
  async function acquireWakeLock() {
    try {
      if ('wakeLock' in navigator) {
        wakeLockRef.current = await (navigator as Navigator & {
          wakeLock: { request: (type: string) => Promise<WakeLockSentinelLike> };
        }).wakeLock.request('screen');
      }
    } catch {
      // Wake lock not supported or denied — not critical
    }
  }

  function releaseWakeLock() {
    // FIX: release() returns a promise; swallow rejection so an
    // already-released lock doesn't surface as an unhandled rejection.
    wakeLockRef.current?.release().catch(() => {});
    wakeLockRef.current = null;
  }

  /** Stop countdown + wake lock + recorder; `onstop` then runs transcription. */
  function stopRecording() {
    if (countdownRef.current) {
      clearInterval(countdownRef.current);
      countdownRef.current = null;
    }
    setCountdown(0);
    releaseWakeLock();
    mediaRecorderRef.current?.stop();
    setRecording(false);
  }

  /**
   * Request the mic (reusing a live stream when possible), start a chunked
   * MediaRecorder, and arm the auto-stop countdown.
   */
  async function startRecording() {
    try {
      // Reuse existing stream if available, otherwise request new permission
      if (!streamRef.current || streamRef.current.getTracks().every(t => t.readyState === 'ended')) {
        streamRef.current = await navigator.mediaDevices.getUserMedia({ audio: true });
      }
      // First container format this browser can record (Safari lacks webm).
      const mimeType = MediaRecorder.isTypeSupported('audio/webm')
        ? 'audio/webm'
        : MediaRecorder.isTypeSupported('audio/mp4')
          ? 'audio/mp4'
          : 'audio/ogg';
      mimeTypeRef.current = mimeType;
      chunksRef.current = [];

      const mr = new MediaRecorder(streamRef.current, { mimeType });
      mr.ondataavailable = e => {
        if (e.data.size > 0) chunksRef.current.push(e.data);
      };
      mr.onstop = async () => {
        if (chunksRef.current.length === 0) return;
        setTranscribing(true);
        let transcript = '';
        try {
          const blob = new Blob(chunksRef.current, { type: mimeTypeRef.current });
          const result = await api.transcribe(blob);
          transcript = result.text.trim();
        } catch (e) {
          console.error('Transcription failed', e);
        } finally {
          setTranscribing(false);
        }
        if (transcript) {
          // Auto-send after transcription
          await send(transcript);
        }
      };

      mr.start(1000); // emit a chunk every second so data survives a crash
      mediaRecorderRef.current = mr;
      setRecording(true);
      setCountdown(MAX_RECORDING_SECONDS);
      // Prevent screen lock during recording
      await acquireWakeLock();

      // Countdown timer — auto-stop at max duration.
      // FIX: the original called stopRecording() inside the setCountdown
      // updater; updaters must be pure (React may invoke them more than once,
      // e.g. under StrictMode), which double-fired the stop side effect.
      // Track remaining time locally and stop outside the updater instead.
      let remaining = MAX_RECORDING_SECONDS;
      countdownRef.current = setInterval(() => {
        remaining -= 1;
        setCountdown(Math.max(remaining, 0));
        if (remaining <= 0) stopRecording();
      }, 1000);
    } catch {
      // NOTE(review): this also catches MediaRecorder/constructor failures,
      // not only permission denials — the message is approximate.
      alert('Microphone access denied');
    }
  }

  async function toggleRecording() {
    if (recording) {
      stopRecording();
    } else {
      await startRecording();
    }
  }

  /** Enter sends; Shift+Enter falls through to insert a newline. */
  function handleKeyDown(e: React.KeyboardEvent) {
    if (e.key === 'Enter' && !e.shiftKey) {
      e.preventDefault();
      send();
    }
  }

  const micColor = recording ? '#e8f5ed' : transcribing ? '#e8f5ed' : 'var(--text3)';
  const micBg = recording ? '#a32d2d' : transcribing ? 'var(--accent)' : 'var(--bg3)';

  // NOTE(review): the original JSX was lost in extraction (only the two
  // section comments below survived). This markup is reconstructed from the
  // component's state and handlers — confirm against the original file.
  return (
    <div style={{ display: 'flex', alignItems: 'flex-end', gap: 8 }}>
      {/* Mic button */}
      <button
        type="button"
        onClick={toggleRecording}
        disabled={transcribing}
        aria-label={recording ? 'Stop recording' : 'Start voice input'}
        style={{ color: micColor, background: micBg }}
      >
        {recording ? countdown : transcribing ? '…' : '🎤'}
      </button>
      {/* Text input */}
      <textarea
        ref={textareaRef}
        value={text}
        rows={1}
        placeholder="Type a message…"
        onChange={e => { setText(e.target.value); autoResize(); }}
        onKeyDown={handleKeyDown}
      />
    </div>
  );
}