// NOTE: removed extraction artifact (file-viewer header: "Files / 294 lines / 9.0 KiB / TypeScript")

'use client';
import { useRef, useState, useEffect, useCallback } from 'react';
import { useStore } from '@/lib/store';
import { api } from '@/lib/api';
import type { Message } from '@/lib/api';
const MAX_RECORDING_SECONDS = 60;

/**
 * Chat input bar: a textarea (Enter sends, Shift+Enter newlines), a Send
 * button, and a microphone button that records audio for up to
 * MAX_RECORDING_SECONDS, transcribes it via the API, and auto-sends the
 * transcript. Holds a screen wake lock while recording so the device does
 * not sleep mid-take.
 */
export default function MessageInput() {
  const { currentId, setCurrentId, addMessage, setIsLoading, isLoading, setConversations } = useStore();
  const [text, setText] = useState('');
  const [recording, setRecording] = useState(false);
  const [transcribing, setTranscribing] = useState(false);
  const [countdown, setCountdown] = useState(0);
  const textareaRef = useRef<HTMLTextAreaElement>(null);
  const mediaRecorderRef = useRef<MediaRecorder | null>(null);
  const chunksRef = useRef<Blob[]>([]);
  // Mic stream is kept alive between recordings so the user is not
  // re-prompted for permission on every take.
  const streamRef = useRef<MediaStream | null>(null);
  const wakeLockRef = useRef<WakeLockSentinel | null>(null);
  const countdownRef = useRef<ReturnType<typeof setInterval> | null>(null);
  const mimeTypeRef = useRef<string>('audio/webm');

  // Refocus the input whenever the active conversation changes.
  useEffect(() => {
    textareaRef.current?.focus();
  }, [currentId]);

  // Unmount cleanup: clear the countdown timer, release the wake lock,
  // stop any in-flight recording, and shut the microphone down. Without
  // this, unmounting mid-recording leaked the interval, kept the screen
  // awake, and left the mic indicator on.
  useEffect(() => {
    return () => {
      if (countdownRef.current) clearInterval(countdownRef.current);
      void wakeLockRef.current?.release().catch(() => {});
      const mr = mediaRecorderRef.current;
      if (mr && mr.state !== 'inactive') mr.stop();
      streamRef.current?.getTracks().forEach(t => t.stop());
    };
  }, []);

  // Grow the textarea with its content, capped at 160px (scroll beyond).
  function autoResize() {
    const el = textareaRef.current;
    if (!el) return;
    el.style.height = 'auto';
    el.style.height = Math.min(el.scrollHeight, 160) + 'px';
  }

  /**
   * Send a chat message (typed text by default, or an explicit override —
   * used by the transcription path). Creates a conversation on first send,
   * optimistically appends the user message, then appends the assistant
   * reply or an error placeholder.
   */
  const send = useCallback(async (messageOverride?: string) => {
    const message = (messageOverride ?? text).trim();
    if (!message || isLoading) return;
    setText('');
    if (textareaRef.current) textareaRef.current.style.height = 'auto';
    let convId = currentId;
    if (!convId) {
      const conv = await api.newConversation();
      convId = conv.id;
      setCurrentId(convId);
    }
    const userMsg: Message = {
      role: 'user',
      content: message,
      sources: [],
      timestamp: new Date().toISOString(),
    };
    addMessage(userMsg);
    setIsLoading(true);
    try {
      const data = await api.sendMessage(message, convId);
      setCurrentId(data.conversation_id);
      addMessage({
        role: 'assistant',
        content: data.response,
        sources: data.sources || [],
        timestamp: new Date().toISOString(),
      });
      // Refresh the sidebar list (titles/ordering may have changed).
      const updated = await api.getConversations();
      setConversations(updated);
    } catch {
      addMessage({
        role: 'assistant',
        content: 'Error — please try again.',
        sources: [],
        timestamp: new Date().toISOString(),
      });
    } finally {
      setIsLoading(false);
      textareaRef.current?.focus();
    }
  }, [text, isLoading, currentId, addMessage, setCurrentId, setIsLoading, setConversations]);

  // Best-effort screen wake lock; unsupported/denied is not an error.
  async function acquireWakeLock() {
    try {
      if ('wakeLock' in navigator) {
        wakeLockRef.current = await (navigator as Navigator & { wakeLock: { request: (type: string) => Promise<WakeLockSentinel> } }).wakeLock.request('screen');
      }
    } catch {
      // Wake lock not supported or denied — not critical
    }
  }

  function releaseWakeLock() {
    // release() returns a promise; a rejection here is non-critical, but
    // must not surface as an unhandled rejection.
    void wakeLockRef.current?.release().catch(() => {});
    wakeLockRef.current = null;
  }

  function stopRecording() {
    if (countdownRef.current) {
      clearInterval(countdownRef.current);
      countdownRef.current = null;
    }
    setCountdown(0);
    releaseWakeLock();
    // Guard the recorder state: stop() on an inactive MediaRecorder throws
    // InvalidStateError, and a user tap can race the auto-stop timer.
    const mr = mediaRecorderRef.current;
    if (mr && mr.state !== 'inactive') mr.stop();
    setRecording(false);
  }

  async function startRecording() {
    try {
      // Reuse existing stream if available, otherwise request new permission
      if (!streamRef.current || streamRef.current.getTracks().every(t => t.readyState === 'ended')) {
        streamRef.current = await navigator.mediaDevices.getUserMedia({ audio: true });
      }
      // Pick a container the browser supports (webm: Chrome/Firefox,
      // mp4: Safari, ogg as last resort).
      const mimeType = MediaRecorder.isTypeSupported('audio/webm')
        ? 'audio/webm'
        : MediaRecorder.isTypeSupported('audio/mp4')
          ? 'audio/mp4'
          : 'audio/ogg';
      mimeTypeRef.current = mimeType;
      chunksRef.current = [];
      const mr = new MediaRecorder(streamRef.current, { mimeType });
      mr.ondataavailable = e => {
        if (e.data.size > 0) chunksRef.current.push(e.data);
      };
      mr.onstop = async () => {
        if (chunksRef.current.length === 0) return;
        setTranscribing(true);
        let transcript = '';
        try {
          const blob = new Blob(chunksRef.current, { type: mimeTypeRef.current });
          const result = await api.transcribe(blob);
          transcript = result.text.trim();
        } catch (e) {
          console.error('Transcription failed', e);
        } finally {
          setTranscribing(false);
        }
        if (transcript) {
          // Auto-send after transcription
          await send(transcript);
        }
      };
      mr.start(1000); // emit a chunk every second
      mediaRecorderRef.current = mr;
      setRecording(true);
      setCountdown(MAX_RECORDING_SECONDS);
      // Prevent screen lock during recording
      await acquireWakeLock();
      // Countdown timer — auto-stop at max duration. Remaining time is
      // derived from a fixed deadline (immune to interval drift), and the
      // setCountdown updater stays pure: the previous version called
      // stopRecording() inside the updater, a side effect React may run
      // twice under StrictMode.
      const deadline = Date.now() + MAX_RECORDING_SECONDS * 1000;
      countdownRef.current = setInterval(() => {
        const remaining = Math.ceil((deadline - Date.now()) / 1000);
        if (remaining <= 0) {
          stopRecording();
        } else {
          setCountdown(remaining);
        }
      }, 1000);
    } catch {
      alert('Microphone access denied');
    }
  }

  async function toggleRecording() {
    if (recording) {
      stopRecording();
    } else {
      await startRecording();
    }
  }

  // Enter sends; Shift+Enter inserts a newline.
  function handleKeyDown(e: React.KeyboardEvent) {
    if (e.key === 'Enter' && !e.shiftKey) {
      e.preventDefault();
      send();
    }
  }

  const micColor = recording ? '#e8f5ed' : transcribing ? '#e8f5ed' : 'var(--text3)';
  const micBg = recording ? '#a32d2d' : transcribing ? 'var(--accent)' : 'var(--bg3)';

  return (
    <div
      className="flex gap-2 items-end flex-shrink-0"
      style={{
        borderTop: '1px solid var(--border)',
        padding: '12px 16px',
        paddingBottom: 'max(16px, env(safe-area-inset-bottom))',
        paddingLeft: 'max(20px, env(safe-area-inset-left))',
        paddingRight: 'max(20px, env(safe-area-inset-right))',
      }}
    >
      {/* Mic button */}
      <button
        onPointerUp={toggleRecording}
        className="flex-shrink-0 rounded-lg flex flex-col items-center justify-center transition-all"
        style={{
          width: '44px',
          height: '44px',
          background: micBg,
          border: 'none',
          cursor: 'pointer',
          color: micColor,
          touchAction: 'manipulation',
          flexShrink: 0,
          position: 'relative',
        }}
        aria-label={recording ? 'Stop recording' : transcribing ? 'Transcribing...' : 'Start recording'}
      >
        {transcribing ? (
          <svg width="18" height="18" viewBox="0 0 24 24" fill="currentColor">
            <circle cx="12" cy="12" r="3" opacity="0.6">
              <animate attributeName="opacity" values="0.6;1;0.6" dur="1s" repeatCount="indefinite"/>
            </circle>
          </svg>
        ) : recording ? (
          <>
            <svg width="12" height="12" viewBox="0 0 14 14" fill="currentColor">
              <rect width="14" height="14" rx="2"/>
            </svg>
            {/* Show the remaining seconds only in the final 15s */}
            {countdown <= 15 && (
              <span style={{ fontSize: '9px', lineHeight: 1, marginTop: '2px', opacity: 0.9 }}>
                {countdown}s
              </span>
            )}
          </>
        ) : (
          <svg width="18" height="18" viewBox="0 0 24 24" fill="currentColor">
            <path d="M12 1a4 4 0 0 1 4 4v6a4 4 0 0 1-8 0V5a4 4 0 0 1 4-4zm0 2a2 2 0 0 0-2 2v6a2 2 0 0 0 4 0V5a2 2 0 0 0-2-2zm-7 8a7 7 0 0 0 14 0h2a9 9 0 0 1-8 8.94V22h-2v-2.06A9 9 0 0 1 3 11h2z"/>
          </svg>
        )}
      </button>
      {/* Text input */}
      <div
        className="flex-1 rounded-xl min-w-0 overflow-hidden"
        style={{ background: 'var(--bg2)', border: '1px solid var(--border2)' }}
      >
        <textarea
          ref={textareaRef}
          value={text}
          onChange={e => { setText(e.target.value); autoResize(); }}
          onKeyDown={handleKeyDown}
          placeholder={
            recording
              ? `Recording... ${countdown > 0 ? countdown + 's left' : ''}`
              : transcribing
                ? 'Transcribing...'
                : 'Ask anything...'
          }
          rows={1}
          className="w-full block resize-none outline-none bg-transparent px-3 py-3 leading-relaxed min-w-0"
          style={{
            fontSize: 'var(--font-size)',
            color: 'var(--text)',
            minHeight: '44px',
            maxHeight: '160px',
            fontFamily: 'var(--font-sans)',
          }}
        />
      </div>
      {/* Send button */}
      <button
        onPointerUp={() => send()}
        disabled={isLoading || !text.trim()}
        className="flex-shrink-0 rounded-lg px-4 text-sm font-medium transition-opacity"
        style={{
          background: 'var(--accent)',
          color: '#e8f5ed',
          border: 'none',
          cursor: isLoading || !text.trim() ? 'not-allowed' : 'pointer',
          opacity: isLoading || !text.trim() ? 0.4 : 1,
          minHeight: '44px',
          fontFamily: 'var(--font-sans)',
          touchAction: 'manipulation',
        }}
      >
        Send
      </button>
    </div>
  );
}