/**
 * CommCoach Dossier View (Main View)
 *
 * Unified view per context: Coaching session, Tasks, Sessions history, Scores, Documents.
 * Voice first, always with text fallback.
 */
import React, { useState, useRef, useCallback, useEffect } from 'react';
import { useCommcoach } from '../../../hooks/useCommcoach';
import { useApiRequest } from '../../../hooks/useApi';
import { useInstanceId } from '../../../hooks/useCurrentInstance';
import api from '../../../api';
import {
  getDossierExportUrl,
  getSessionExportUrl,
  getDocumentsApi,
  uploadDocumentApi,
  deleteDocumentApi,
  getScoreHistoryApi,
  getPersonasApi,
  type CoachingDocument,
  type CoachingPersona,
} from '../../../api/commcoachApi';
import AutoScroll from '../../../components/UiComponents/AutoScroll/AutoScroll';
import ReactMarkdown from 'react-markdown';
import remarkGfm from 'remark-gfm';
import styles from './CommcoachDossierView.module.css';

type TabKey = 'coaching' | 'tasks' | 'sessions' | 'scores' | 'documents';

export const CommcoachDossierView: React.FC = () => {
  const coach = useCommcoach();
  const { request } = useApiRequest();
  const instanceId = useInstanceId();

  // NOTE(review): generic type arguments in this chunk were stripped by a
  // tooling/paste error (the source literally contained "useState>>({})" and
  // "useRef | null>(null)"). The parameters below are reconstructed from
  // usage elsewhere in the file — confirm each against version control.
  const [activeTab, setActiveTab] = useState<TabKey>('coaching');
  const [showNewContext, setShowNewContext] = useState(false);
  const [newTitle, setNewTitle] = useState('');
  const [newDescription, setNewDescription] = useState('');
  const [newCategory, setNewCategory] = useState('custom');
  const [newTaskTitle, setNewTaskTitle] = useState('');
  // Filled by getDocumentsApi and upload/delete handlers below.
  const [documents, setDocuments] = useState<CoachingDocument[]>([]);
  const [uploading, setUploading] = useState(false);
  // Keyed history from getScoreHistoryApi — element type unknown from this view; TODO confirm.
  const [scoreHistory, setScoreHistory] = useState<Record<string, unknown[]>>({});
  const [personas, setPersonas] = useState<CoachingPersona[]>([]);
  const [selectedPersonaId, setSelectedPersonaId] = useState<string | undefined>(undefined);

  // Text-fallback input; only .focus() is used here — TODO confirm input vs textarea.
  const inputRef = useRef<HTMLTextAreaElement | null>(null);
  // Mic stream from getUserMedia; tracks are stopped on teardown.
  const streamRef = useRef<MediaStream | null>(null);
  // Web Speech API recognizer; lib.dom typing varies per TS config, hence `any`.
  const speechRecognitionRef = useRef<any>(null);
  // Finalized utterance fragments accumulated between sends.
  const transcriptPartsRef = useRef<string[]>([]);
  // Index into SpeechRecognitionEvent.results already consumed as final.
  const processedResultIndexRef = useRef(0);
  // End-of-utterance silence timer.
  const silenceTimerRef = useRef<ReturnType<typeof setTimeout> | null>(null);
  const [isListening,
setIsListening] = useState(false); const [isUserSpeaking, setIsUserSpeaking] = useState(false); const [liveTranscript, setLiveTranscript] = useState(''); const [isTtsPlaying, setIsTtsPlaying] = useState(false); // #region agent log const debugLogsRef = useRef([]); const [debugVisible, setDebugVisible] = useState(false); const [debugSnapshot, setDebugSnapshot] = useState([]); const _dlog = useCallback((tag: string, info?: string) => { const t = new Date(); const ts = `${t.getMinutes()}:${String(t.getSeconds()).padStart(2,'0')}.${String(t.getMilliseconds()).padStart(3,'0')}`; const entry = `[${ts}] ${tag}${info ? ' ' + info : ''}`; debugLogsRef.current.push(entry); if (debugLogsRef.current.length > 80) debugLogsRef.current.shift(); }, []); useEffect(() => { (window as any).__dlog = _dlog; return () => { delete (window as any).__dlog; }; }, [_dlog]); // #endregion // Auto-select first context useEffect(() => { if (!coach.selectedContextId && coach.contexts.length > 0) { coach.selectContext(coach.contexts[0].id, { skipSessionResume: true }); } }, [coach.contexts, coach.selectedContextId, coach.selectContext]); // Load documents, scores, personas when context changes useEffect(() => { if (!instanceId || !coach.selectedContextId) return; getDocumentsApi(request, instanceId, coach.selectedContextId) .then(d => setDocuments(d)) .catch(() => {}); getScoreHistoryApi(request, instanceId, coach.selectedContextId) .then(h => setScoreHistory(h)) .catch(() => {}); }, [instanceId, request, coach.selectedContextId]); useEffect(() => { coach.onDocumentCreatedRef.current = (doc) => { setDocuments(prev => { if (prev.some(d => d.id === doc.id)) return prev; return [doc, ...prev]; }); }; return () => { coach.onDocumentCreatedRef.current = null; }; }, [coach.onDocumentCreatedRef]); useEffect(() => { if (!instanceId) return; getPersonasApi(request, instanceId) .then(p => setPersonas(p)) .catch(() => {}); }, [instanceId, request]); // TTS playing state sync useEffect(() => { if 
(!coach.session) return; const interval = setInterval(() => { setIsTtsPlaying(coach.isTtsPlayingRef.current); }, 200); return () => clearInterval(interval); }, [coach.session, coach.isTtsPlayingRef]); // Speech Recognition (only when coaching tab active + session running + not muted) useEffect(() => { if (activeTab !== 'coaching' || !coach.session || coach.isMuted) { if (speechRecognitionRef.current) { try { speechRecognitionRef.current.stop(); } catch { /* ignore */ } speechRecognitionRef.current = null; } if (streamRef.current) { streamRef.current.getTracks().forEach(t => t.stop()); streamRef.current = null; } setIsListening(false); setIsUserSpeaking(false); return; } const SpeechRecognitionApi = (window as any).SpeechRecognition || (window as any).webkitSpeechRecognition; if (!SpeechRecognitionApi) return; let cancelled = false; const MIN_WORDS_TO_INTERRUPT = 4; const init = async () => { try { const stream = await navigator.mediaDevices.getUserMedia({ audio: { echoCancellation: true, noiseSuppression: true }, }); if (cancelled) { stream.getTracks().forEach(t => t.stop()); return; } streamRef.current = stream; setIsListening(true); const recognition = new SpeechRecognitionApi(); recognition.continuous = true; recognition.interimResults = true; recognition.lang = 'de-DE'; const SILENCE_TIMEOUT_MS = 1500; const _sendAndClearTranscript = () => { const fullTranscript = transcriptPartsRef.current.join(' ').trim(); // #region agent log const wc = fullTranscript.split(/\s+/).filter(Boolean).length; _dlog('SEND', `words=${wc} send=${wc>=MIN_WORDS_TO_INTERRUPT} "${fullTranscript.substring(0,60)}"`); // #endregion if (fullTranscript) { const wordCount = fullTranscript.split(/\s+/).filter(Boolean).length; if (wordCount >= MIN_WORDS_TO_INTERRUPT) coach.sendMessage(fullTranscript); } transcriptPartsRef.current = []; processedResultIndexRef.current = 0; setLiveTranscript(''); setIsUserSpeaking(false); }; const _resetSilenceTimer = () => { if (silenceTimerRef.current) 
clearTimeout(silenceTimerRef.current); silenceTimerRef.current = setTimeout(() => { if (cancelled) return; _sendAndClearTranscript(); }, SILENCE_TIMEOUT_MS); }; recognition.onspeechstart = () => { // #region agent log _dlog('SPCH-START', `tts=${coach.isTtsPlayingRef.current}`); // #endregion if (cancelled || coach.isTtsPlayingRef.current) return; setIsUserSpeaking(true); transcriptPartsRef.current = []; processedResultIndexRef.current = 0; setLiveTranscript(''); _resetSilenceTimer(); }; recognition.onresult = (event: SpeechRecognitionEvent) => { if (cancelled) return; const interimParts: string[] = []; for (let i = processedResultIndexRef.current; i < event.results.length; i++) { const r = event.results[i]; if (r.isFinal) { const text = r[0].transcript.trim(); if (text) transcriptPartsRef.current.push(text); processedResultIndexRef.current = i + 1; } else { if (coach.isTtsPlayingRef.current) continue; const text = r[0].transcript.trim(); if (text) interimParts.push(text); } } const currentInterim = interimParts.join(' '); const preview = [...transcriptPartsRef.current, currentInterim].join(' ').trim(); setLiveTranscript(preview); if (preview) _resetSilenceTimer(); const finalizedWords = transcriptPartsRef.current.join(' ').split(/\s+/).filter(Boolean).length; if (coach.isTtsPlayingRef.current && finalizedWords >= MIN_WORDS_TO_INTERRUPT) { coach.stopTts(); } }; recognition.onspeechend = () => { // #region agent log _dlog('SPCH-END', `tts=${coach.isTtsPlayingRef.current} parts=${transcriptPartsRef.current.length}`); // #endregion if (cancelled) return; if (silenceTimerRef.current) clearTimeout(silenceTimerRef.current); if (coach.isTtsPlayingRef.current) { transcriptPartsRef.current = []; processedResultIndexRef.current = 0; setLiveTranscript(''); setIsUserSpeaking(false); return; } _sendAndClearTranscript(); }; recognition.onend = () => { // #region agent log _dlog('REC-END', `cancelled=${cancelled} sameRef=${speechRecognitionRef.current===recognition} 
tts=${coach.isTtsPlayingRef.current}`); // #endregion if (cancelled) return; if (coach.isTtsPlayingRef.current) return; if (speechRecognitionRef.current === recognition) { try { recognition.start(); } catch { speechRecognitionRef.current = null; } } }; recognition.onerror = (event: any) => { // #region agent log _dlog('REC-ERR', event.error); // #endregion if (event.error === 'no-speech' || event.error === 'aborted') return; console.warn('SpeechRecognition error:', event.error); }; speechRecognitionRef.current = recognition; recognition.start(); } catch (err) { console.warn('Mic access failed:', err); } }; init(); return () => { // #region agent log _dlog('CLEANUP', `tab=${activeTab} sess=${coach.session?.id} muted=${coach.isMuted}`); // #endregion cancelled = true; coach.stopTts(); if (silenceTimerRef.current) clearTimeout(silenceTimerRef.current); if (speechRecognitionRef.current) { try { speechRecognitionRef.current.stop(); } catch { /* ignore */ } speechRecognitionRef.current = null; } if (streamRef.current) { streamRef.current.getTracks().forEach(t => t.stop()); streamRef.current = null; } }; }, [activeTab, coach.session?.id, coach.isMuted]); // On mobile, SpeechRecognition and Audio output conflict for the audio session. // Pause recognition while TTS plays, resume when it stops. 
useEffect(() => { if (!speechRecognitionRef.current) return; if (isTtsPlaying) { // #region agent log _dlog('REC-SUSPEND', 'tts started, stopping recognition'); // #endregion try { speechRecognitionRef.current.stop(); } catch { /* ignore */ } } else { // #region agent log _dlog('REC-RESUME', 'tts ended, restarting recognition'); // #endregion try { speechRecognitionRef.current.start(); } catch { /* ignore */ } } }, [isTtsPlaying, _dlog]); // Reset mute when session ends useEffect(() => { if (!coach.session) coach.setMuted(false); }, [coach.session]); // Focus input on session start useEffect(() => { if (coach.session && inputRef.current) inputRef.current.focus(); }, [coach.session]); const handleSend = useCallback(async () => { if (!coach.inputValue.trim() || coach.isStreaming) return; await coach.sendMessage(coach.inputValue); }, [coach]); const handleKeyDown = useCallback((e: React.KeyboardEvent) => { if (e.key === 'Enter' && !e.shiftKey) { e.preventDefault(); handleSend(); } }, [handleSend]); const handleCreateContext = useCallback(async () => { if (!newTitle.trim()) return; await coach.createContext(newTitle, newDescription || undefined, newCategory); setNewTitle(''); setNewDescription(''); setNewCategory('custom'); setShowNewContext(false); }, [newTitle, newDescription, newCategory, coach]); const handleSelectContext = useCallback((contextId: string) => { coach.selectContext(contextId, { skipSessionResume: true }); }, [coach]); const handleUpload = useCallback(async (e: React.ChangeEvent) => { const file = e.target.files?.[0]; if (!file || !instanceId || !coach.selectedContextId) return; setUploading(true); try { const doc = await uploadDocumentApi(instanceId, coach.selectedContextId, file); setDocuments(prev => [doc, ...prev]); } catch { /* upload failed */ } finally { setUploading(false); e.target.value = ''; } }, [instanceId, coach.selectedContextId]); const handleDeleteDocument = useCallback(async (docId: string) => { if (!instanceId) return; try { await 
deleteDocumentApi(request, instanceId, docId);
      setDocuments(prev => prev.filter(d => d.id !== docId));
    } catch { /* delete failed */ }
  }, [instanceId, request]);

  // Download via the authenticated api client + a transient blob URL, so the
  // request carries auth headers a bare <a href> would not.
  const handleDownloadDocument = useCallback(async (doc: CoachingDocument) => {
    if (!doc.fileRef) return;
    try {
      const response = await api.get(`/api/files/${doc.fileRef}/download`, {
        responseType: 'blob',
      });
      const url = window.URL.createObjectURL(response.data);
      const a = document.createElement('a');
      a.href = url;
      a.download = doc.fileName;
      document.body.appendChild(a);
      a.click();
      document.body.removeChild(a);
      window.URL.revokeObjectURL(url);
    } catch (err) {
      console.error('Download failed:', err);
    }
  }, []);

  const handleAddTask = useCallback(async () => {
    if (!newTaskTitle.trim()) return;
    await coach.addTask(newTaskTitle);
    setNewTaskTitle('');
  }, [newTaskTitle, coach]);

  if (coach.loadingContexts) { return

Lade...

; } return (
{/* Context Selector */}
{coach.contexts.map(ctx => ( ))}
{/* New Context Form */} {showNewContext && (
setNewTitle(e.target.value)} onKeyDown={e => e.key === 'Enter' && handleCreateContext()} autoFocus /> setNewDescription(e.target.value)} />
)} {/* No context selected */} {!coach.selectedContextId && !showNewContext && coach.contexts.length === 0 && (

Willkommen beim Kommunikations-Coach

Erstelle ein Thema, um zu beginnen.

)} {coach.selectedContextId && (<> {/* Context Header */}

{coach.selectedContext?.title}

{coach.selectedContext?.description && (

{coach.selectedContext.description}

)}
{instanceId && ( <> Export MD Export PDF )}
{/* Tab Navigation */}
{(['coaching', 'tasks', 'sessions', 'scores', 'documents'] as TabKey[]).map(tab => ( ))}
{/* ============================================================ */} {/* COACHING TAB */} {/* ============================================================ */} {activeTab === 'coaching' && (
{!coach.session ? (

Starte eine neue Coaching-Session zu diesem Thema.

{personas.length > 0 && (
{personas.map(p => ( ))}
)}
) : ( <> {/* Session Header */}
Session aktiv
{isTtsPlaying && ( )} {coach.wasInterrupted && !isTtsPlaying && ( )}
{/* Messages */}
{coach.messages.map(msg => (
{msg.content}
{msg.createdAt ? new Date(msg.createdAt).toLocaleTimeString('de-CH', { hour: '2-digit', minute: '2-digit' }) : ''}
))} {liveTranscript && (
{liveTranscript}
)} {coach.isStreaming && (
{coach.streamingMessage ? ( {coach.streamingMessage} ) : (
{coach.streamingStatus || 'Coach denkt nach'}...
)}
)}
{/* Input Area */}
{coach.isMuted ? 'Stumm – Mikrofon aus' : coach.isStreaming ? (coach.streamingStatus || 'Coach antwortet...') : isUserSpeaking ? 'Spricht...' : isListening ? 'Mikrofon an – bitte sprechen' : 'Mikrofon wird gestartet...'}