Ida Dittrich 2026-03-06 15:39:06 +01:00
commit 9cf89124cd
4 changed files with 193 additions and 1025 deletions

useCommcoach.ts

@ -16,6 +16,8 @@ import {
type CoachingTask, type CoachingScore, type SSEEvent,
} from '../api/commcoachApi';
export type TtsEvent = 'playing' | 'ended' | 'paused' | 'error';
export interface CommcoachHookReturn {
contexts: CoachingContext[];
selectedContextId: string | null;
@ -46,12 +48,11 @@ export interface CommcoachHookReturn {
completeSession: () => Promise<void>;
cancelSession: () => Promise<void>;
isMuted: boolean;
setMuted: (muted: boolean) => void;
stopTts: () => void;
resumeTts: () => void;
wasInterrupted: boolean;
isTtsPlayingRef: MutableRefObject<boolean>;
hasAudioToResume: () => boolean;
onTtsEventRef: MutableRefObject<((event: TtsEvent) => void) | null>;
actionLoading: string | null;
@ -86,14 +87,11 @@ export function useCommcoach(): CommcoachHookReturn {
const [error, setError] = useState<string | null>(null);
const [inputValue, setInputValue] = useState('');
const [isMuted, setIsMuted] = useState(false);
const [wasInterrupted, setWasInterrupted] = useState(false);
const [actionLoading, setActionLoading] = useState<string | null>(null);
const isMountedRef = useRef(true);
const currentAudioRef = useRef<HTMLAudioElement | null>(null);
const isTtsPlayingRef = useRef(false);
const lastTtsAudioRef = useRef<string | null>(null);
const onTtsEventRef = useRef<((event: TtsEvent) => void) | null>(null);
const onDocumentCreatedRef = useRef<((doc: any) => void) | null>(null);
useEffect(() => { isMountedRef.current = true; return () => { isMountedRef.current = false; }; }, []);
@ -112,44 +110,53 @@ export function useCommcoach(): CommcoachHookReturn {
}
}, [request, instanceId]);
const _emitTts = useCallback((event: TtsEvent) => {
(window as any).__dlog?.(`TTS-${event.toUpperCase()}`);
onTtsEventRef.current?.(event);
}, []);
const _playTtsAudio = useCallback((audioB64: string) => {
if (!audioB64 || !isMountedRef.current) return;
if (currentAudioRef.current) {
currentAudioRef.current.pause();
currentAudioRef.current = null;
}
lastTtsAudioRef.current = audioB64;
setWasInterrupted(false);
isTtsPlayingRef.current = true;
try {
const audio = new Audio(`data:audio/mp3;base64,${audioB64}`);
currentAudioRef.current = audio;
audio.onended = () => {
currentAudioRef.current = null;
isTtsPlayingRef.current = false;
_emitTts('ended');
};
audio.play().catch(() => { isTtsPlayingRef.current = false; });
audio.play().then(() => {
_emitTts('playing');
}).catch(() => {
_emitTts('error');
});
} catch {
isTtsPlayingRef.current = false;
_emitTts('error');
}
}, []);
}, [_emitTts]);
const stopTts = useCallback(() => {
if (currentAudioRef.current) {
currentAudioRef.current.pause();
_emitTts('paused');
}
if (isTtsPlayingRef.current) {
setWasInterrupted(true);
}
isTtsPlayingRef.current = false;
}, []);
}, [_emitTts]);
const resumeTts = useCallback(() => {
if (currentAudioRef.current && currentAudioRef.current.paused) {
isTtsPlayingRef.current = true;
setWasInterrupted(false);
currentAudioRef.current.play().catch(() => { isTtsPlayingRef.current = false; });
currentAudioRef.current.play().then(() => {
_emitTts('playing');
}).catch(() => {
_emitTts('error');
});
}
}, [_emitTts]);
const hasAudioToResume = useCallback(() => {
return !!(currentAudioRef.current && currentAudioRef.current.paused && currentAudioRef.current.currentTime > 0);
}, []);
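
Not part of the diff: the commit replaces the old pattern of polling isTtsPlayingRef on an interval with push-style TTS events. A minimal consumer sketch, assuming the hook is imported from ../hooks/useCommcoach (path and hook name assumed from the views below):

// Sketch only: subscribing to TTS events and wiring stop/resume.
import { useEffect } from 'react';
import { useCommcoach, type TtsEvent } from '../hooks/useCommcoach';

function useTtsStatus() {
  const coach = useCommcoach();
  useEffect(() => {
    coach.onTtsEventRef.current = (event: TtsEvent) => {
      if (event === 'ended') console.debug('TTS finished');
    };
    return () => { coach.onTtsEventRef.current = null; };
  }, [coach.onTtsEventRef]);
  // coach.stopTts() pauses playback; while coach.hasAudioToResume()
  // returns true, coach.resumeTts() continues from the paused position.
}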
const selectContext = useCallback(async (contextId: string, options?: { skipSessionResume?: boolean }) => {
@ -251,7 +258,6 @@ export function useCommcoach(): CommcoachHookReturn {
setActionLoading('starting');
await _unlockAudioForTts();
setError(null);
setIsMuted(false);
setIsStreaming(true);
setStreamingStatus(null);
setMessages([]);
@ -269,7 +275,6 @@ export function useCommcoach(): CommcoachHookReturn {
const sess = eventData.session;
if (sess) {
setSession(sess);
setIsMuted(false);
}
if (eventData.resumed && Array.isArray(eventData.messages)) {
setMessages(eventData.messages);
@ -332,7 +337,10 @@ export function useCommcoach(): CommcoachHookReturn {
const sendMessage = useCallback(async (content: string) => {
const normalizedContent = content.trim();
if (!normalizedContent || !instanceId || !session) return;
stopTts();
if (currentAudioRef.current) {
currentAudioRef.current.pause();
currentAudioRef.current = null;
}
await _unlockAudioForTts();
setError(null);
setIsStreaming(true);
@ -416,11 +424,14 @@ export function useCommcoach(): CommcoachHookReturn {
setIsStreaming(false);
}
}
}, [instanceId, session, _playTtsAudio, stopTts]);
}, [instanceId, session, _playTtsAudio]);
const sendAudio = useCallback(async (audioBlob: Blob) => {
if (!instanceId || !session) return;
stopTts();
if (currentAudioRef.current) {
currentAudioRef.current.pause();
currentAudioRef.current = null;
}
await _unlockAudioForTts();
setError(null);
setIsStreaming(true);
@ -482,7 +493,7 @@ export function useCommcoach(): CommcoachHookReturn {
setIsStreaming(false);
}
}
}, [instanceId, session, stopTts]);
}, [instanceId, session]);
const completeSessionCb = useCallback(async () => {
if (!instanceId || !session) return;
@ -565,7 +576,8 @@ export function useCommcoach(): CommcoachHookReturn {
selectContext, createContext, archiveContext,
startSession: startSessionCb,
sendMessage, sendAudio, completeSession: completeSessionCb, cancelSession: cancelSessionCb,
isMuted, setMuted: setIsMuted, stopTts, resumeTts, wasInterrupted, isTtsPlayingRef,
stopTts, resumeTts, hasAudioToResume,
onTtsEventRef,
actionLoading,
toggleTaskStatus, addTask, removeTask,
onDocumentCreatedRef,

CommcoachCoachingView.module.css

@ -1,402 +0,0 @@
.coaching {
display: flex;
flex-direction: column;
height: calc(100vh - 140px);
overflow: hidden;
}
/* Context Tabs */
.contextBar {
border-bottom: 1px solid var(--border-color, #e0e0e0);
padding: 0.5rem 1rem;
flex-shrink: 0;
}
.contextTabs {
display: flex;
gap: 0.5rem;
overflow-x: auto;
align-items: center;
}
.contextTab {
display: flex;
align-items: center;
gap: 0.35rem;
padding: 0.4rem 0.75rem;
border: 1px solid var(--border-color, #e0e0e0);
border-radius: 20px;
background: var(--bg-card, #fff);
cursor: pointer;
font-size: 0.8rem;
white-space: nowrap;
transition: all 0.15s;
color: var(--text-primary, #333);
}
.contextTab:hover {
background: var(--bg-hover, #f5f5f5);
}
.contextTabActive {
background: var(--primary-color, #F25843);
color: #fff;
border-color: var(--primary-color, #F25843);
}
.contextTabIcon {
font-weight: 700;
font-size: 0.75rem;
}
.contextTabLabel {
max-width: 120px;
overflow: hidden;
text-overflow: ellipsis;
}
.contextTabNew {
width: 32px;
height: 32px;
border: 1px dashed var(--border-color, #ccc);
border-radius: 50%;
background: transparent;
cursor: pointer;
font-size: 1.2rem;
display: flex;
align-items: center;
justify-content: center;
color: var(--text-secondary, #888);
flex-shrink: 0;
}
.contextTabNew:hover {
background: var(--bg-hover, #f5f5f5);
color: var(--primary-color, #F25843);
}
/* New Context Form */
.newContextForm {
padding: 1rem;
background: var(--bg-card, #fff);
border-bottom: 1px solid var(--border-color, #e0e0e0);
display: flex;
flex-direction: column;
gap: 0.5rem;
}
.newContextInput,
.newContextSelect {
padding: 0.5rem 0.75rem;
border: 1px solid var(--border-color, #ddd);
border-radius: 6px;
font-size: 0.9rem;
background: var(--bg-input, #fff);
color: var(--text-primary, #333);
}
.newContextActions {
display: flex;
gap: 0.5rem;
}
/* Buttons */
.btnPrimary {
padding: 0.5rem 1.25rem;
background: var(--primary-color, #F25843);
color: #fff;
border: none;
border-radius: 6px;
cursor: pointer;
font-size: 0.85rem;
font-weight: 500;
}
.btnPrimary:hover:not(:disabled) { filter: brightness(1.08); }
.btnPrimary:disabled {
background: var(--color-medium-gray, #ccc);
color: var(--text-secondary, #888);
cursor: not-allowed;
opacity: 0.8;
}
.btnSecondary {
padding: 0.5rem 1.25rem;
background: transparent;
color: var(--text-primary, #333);
border: 1px solid var(--border-color, #ddd);
border-radius: 6px;
cursor: pointer;
font-size: 0.85rem;
}
.btnSecondary:hover:not(:disabled) {
background: var(--hover-bg, #f5f5f5);
border-color: var(--primary-color, #F25843);
color: var(--primary-color, #F25843);
}
.btnSmall {
padding: 0.3rem 0.75rem;
background: var(--primary-color, #F25843);
color: #fff;
border: none;
border-radius: 4px;
cursor: pointer;
font-size: 0.8rem;
}
.btnSmall:hover:not(:disabled) { filter: brightness(1.08); }
.btnSmallDanger {
padding: 0.3rem 0.75rem;
background: transparent;
color: var(--error-color, #dc2626);
border: 1px solid var(--error-color, #dc2626);
border-radius: 4px;
cursor: pointer;
font-size: 0.8rem;
}
.btnSmallDanger:hover:not(:disabled) {
background: var(--error-color, #dc2626);
color: #fff;
}
/* No context */
.noContext {
display: flex;
flex-direction: column;
align-items: center;
justify-content: center;
flex: 1;
text-align: center;
padding: 2rem;
color: var(--text-secondary, #666);
}
.noContext h3 {
color: var(--text-primary, #333);
margin-bottom: 0.5rem;
}
.noContext p {
margin-bottom: 1rem;
}
/* Chat Area */
.chatArea {
flex: 1;
display: flex;
flex-direction: column;
overflow: hidden;
}
.sessionStart {
display: flex;
flex-direction: column;
align-items: center;
justify-content: center;
flex: 1;
text-align: center;
padding: 2rem;
}
.sessionStart h3 {
color: var(--text-primary, #333);
margin-bottom: 0.5rem;
}
.sessionStart p {
color: var(--text-secondary, #666);
margin-bottom: 1rem;
}
.sessionHeader {
display: flex;
justify-content: space-between;
align-items: center;
padding: 0.5rem 1rem;
background: var(--bg-card, #fff);
border-bottom: 1px solid var(--border-color, #e0e0e0);
flex-shrink: 0;
}
.sessionLabel {
font-size: 0.85rem;
font-weight: 500;
color: var(--text-primary, #333);
}
.sessionActions {
display: flex;
gap: 0.5rem;
}
/* Messages */
.messages {
flex: 1;
padding: 1rem;
overflow-y: auto;
display: flex;
flex-direction: column;
gap: 0.75rem;
}
.message {
max-width: 80%;
}
.messageUser {
align-self: flex-end;
}
.messageAssistant {
align-self: flex-start;
}
.messageBubble {
padding: 0.75rem 1rem;
border-radius: 12px;
font-size: 0.9rem;
line-height: 1.5;
}
.messageUser .messageBubble {
background: var(--primary-color, #F25843);
color: #fff;
border-bottom-right-radius: 4px;
}
.messageLive {
opacity: 0.7;
font-style: italic;
border: 1px dashed rgba(255, 255, 255, 0.4);
}
.messageAssistant .messageBubble {
background: var(--bg-card, #f5f5f5);
color: var(--text-primary, #333);
border: 1px solid var(--border-color, #e0e0e0);
border-bottom-left-radius: 4px;
}
.messageBubble p {
margin: 0 0 0.5rem;
}
.messageBubble p:last-child {
margin-bottom: 0;
}
.messageTime {
font-size: 0.7rem;
color: var(--text-secondary, #999);
margin-top: 0.2rem;
padding: 0 0.25rem;
}
.messageUser .messageTime {
text-align: right;
}
.typing {
color: var(--text-secondary, #888);
font-style: italic;
}
.typingDots {
animation: blink 1.4s infinite both;
}
@keyframes blink {
0%, 80%, 100% { opacity: 0; }
40% { opacity: 1; }
}
/* Input */
.inputArea {
display: flex;
flex-direction: column;
gap: 0.5rem;
padding: 0.75rem 1rem;
border-top: 1px solid var(--border-color, #e0e0e0);
background: var(--bg-card, #fff);
flex-shrink: 0;
}
.textInputRow {
display: flex;
gap: 0.5rem;
align-items: flex-end;
}
.textInput {
flex: 1;
min-width: 0;
padding: 0.6rem 0.75rem;
border: 1px solid var(--border-color, #ddd);
border-radius: 8px;
resize: none;
font-size: 0.9rem;
font-family: inherit;
min-height: 40px;
max-height: 120px;
background: var(--bg-input, #fff);
color: var(--text-primary, #333);
}
.sendBtn {
padding: 0.6rem 1.25rem;
background: var(--primary-color, #F25843);
color: #fff;
border: none;
border-radius: 8px;
cursor: pointer;
font-size: 0.85rem;
font-weight: 500;
align-self: flex-end;
}
.sendBtn:hover:not(:disabled) { filter: brightness(1.08); }
.sendBtn:disabled {
background: var(--color-medium-gray, #ccc);
color: var(--text-secondary, #888);
cursor: not-allowed;
opacity: 0.8;
}
.voiceStatus {
display: flex;
align-items: center;
padding: 0.25rem 0;
min-height: 1.5rem;
}
.voiceIndicator {
font-size: 0.9rem;
color: var(--text-secondary, #888);
}
.voiceIndicator.voiceActive {
color: var(--primary-color, #F25843);
font-weight: 500;
}
.voiceActive {
border: 2px solid #22c55e;
}
.mutedActive {
background: var(--color-medium-gray, #999);
color: #fff;
border-color: var(--color-medium-gray, #999);
}
.errorBanner {
padding: 0.5rem 1rem;
background: #fde8e8;
color: var(--color-error, #d32f2f);
font-size: 0.85rem;
text-align: center;
}

CommcoachCoachingView.tsx

@ -1,526 +0,0 @@
/**
* CommCoach Coaching View
*
* Voice first, always with text fallback (CONCEPT.md).
* Chat and voice in parallel: microphone and text input usable at the same time.
* Mute: mutes the microphone only, no mode switch.
*/
import React, { useState, useRef, useEffect, useCallback } from 'react';
import { useSearchParams } from 'react-router-dom';
import { useCommcoach } from '../../../hooks/useCommcoach';
import { useApiRequest } from '../../../hooks/useApi';
import { useInstanceId } from '../../../hooks/useCurrentInstance';
import { getPersonasApi, type CoachingPersona } from '../../../api/commcoachApi';
import AutoScroll from '../../../components/UiComponents/AutoScroll/AutoScroll';
import ReactMarkdown from 'react-markdown';
import remarkGfm from 'remark-gfm';
import styles from './CommcoachCoachingView.module.css';
export const CommcoachCoachingView: React.FC = () => {
const [searchParams, setSearchParams] = useSearchParams();
const coach = useCommcoach();
const { request } = useApiRequest();
const instanceId = useInstanceId();
const [showNewContext, setShowNewContext] = useState(false);
const [newTitle, setNewTitle] = useState('');
const [newDescription, setNewDescription] = useState('');
const [newCategory, setNewCategory] = useState('custom');
const inputRef = useRef<HTMLTextAreaElement>(null);
const [personas, setPersonas] = useState<CoachingPersona[]>([]);
const [selectedPersonaId, setSelectedPersonaId] = useState<string | undefined>(undefined);
const streamRef = useRef<MediaStream | null>(null);
const speechRecognitionRef = useRef<SpeechRecognition | null>(null);
const transcriptPartsRef = useRef<string[]>([]);
const processedResultIndexRef = useRef(0);
const silenceTimerRef = useRef<ReturnType<typeof setTimeout> | null>(null);
const [isListening, setIsListening] = useState(false);
const [isUserSpeaking, setIsUserSpeaking] = useState(false);
const [liveTranscript, setLiveTranscript] = useState('');
const [isTtsPlaying, setIsTtsPlaying] = useState(false);
const handleSend = useCallback(async () => {
if (!coach.inputValue.trim() || coach.isStreaming) return;
await coach.sendMessage(coach.inputValue);
}, [coach]);
const handleKeyDown = useCallback((e: React.KeyboardEvent) => {
if (e.key === 'Enter' && !e.shiftKey) {
e.preventDefault();
handleSend();
}
}, [handleSend]);
const handleCreateContext = useCallback(async () => {
if (!newTitle.trim()) return;
await coach.createContext(newTitle, newDescription || undefined, newCategory);
setNewTitle('');
setNewDescription('');
setNewCategory('custom');
setShowNewContext(false);
}, [newTitle, newDescription, newCategory, coach]);
useEffect(() => {
const contextId = searchParams.get('context');
if (contextId && coach.contexts.some(c => c.id === contextId)) {
coach.selectContext(contextId);
setSearchParams({}, { replace: true });
}
}, [searchParams, coach.contexts, coach.selectContext, setSearchParams]);
useEffect(() => {
if (coach.session && inputRef.current) {
inputRef.current.focus();
}
}, [coach.session]);
useEffect(() => {
if (!coach.session) {
coach.setMuted(false);
}
}, [coach.session]);
useEffect(() => {
if (!instanceId) return;
getPersonasApi(request, instanceId)
.then(p => setPersonas(p))
.catch(() => {});
}, [instanceId, request]);
useEffect(() => {
if (!coach.session) return;
const interval = setInterval(() => {
setIsTtsPlaying(coach.isTtsPlayingRef.current);
}, 200);
return () => clearInterval(interval);
}, [coach.session, coach.isTtsPlayingRef]);
useEffect(() => {
if (!coach.session || coach.isMuted) {
if (speechRecognitionRef.current) {
try {
speechRecognitionRef.current.stop();
} catch {
// ignore
}
speechRecognitionRef.current = null;
}
if (streamRef.current) {
streamRef.current.getTracks().forEach((t) => t.stop());
streamRef.current = null;
}
setIsListening(false);
setIsUserSpeaking(false);
return;
}
const SpeechRecognitionApi = (window as any).SpeechRecognition || (window as any).webkitSpeechRecognition;
if (!SpeechRecognitionApi) {
console.warn('SpeechRecognition not supported');
return;
}
let cancelled = false;
const MIN_WORDS_TO_INTERRUPT = 2;
const lang = 'de-DE';
const init = async () => {
try {
const stream = await navigator.mediaDevices.getUserMedia({
audio: { echoCancellation: true, noiseSuppression: true },
});
if (cancelled) {
stream.getTracks().forEach((t) => t.stop());
return;
}
streamRef.current = stream;
setIsListening(true);
const recognition = new SpeechRecognitionApi();
recognition.continuous = true;
recognition.interimResults = true;
recognition.lang = lang;
recognition.onstart = () => {
if (cancelled) return;
};
const SILENCE_TIMEOUT_MS = 5000;
const _sendAndClearTranscript = () => {
const fullTranscript = transcriptPartsRef.current.join(' ').trim();
if (fullTranscript) {
const wordCount = fullTranscript.split(/\s+/).filter(Boolean).length;
if (wordCount >= MIN_WORDS_TO_INTERRUPT) coach.sendMessage(fullTranscript);
}
transcriptPartsRef.current = [];
processedResultIndexRef.current = 0;
setLiveTranscript('');
setIsUserSpeaking(false);
};
const _resetSilenceTimer = () => {
if (silenceTimerRef.current) clearTimeout(silenceTimerRef.current);
silenceTimerRef.current = setTimeout(() => {
if (cancelled) return;
_sendAndClearTranscript();
}, SILENCE_TIMEOUT_MS);
};
recognition.onspeechstart = () => {
if (cancelled || coach.isTtsPlayingRef.current) return;
setIsUserSpeaking(true);
transcriptPartsRef.current = [];
processedResultIndexRef.current = 0;
setLiveTranscript('');
_resetSilenceTimer();
};
recognition.onresult = (event: SpeechRecognitionEvent) => {
if (cancelled || coach.isTtsPlayingRef.current) return;
const interimParts: string[] = [];
for (let i = processedResultIndexRef.current; i < event.results.length; i++) {
const r = event.results[i];
if (r.isFinal) {
const text = r[0].transcript.trim();
if (text) transcriptPartsRef.current.push(text);
processedResultIndexRef.current = i + 1;
} else {
const text = r[0].transcript.trim();
if (text) interimParts.push(text);
}
}
const currentInterim = interimParts.join(' ');
const preview = [...transcriptPartsRef.current, currentInterim].join(' ').trim();
setLiveTranscript(preview);
if (preview) _resetSilenceTimer();
const totalWords = preview.split(/\s+/).filter(Boolean).length;
if (totalWords >= MIN_WORDS_TO_INTERRUPT) coach.stopTts();
};
recognition.onspeechend = () => {
if (cancelled) return;
if (silenceTimerRef.current) clearTimeout(silenceTimerRef.current);
if (coach.isTtsPlayingRef.current) {
transcriptPartsRef.current = [];
processedResultIndexRef.current = 0;
setLiveTranscript('');
setIsUserSpeaking(false);
return;
}
_sendAndClearTranscript();
};
recognition.onend = () => {
if (cancelled) return;
setIsUserSpeaking(false);
transcriptPartsRef.current = [];
setLiveTranscript('');
if (speechRecognitionRef.current === recognition) {
try {
recognition.start();
} catch {
speechRecognitionRef.current = null;
}
}
};
recognition.onerror = (event: any) => {
if (event.error === 'no-speech' || event.error === 'aborted') return;
console.warn('SpeechRecognition error:', event.error);
};
speechRecognitionRef.current = recognition;
recognition.start();
} catch (err) {
console.warn('Mic access failed:', err);
}
};
init();
return () => {
cancelled = true;
coach.stopTts();
if (silenceTimerRef.current) clearTimeout(silenceTimerRef.current);
if (speechRecognitionRef.current) {
try {
speechRecognitionRef.current.stop();
} catch {
// ignore
}
speechRecognitionRef.current = null;
}
if (streamRef.current) {
streamRef.current.getTracks().forEach((t) => t.stop());
streamRef.current = null;
}
};
}, [coach.session, coach.isMuted, coach.stopTts, coach.sendMessage]);
return (
<div className={styles.coaching}>
{/* Context Tabs */}
<div className={styles.contextBar}>
<div className={styles.contextTabs}>
{coach.contexts.map(ctx => (
<button
key={ctx.id}
className={`${styles.contextTab} ${coach.selectedContextId === ctx.id ? styles.contextTabActive : ''}`}
onClick={() => coach.selectContext(ctx.id)}
title={ctx.title}
>
<span className={styles.contextTabIcon}>{_categoryIcon(ctx.category)}</span>
<span className={styles.contextTabLabel}>{ctx.title}</span>
</button>
))}
<button
className={styles.contextTabNew}
onClick={() => setShowNewContext(!showNewContext)}
title="Neues Thema"
>
+
</button>
</div>
</div>
{/* New Context Form */}
{showNewContext && (
<div className={styles.newContextForm}>
<input
className={styles.newContextInput}
placeholder="Thema / Titel..."
value={newTitle}
onChange={e => setNewTitle(e.target.value)}
onKeyDown={e => e.key === 'Enter' && handleCreateContext()}
autoFocus
/>
<input
className={styles.newContextInput}
placeholder="Beschreibung (optional)"
value={newDescription}
onChange={e => setNewDescription(e.target.value)}
/>
<select
className={styles.newContextSelect}
value={newCategory}
onChange={e => setNewCategory(e.target.value)}
>
<option value="custom">Individuell</option>
<option value="leadership">Führung</option>
<option value="conflict">Konflikt</option>
<option value="negotiation">Verhandlung</option>
<option value="presentation">Präsentation</option>
<option value="feedback">Feedback</option>
<option value="delegation">Delegation</option>
<option value="changeManagement">Change Management</option>
</select>
<div className={styles.newContextActions}>
<button className={styles.btnPrimary} onClick={handleCreateContext} disabled={!newTitle.trim() || !!coach.actionLoading}>
{coach.actionLoading === 'creating' ? 'Wird erstellt...' : 'Erstellen'}
</button>
<button className={styles.btnSecondary} onClick={() => setShowNewContext(false)}>
Abbrechen
</button>
</div>
</div>
)}
{/* No Context Selected */}
{!coach.selectedContextId && !showNewContext && (
<div className={styles.noContext}>
<h3>Willkommen beim Kommunikations-Coach</h3>
<p>Wähle ein bestehendes Thema oder erstelle ein neues, um zu beginnen.</p>
<button className={styles.btnPrimary} onClick={() => setShowNewContext(true)}>
Neues Thema erstellen
</button>
</div>
)}
{/* Chat Area */}
{coach.selectedContextId && (
<div className={styles.chatArea}>
{/* Session controls */}
{!coach.session && (
<div className={styles.sessionStart}>
<h3>{coach.selectedContext?.title}</h3>
<p>{coach.selectedContext?.description || 'Starte eine neue Coaching-Session zu diesem Thema.'}</p>
{personas.length > 0 && (
<div className={styles.personaSelector}>
<label className={styles.personaLabel}>Gesprächspartner wählen:</label>
<div className={styles.personaGrid}>
{personas.map(p => (
<button
key={p.id}
className={`${styles.personaChip} ${selectedPersonaId === p.id ? styles.personaChipActive : ''}`}
onClick={() => setSelectedPersonaId(selectedPersonaId === p.id ? undefined : p.id)}
title={p.description}
>
<span className={styles.personaGender}>
{p.gender === 'f' ? '\u2640' : p.gender === 'm' ? '\u2642' : '\u25CB'}
</span>
<span className={styles.personaName}>{p.label}</span>
</button>
))}
</div>
</div>
)}
<button
className={styles.btnPrimary}
onClick={() => coach.startSession(selectedPersonaId)}
disabled={!!coach.actionLoading}
>
{coach.actionLoading === 'starting'
? 'Wird gestartet...'
: selectedPersonaId && personas.find(p => p.id === selectedPersonaId)
? `Session starten mit ${personas.find(p => p.id === selectedPersonaId)!.label}`
: 'Session starten'}
</button>
</div>
)}
{/* Messages */}
{coach.session && (
<>
<div className={styles.sessionHeader}>
<span className={styles.sessionLabel}>
Session aktiv - {coach.selectedContext?.title}
</span>
<div className={styles.sessionActions}>
{isTtsPlaying && (
<button className={styles.btnSmallDanger} onClick={coach.stopTts}>
Stop
</button>
)}
{coach.wasInterrupted && !isTtsPlaying && (
<button className={styles.btnSmall} onClick={coach.resumeTts}>
Weitersprechen
</button>
)}
<button
className={`${styles.btnSmall} ${coach.isMuted ? styles.mutedActive : ''}`}
onClick={() => coach.setMuted(!coach.isMuted)}
title={coach.isMuted ? 'Stummschaltung aufheben' : 'Stummschalten'}
>
{coach.isMuted ? '\u{1F507}' : '\u{1F3A4}'} {coach.isMuted ? 'Stumm' : 'Ton an'}
</button>
<button
className={styles.btnSmall}
onClick={coach.completeSession}
disabled={!!coach.actionLoading}
>
{coach.actionLoading === 'completing' ? 'Wird abgeschlossen...' : 'Abschliessen'}
</button>
<button
className={styles.btnSmallDanger}
onClick={coach.cancelSession}
disabled={!!coach.actionLoading}
>
{coach.actionLoading === 'cancelling' ? 'Wird abgebrochen...' : 'Abbrechen'}
</button>
</div>
</div>
<AutoScroll scrollDependency={coach.messages.length + (coach.isStreaming ? 1 : 0) + liveTranscript.length}>
<div className={styles.messages}>
{coach.messages.map(msg => (
<div
key={msg.id}
className={`${styles.message} ${msg.role === 'user' ? styles.messageUser : styles.messageAssistant}`}
>
<div className={styles.messageBubble}>
<ReactMarkdown remarkPlugins={[remarkGfm]}>
{msg.content}
</ReactMarkdown>
</div>
<div className={styles.messageTime}>
{msg.createdAt ? new Date(msg.createdAt).toLocaleTimeString('de-CH', { hour: '2-digit', minute: '2-digit' }) : ''}
</div>
</div>
))}
{liveTranscript && (
<div className={`${styles.message} ${styles.messageUser}`}>
<div className={`${styles.messageBubble} ${styles.messageLive}`}>
{liveTranscript}
</div>
</div>
)}
{coach.isStreaming && (
<div className={`${styles.message} ${styles.messageAssistant}`}>
<div className={styles.messageBubble}>
{coach.streamingMessage ? (
<ReactMarkdown remarkPlugins={[remarkGfm]}>
{coach.streamingMessage}
</ReactMarkdown>
) : (
<div className={styles.typing}>
{coach.streamingStatus || 'Coach denkt nach'}
<span className={styles.typingDots}>...</span>
</div>
)}
</div>
</div>
)}
</div>
</AutoScroll>
{/* Input: chat and voice in parallel (CONCEPT: voice first, always with text fallback) */}
<div className={styles.inputArea}>
<div className={styles.voiceStatus}>
<span className={`${styles.voiceIndicator} ${isListening && !coach.isMuted ? styles.voiceActive : ''}`}>
{coach.isMuted
? 'Stumm – Mikrofon aus'
: coach.isStreaming
? (coach.streamingStatus || 'Coach antwortet...')
: isUserSpeaking
? 'Spricht...'
: isListening
? 'Mikrofon an – bitte sprechen'
: 'Mikrofon wird gestartet...'}
</span>
</div>
<div className={styles.textInputRow}>
<textarea
ref={inputRef}
className={styles.textInput}
placeholder="Nachricht eingeben..."
value={coach.inputValue}
onChange={e => coach.setInputValue(e.target.value)}
onKeyDown={handleKeyDown}
rows={1}
disabled={coach.isStreaming}
/>
<button
className={styles.sendBtn}
onClick={handleSend}
disabled={!coach.inputValue.trim() || coach.isStreaming}
>
Senden
</button>
</div>
</div>
</>
)}
{/* Error */}
{coach.error && (
<div className={styles.errorBanner}>{coach.error}</div>
)}
</div>
)}
</div>
);
};
function _categoryIcon(category: string): string {
const icons: Record<string, string> = {
leadership: 'L', conflict: 'K', negotiation: 'V',
presentation: 'P', feedback: 'F', delegation: 'D',
changeManagement: 'C', custom: '*',
};
return icons[category] || '*';
}
export default CommcoachCoachingView;

CommcoachDossierView.tsx

@ -6,7 +6,7 @@
*/
import React, { useState, useRef, useCallback, useEffect } from 'react';
import { useCommcoach } from '../../../hooks/useCommcoach';
import { useCommcoach, type TtsEvent } from '../../../hooks/useCommcoach';
import { useApiRequest } from '../../../hooks/useApi';
import { useInstanceId } from '../../../hooks/useCurrentInstance';
import api from '../../../api';
@ -23,6 +23,17 @@ import styles from './CommcoachDossierView.module.css';
type TabKey = 'coaching' | 'tasks' | 'sessions' | 'scores' | 'documents';
/**
* Voice State Machine
*
* idle         – no session active, everything off
* listening    – mic on, recognition active, TTS off
* botSpeaking  – TTS playing, mic/recognition suspended
* interrupted  – TTS paused (resumable), mic on, recognition active
* muted        – mic off, TTS continues if playing
*/
type VoiceState = 'idle' | 'listening' | 'botSpeaking' | 'interrupted' | 'muted';
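For reference, a sketch of the legal transitions the state table above implies (the view drives transitions directly from TTS events and UI actions; no such map exists in the committed code):

// Sketch only: transitions implied by the state table above.
const ALLOWED_TRANSITIONS: Record<VoiceState, VoiceState[]> = {
  idle: ['listening'],
  listening: ['botSpeaking', 'muted', 'idle'],
  botSpeaking: ['interrupted', 'listening', 'muted', 'idle'],
  interrupted: ['botSpeaking', 'listening', 'muted', 'idle'],
  muted: ['listening', 'idle'],
};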
export const CommcoachDossierView: React.FC = () => {
const coach = useCommcoach();
const { request } = useApiRequest();
@ -47,10 +58,51 @@ export const CommcoachDossierView: React.FC = () => {
const transcriptPartsRef = useRef<string[]>([]);
const processedResultIndexRef = useRef(0);
const silenceTimerRef = useRef<ReturnType<typeof setTimeout> | null>(null);
const [isListening, setIsListening] = useState(false);
const [isUserSpeaking, setIsUserSpeaking] = useState(false);
const [liveTranscript, setLiveTranscript] = useState('');
const [isTtsPlaying, setIsTtsPlaying] = useState(false);
// Voice State Machine
const [voiceState, setVoiceState] = useState<VoiceState>('idle');
const voiceStateRef = useRef<VoiceState>('idle');
// #region agent log
const debugLogsRef = useRef<string[]>([]);
const [debugVisible, setDebugVisible] = useState(false);
const [debugSnapshot, setDebugSnapshot] = useState<string[]>([]);
const _dlog = useCallback((tag: string, info?: string) => {
const t = new Date();
const ts = `${t.getMinutes()}:${String(t.getSeconds()).padStart(2,'0')}.${String(t.getMilliseconds()).padStart(3,'0')}`;
const entry = `[${ts}] ${tag}${info ? ' ' + info : ''}`;
debugLogsRef.current.push(entry);
if (debugLogsRef.current.length > 80) debugLogsRef.current.shift();
}, []);
useEffect(() => { (window as any).__dlog = _dlog; return () => { delete (window as any).__dlog; }; }, [_dlog]);
// #endregion
const _transitionVoice = useCallback((next: VoiceState) => {
const prev = voiceStateRef.current;
if (prev === next) return;
_dlog('VOICE', `${prev} -> ${next}`);
voiceStateRef.current = next;
setVoiceState(next);
}, [_dlog]);
// Subscribe to TTS events from the hook
useEffect(() => {
coach.onTtsEventRef.current = (event: TtsEvent) => {
const cur = voiceStateRef.current;
if (event === 'playing') {
if (cur !== 'muted') _transitionVoice('botSpeaking');
} else if (event === 'ended') {
if (cur === 'botSpeaking') _transitionVoice('listening');
if (cur === 'interrupted') _transitionVoice('listening');
} else if (event === 'paused') {
if (cur === 'botSpeaking') _transitionVoice('interrupted');
} else if (event === 'error') {
if (cur === 'botSpeaking') _transitionVoice('listening');
}
};
return () => { coach.onTtsEventRef.current = null; };
}, [coach.onTtsEventRef, _transitionVoice]);
// Auto-select first context
useEffect(() => {
@ -87,55 +139,64 @@ export const CommcoachDossierView: React.FC = () => {
.catch(() => {});
}, [instanceId, request]);
// TTS playing state sync
// Transition to idle when session ends or tab changes away
useEffect(() => {
if (!coach.session) return;
const interval = setInterval(() => {
setIsTtsPlaying(coach.isTtsPlayingRef.current);
}, 200);
return () => clearInterval(interval);
}, [coach.session, coach.isTtsPlayingRef]);
if (activeTab !== 'coaching' || !coach.session) {
_transitionVoice('idle');
} else if (voiceStateRef.current === 'idle') {
_transitionVoice('listening');
}
}, [activeTab, coach.session?.id, _transitionVoice]);
// Speech Recognition (only when coaching tab active + session running + not muted)
// Hardware control: start/stop recognition + mic based on voiceState
useEffect(() => {
if (activeTab !== 'coaching' || !coach.session || coach.isMuted) {
const micShouldBeOn = voiceState === 'listening' || voiceState === 'interrupted';
const micShouldBeOff = voiceState === 'idle' || voiceState === 'botSpeaking' || voiceState === 'muted';
if (micShouldBeOff) {
if (speechRecognitionRef.current) {
try { speechRecognitionRef.current.stop(); } catch { /* ignore */ }
speechRecognitionRef.current = null;
}
if (streamRef.current) {
if (voiceState === 'idle' && streamRef.current) {
streamRef.current.getTracks().forEach(t => t.stop());
streamRef.current = null;
speechRecognitionRef.current = null;
}
setIsListening(false);
setIsUserSpeaking(false);
return;
}
if (!micShouldBeOn) return;
const SpeechRecognitionApi = (window as any).SpeechRecognition || (window as any).webkitSpeechRecognition;
if (!SpeechRecognitionApi) return;
if (speechRecognitionRef.current) {
try { speechRecognitionRef.current.start(); } catch { /* already running */ }
return;
}
let cancelled = false;
const SILENCE_TIMEOUT_MS = 1500;
const MIN_WORDS_TO_INTERRUPT = 4;
const init = async () => {
try {
if (!streamRef.current) {
const stream = await navigator.mediaDevices.getUserMedia({
audio: { echoCancellation: true, noiseSuppression: true },
});
if (cancelled) { stream.getTracks().forEach(t => t.stop()); return; }
streamRef.current = stream;
setIsListening(true);
}
const recognition = new SpeechRecognitionApi();
recognition.continuous = true;
recognition.interimResults = true;
recognition.lang = 'de-DE';
const SILENCE_TIMEOUT_MS = 1500;
const _sendAndClearTranscript = () => {
const fullTranscript = transcriptPartsRef.current.join(' ').trim();
_dlog('SEND', `words=${fullTranscript.split(/\s+/).filter(Boolean).length} "${fullTranscript.substring(0,60)}"`);
if (fullTranscript) {
const wordCount = fullTranscript.split(/\s+/).filter(Boolean).length;
if (wordCount >= MIN_WORDS_TO_INTERRUPT) coach.sendMessage(fullTranscript);
@ -143,7 +204,6 @@ export const CommcoachDossierView: React.FC = () => {
transcriptPartsRef.current = [];
processedResultIndexRef.current = 0;
setLiveTranscript('');
setIsUserSpeaking(false);
};
const _resetSilenceTimer = () => {
@ -155,8 +215,7 @@ export const CommcoachDossierView: React.FC = () => {
};
recognition.onspeechstart = () => {
if (cancelled || coach.isTtsPlayingRef.current) return;
setIsUserSpeaking(true);
if (cancelled || voiceStateRef.current === 'botSpeaking') return;
transcriptPartsRef.current = [];
processedResultIndexRef.current = 0;
setLiveTranscript('');
@ -165,53 +224,50 @@ export const CommcoachDossierView: React.FC = () => {
recognition.onresult = (event: SpeechRecognitionEvent) => {
if (cancelled) return;
const isBotSpeaking = voiceStateRef.current === 'botSpeaking';
const interimParts: string[] = [];
for (let i = processedResultIndexRef.current; i < event.results.length; i++) {
const r = event.results[i];
if (r.isFinal) {
const text = r[0].transcript.trim();
if (text) transcriptPartsRef.current.push(text);
if (text && !isBotSpeaking) transcriptPartsRef.current.push(text);
processedResultIndexRef.current = i + 1;
} else {
if (coach.isTtsPlayingRef.current) continue;
if (isBotSpeaking) continue;
const text = r[0].transcript.trim();
if (text) interimParts.push(text);
}
}
if (isBotSpeaking) return;
const currentInterim = interimParts.join(' ');
const preview = [...transcriptPartsRef.current, currentInterim].join(' ').trim();
setLiveTranscript(preview);
if (preview) _resetSilenceTimer();
const finalizedWords = transcriptPartsRef.current.join(' ').split(/\s+/).filter(Boolean).length;
if (coach.isTtsPlayingRef.current && finalizedWords >= MIN_WORDS_TO_INTERRUPT) {
coach.stopTts();
}
};
recognition.onspeechend = () => {
if (cancelled) return;
if (silenceTimerRef.current) clearTimeout(silenceTimerRef.current);
if (coach.isTtsPlayingRef.current) {
if (voiceStateRef.current === 'botSpeaking') {
transcriptPartsRef.current = [];
processedResultIndexRef.current = 0;
setLiveTranscript('');
setIsUserSpeaking(false);
return;
}
_sendAndClearTranscript();
};
recognition.onend = () => {
_dlog('REC-END', `state=${voiceStateRef.current}`);
if (cancelled) return;
setIsUserSpeaking(false);
transcriptPartsRef.current = [];
setLiveTranscript('');
if (voiceStateRef.current === 'botSpeaking' || voiceStateRef.current === 'muted' || voiceStateRef.current === 'idle') return;
if (speechRecognitionRef.current === recognition) {
try { recognition.start(); } catch { speechRecognitionRef.current = null; }
}
};
recognition.onerror = (event: any) => {
_dlog('REC-ERR', event.error);
if (event.error === 'no-speech' || event.error === 'aborted') return;
console.warn('SpeechRecognition error:', event.error);
};
@ -224,9 +280,12 @@ export const CommcoachDossierView: React.FC = () => {
};
init();
return () => { cancelled = true; };
}, [voiceState, _dlog, coach]);
// Cleanup on unmount
useEffect(() => {
return () => {
cancelled = true;
coach.stopTts();
if (silenceTimerRef.current) clearTimeout(silenceTimerRef.current);
if (speechRecognitionRef.current) {
try { speechRecognitionRef.current.stop(); } catch { /* ignore */ }
@ -237,17 +296,27 @@ export const CommcoachDossierView: React.FC = () => {
streamRef.current = null;
}
};
}, [activeTab, coach.session?.id, coach.isMuted]);
}, []);
// Reset mute when session ends
useEffect(() => {
if (!coach.session) coach.setMuted(false);
}, [coach.session]);
// Voice actions
const handleStopTts = useCallback(() => {
coach.stopTts();
}, [coach]);
// Focus input on session start
useEffect(() => {
if (coach.session && inputRef.current) inputRef.current.focus();
}, [coach.session]);
const handleResumeTts = useCallback(() => {
coach.resumeTts();
}, [coach]);
const handleToggleMute = useCallback(() => {
const cur = voiceStateRef.current;
if (cur === 'muted') {
_transitionVoice('listening');
} else if (cur === 'listening' || cur === 'interrupted') {
_transitionVoice('muted');
} else if (cur === 'botSpeaking') {
_transitionVoice('muted');
}
}, [_transitionVoice]);
const handleSend = useCallback(async () => {
if (!coach.inputValue.trim() || coach.isStreaming) return;
@ -464,18 +533,18 @@ export const CommcoachDossierView: React.FC = () => {
<div className={styles.sessionHeader}>
<span className={styles.sessionLabel}>Session aktiv</span>
<div className={styles.sessionActions}>
{isTtsPlaying && (
<button className={styles.btnSmallDanger} onClick={coach.stopTts}>Stop</button>
{voiceState === 'botSpeaking' && (
<button className={styles.btnSmallDanger} onClick={handleStopTts}>Stop</button>
)}
{coach.wasInterrupted && !isTtsPlaying && (
<button className={styles.btnSmall} onClick={coach.resumeTts}>Weitersprechen</button>
{voiceState === 'interrupted' && coach.hasAudioToResume() && (
<button className={styles.btnSmall} onClick={handleResumeTts}>Weitersprechen</button>
)}
<button
className={`${styles.btnSmall} ${coach.isMuted ? styles.mutedActive : ''}`}
onClick={() => coach.setMuted(!coach.isMuted)}
title={coach.isMuted ? 'Stummschaltung aufheben' : 'Stummschalten'}
className={`${styles.btnSmall} ${voiceState === 'muted' ? styles.mutedActive : ''}`}
onClick={handleToggleMute}
title={voiceState === 'muted' ? 'Stummschaltung aufheben' : 'Stummschalten'}
>
{coach.isMuted ? '\u{1F507} Stumm' : '\u{1F3A4} Ton an'}
{voiceState === 'muted' ? '\u{1F507} Stumm' : '\u{1F3A4} Ton an'}
</button>
<button className={styles.btnSmall} onClick={coach.completeSession} disabled={!!coach.actionLoading}>
{coach.actionLoading === 'completing' ? 'Wird abgeschlossen...' : 'Abschliessen'}
@ -521,15 +590,17 @@ export const CommcoachDossierView: React.FC = () => {
{/* Input Area */}
<div className={styles.inputArea}>
<div className={styles.voiceStatus}>
<span className={`${styles.voiceIndicator} ${isListening && !coach.isMuted ? styles.voiceActive : ''}`}>
{coach.isMuted
<span className={`${styles.voiceIndicator} ${voiceState === 'listening' ? styles.voiceActive : ''}`}>
{voiceState === 'muted'
? 'Stumm – Mikrofon aus'
: voiceState === 'botSpeaking'
? (coach.streamingStatus || 'Coach spricht...')
: coach.isStreaming
? (coach.streamingStatus || 'Coach antwortet...')
: isUserSpeaking
? 'Spricht...'
: isListening
? 'Mikrofon an – bitte sprechen'
? (coach.streamingStatus || 'Coach denkt nach...')
: voiceState === 'interrupted'
? 'Unterbrochen – Mikrofon an'
: voiceState === 'listening'
? (liveTranscript ? 'Spricht...' : 'Mikrofon an – bitte sprechen')
: 'Mikrofon wird gestartet...'}
</span>
</div>
@ -703,6 +774,19 @@ export const CommcoachDossierView: React.FC = () => {
</div>
)}
</>)}
{/* #region agent log */}
<div style={{position:'fixed',bottom:0,right:0,zIndex:9999}}>
<button
onClick={() => { setDebugSnapshot([...debugLogsRef.current]); setDebugVisible(v => !v); }}
style={{background:'#333',color:'#0f0',border:'none',padding:'4px 8px',fontSize:'10px',borderRadius:'4px 0 0 0'}}
>DBG ({debugLogsRef.current.length})</button>
{debugVisible && (
<div style={{background:'rgba(0,0,0,0.9)',color:'#0f0',fontSize:'9px',maxHeight:'40vh',overflow:'auto',padding:'4px',fontFamily:'monospace',whiteSpace:'pre-wrap',width:'100vw'}}>
{debugSnapshot.map((l,i) => <div key={i}>{l}</div>)}
</div>
)}
</div>
{/* #endregion */}
</div>
);
};