([]);
@@ -116,13 +119,13 @@ export const CommcoachDossierView: React.FC = () => {
}, [activeTab, coach.session?.id, voice]);
const handleStopTts = useCallback(() => coach.stopTts(), [coach]);
+ const handlePauseTts = useCallback(() => coach.pauseTts(), [coach]);
const handleResumeTts = useCallback(() => coach.resumeTts(), [coach]);
const handleSend = useCallback(async () => {
if (!coach.inputValue.trim() || coach.isStreaming) return;
- voice.cancelPendingSpeech();
await coach.sendMessage(coach.inputValue);
- }, [coach, voice]);
+ }, [coach]);
const handleKeyDown = useCallback((e: React.KeyboardEvent) => {
if (e.key === 'Enter' && !e.shiftKey) { e.preventDefault(); handleSend(); }
@@ -335,7 +338,10 @@ export const CommcoachDossierView: React.FC = () => {
Session aktiv
{voice.state === 'botSpeaking' && (
-
+ <>
+
+
+ </>
)}
{voice.state === 'interrupted' && coach.hasAudioToResume() && (
diff --git a/src/pages/views/commcoach/useVoiceController.ts b/src/pages/views/commcoach/useVoiceController.ts
index 71d8c4b..142d4d6 100644
--- a/src/pages/views/commcoach/useVoiceController.ts
+++ b/src/pages/views/commcoach/useVoiceController.ts
@@ -4,18 +4,15 @@
* States: idle | listening | botSpeaking | interrupted
* Muted: orthogonal boolean flag (independent of main state)
*
- * Recognition is STOPPED during botSpeaking or when muted=true.
- * Recognition is STARTED when entering listening/interrupted AND muted=false.
- * Each start() creates a fresh results session (processedIndex resets to 0).
+ * Uses the generic useVoiceStream hook for mic capture + STT streaming.
+ * Google Streaming STT handles silence detection natively.
*/
-import { useState, useRef, useCallback, useEffect } from 'react';
+import { useState, useRef, useCallback } from 'react';
+import { useVoiceStream } from '../../../hooks/useSpeechAudioCapture';
export type VoiceState = 'idle' | 'listening' | 'botSpeaking' | 'interrupted';
-const SILENCE_TIMEOUT_MS = 1000;
-const REC_AUTORESTART_DELAY_MS = 300;
-
export interface VoiceControllerApi {
state: VoiceState;
muted: boolean;
@@ -26,28 +23,25 @@ export interface VoiceControllerApi {
ttsPaused: () => void;
ttsEnded: () => void;
toggleMute: () => void;
- cancelPendingSpeech: () => void;
}
-export function useVoiceController(onMessage: (text: string) => void): VoiceControllerApi {
+export interface VoiceControllerCallbacks {
+ onFinalText?: (text: string) => void | Promise<void>;
+ onInterimText?: (text: string) => void;
+}
+
+export function useVoiceController(callbacks: VoiceControllerCallbacks): VoiceControllerApi {
const [state, setState] = useState<VoiceState>('idle');
const [muted, setMuted] = useState(false);
- const [liveTranscript, setLiveTranscript] = useState('');
const stateRef = useRef<VoiceState>('idle');
const mutedRef = useRef(false);
- const streamRef = useRef<MediaStream | null>(null);
- const recognitionRef = useRef<any>(null);
- const transcriptPartsRef = useRef<string[]>([]);
- const processedIndexRef = useRef(0);
- const silenceTimerRef = useRef<ReturnType<typeof setTimeout> | null>(null);
- const onMessageRef = useRef(onMessage);
- onMessageRef.current = onMessage;
+ const cbRef = useRef(callbacks);
+ cbRef.current = callbacks;
const _dlog = useCallback((tag: string, info?: string) => {
const t = new Date();
const ts = `${t.getMinutes()}:${String(t.getSeconds()).padStart(2, '0')}.${String(t.getMilliseconds()).padStart(3, '0')}`;
- const entry = `[${ts}] ${tag}${info ? ' ' + info : ''}`;
- (window as any).__dlog?.(entry);
+ (window as any).__dlog?.(`[${ts}] ${tag}${info ? ' ' + info : ''}`);
}, []);
const _setState = useCallback((next: VoiceState) => {
@@ -64,183 +58,51 @@ export function useVoiceController(onMessage: (text: string) => void): VoiceCont
_dlog('MUTED', String(next));
}, [_dlog]);
- const _cancelSilenceTimer = useCallback(() => {
- if (silenceTimerRef.current) {
- clearTimeout(silenceTimerRef.current);
- silenceTimerRef.current = null;
- }
- }, []);
-
- const _finalizeTranscript = useCallback(() => {
- const full = transcriptPartsRef.current.join(' ').trim();
- _dlog('SEND', `"${full.substring(0, 80)}"`);
- if (full) onMessageRef.current(full);
- transcriptPartsRef.current = [];
- setLiveTranscript('');
- }, [_dlog]);
-
- const _resetSilenceTimer = useCallback(() => {
- _cancelSilenceTimer();
- silenceTimerRef.current = setTimeout(() => {
- _finalizeTranscript();
- }, SILENCE_TIMEOUT_MS);
- }, [_cancelSilenceTimer, _finalizeTranscript]);
-
- const _startRecognition = useCallback(() => {
- if (mutedRef.current) return;
- const rec = recognitionRef.current;
- if (!rec) return;
- try {
- rec.start();
- _dlog('REC-START', 'ok');
- } catch {
- _dlog('REC-START', 'failed');
- }
- }, [_dlog]);
-
- const _stopRecognition = useCallback(() => {
- const rec = recognitionRef.current;
- if (!rec) return;
- try {
- rec.stop();
- } catch {
- /* ignore */
- }
- }, []);
-
- const _createRecognition = useCallback(() => {
- const SpeechRecognitionApi = (window as any).SpeechRecognition || (window as any).webkitSpeechRecognition;
- if (!SpeechRecognitionApi) return;
-
- const recognition = new SpeechRecognitionApi();
- recognition.continuous = true;
- recognition.interimResults = true;
- recognition.lang = 'de-DE';
-
- recognition.onspeechstart = () => {
- if (stateRef.current !== 'listening' && stateRef.current !== 'interrupted') return;
- _resetSilenceTimer();
- };
-
- recognition.onresult = (event: SpeechRecognitionEvent) => {
- if (stateRef.current !== 'listening' && stateRef.current !== 'interrupted') return;
- const interimParts: string[] = [];
- for (let i = processedIndexRef.current; i < event.results.length; i++) {
- const r = event.results[i];
- if (r.isFinal) {
- const text = r[0].transcript.trim();
- if (text) transcriptPartsRef.current.push(text);
- processedIndexRef.current = i + 1;
- } else {
- const text = r[0].transcript.trim();
- if (text) interimParts.push(text);
- }
- }
- const currentInterim = interimParts.join(' ');
- const preview = [...transcriptPartsRef.current, currentInterim].join(' ').trim();
- setLiveTranscript(preview);
- if (preview) _resetSilenceTimer();
- };
-
- recognition.onspeechend = () => {
- if (stateRef.current !== 'listening' && stateRef.current !== 'interrupted') return;
- _resetSilenceTimer();
- };
-
- recognition.onend = () => {
- _dlog('REC-END', `state=${stateRef.current} muted=${mutedRef.current}`);
- if (recognitionRef.current !== recognition) return;
- const cur = stateRef.current;
- if (cur === 'botSpeaking' || cur === 'idle' || mutedRef.current) return;
- processedIndexRef.current = 0;
- setTimeout(() => {
- if (recognitionRef.current !== recognition) return;
- if (stateRef.current !== 'listening' && stateRef.current !== 'interrupted') return;
- if (mutedRef.current) return;
- try {
- recognition.start();
- _dlog('REC-AUTOSTART', 'ok');
- } catch {
- _dlog('REC-AUTOSTART', 'failed');
- }
- }, REC_AUTORESTART_DELAY_MS);
- };
-
- recognition.onerror = (event: any) => {
- _dlog('REC-ERR', event.error);
- if (event.error === 'no-speech' || event.error === 'aborted') return;
- console.warn('SpeechRecognition error:', event.error);
- };
-
- recognitionRef.current = recognition;
- _startRecognition();
- }, [_dlog, _resetSilenceTimer, _startRecognition]);
+ const voiceStream = useVoiceStream({
+ onFinal: (text) => {
+ cbRef.current.onFinalText?.(text);
+ },
+ onInterim: (text) => {
+ cbRef.current.onInterimText?.(text);
+ },
+ onError: (err) => _dlog('VOICE-ERR', String(err)),
+ });
const activate = useCallback(async () => {
if (stateRef.current !== 'idle') return;
_setState('listening');
- transcriptPartsRef.current = [];
- processedIndexRef.current = 0;
- setLiveTranscript('');
-
try {
- if (!streamRef.current) {
- const stream = await navigator.mediaDevices.getUserMedia({
- audio: { echoCancellation: true, noiseSuppression: true },
- });
- streamRef.current = stream;
- }
- _createRecognition();
+ await voiceStream.start('de-DE');
} catch (err) {
- console.warn('Mic access failed:', err);
+ _dlog('MIC-ERR', String(err));
_setState('idle');
}
- }, [_setState, _createRecognition]);
+ }, [_setState, voiceStream, _dlog]);
const deactivate = useCallback(() => {
- _cancelSilenceTimer();
+ voiceStream.stop();
_setState('idle');
- if (recognitionRef.current) {
- try { recognitionRef.current.stop(); } catch { /* ignore */ }
- recognitionRef.current = null;
- }
- if (streamRef.current) {
- streamRef.current.getTracks().forEach(t => t.stop());
- streamRef.current = null;
- }
- transcriptPartsRef.current = [];
- processedIndexRef.current = 0;
- setLiveTranscript('');
- }, [_setState, _cancelSilenceTimer]);
+ }, [_setState, voiceStream]);
const ttsPlaying = useCallback(() => {
const cur = stateRef.current;
if (cur === 'idle') return;
- _cancelSilenceTimer();
- _finalizeTranscript();
- _stopRecognition();
+ voiceStream.stop();
_setState('botSpeaking');
- }, [_setState, _cancelSilenceTimer, _finalizeTranscript, _stopRecognition]);
+ }, [_setState, voiceStream]);
const ttsPaused = useCallback(() => {
- const cur = stateRef.current;
- if (cur !== 'botSpeaking') return;
- transcriptPartsRef.current = [];
- processedIndexRef.current = 0;
- setLiveTranscript('');
+ if (stateRef.current !== 'botSpeaking') return;
_setState('interrupted');
- _startRecognition();
- }, [_setState, _startRecognition]);
+ voiceStream.start('de-DE').catch((err) => _dlog('MIC-ERR', String(err)));
+ }, [_setState, voiceStream, _dlog]);
const ttsEnded = useCallback(() => {
const cur = stateRef.current;
if (cur !== 'botSpeaking' && cur !== 'interrupted') return;
- transcriptPartsRef.current = [];
- processedIndexRef.current = 0;
- setLiveTranscript('');
_setState('listening');
- _startRecognition();
- }, [_setState, _startRecognition]);
+ voiceStream.start('de-DE').catch((err) => _dlog('MIC-ERR', String(err)));
+ }, [_setState, voiceStream, _dlog]);
const toggleMute = useCallback(() => {
const cur = stateRef.current;
@@ -248,45 +110,23 @@ export function useVoiceController(onMessage: (text: string) => void): VoiceCont
if (mutedRef.current) {
_setMuted(false);
if (cur === 'listening' || cur === 'interrupted') {
- _startRecognition();
+ voiceStream.start('de-DE').catch((err) => _dlog('MIC-ERR', String(err)));
}
} else {
_setMuted(true);
- _stopRecognition();
+ voiceStream.stop();
}
- }, [_setMuted, _startRecognition, _stopRecognition]);
-
- const cancelPendingSpeech = useCallback(() => {
- _cancelSilenceTimer();
- transcriptPartsRef.current = [];
- setLiveTranscript('');
- _dlog('CANCEL-SPEECH', 'pending speech cleared for text input');
- }, [_cancelSilenceTimer, _dlog]);
-
- useEffect(() => {
- return () => {
- if (silenceTimerRef.current) clearTimeout(silenceTimerRef.current);
- if (recognitionRef.current) {
- try { recognitionRef.current.stop(); } catch { /* ignore */ }
- recognitionRef.current = null;
- }
- if (streamRef.current) {
- streamRef.current.getTracks().forEach(t => t.stop());
- streamRef.current = null;
- }
- };
- }, []);
+ }, [_setMuted, voiceStream, _dlog]);
return {
state,
muted,
- liveTranscript,
+ liveTranscript: voiceStream.interimText,
activate,
deactivate,
ttsPlaying,
ttsPaused,
ttsEnded,
toggleMute,
- cancelPendingSpeech,
};
}
diff --git a/src/pages/views/trustee/TrusteeAccountingSettingsView.tsx b/src/pages/views/trustee/TrusteeAccountingSettingsView.tsx
index 66927da..b145a0d 100644
--- a/src/pages/views/trustee/TrusteeAccountingSettingsView.tsx
+++ b/src/pages/views/trustee/TrusteeAccountingSettingsView.tsx
@@ -6,7 +6,7 @@
* testing the connection, and removing the integration.
*/
-import React, { useState, useEffect, useCallback } from 'react';
+import React, { useState, useEffect, useCallback, useRef } from 'react';
import { useCurrentInstance } from '../../../hooks/useCurrentInstance';
import { useApiRequest } from '../../../hooks/useApi';
import { useToast } from '../../../contexts/ToastContext';
@@ -35,6 +35,19 @@ export const TrusteeAccountingSettingsView: React.FC = () => {
const [saving, setSaving] = useState(false);
const [testing, setTesting] = useState(false);
const [testResult, setTestResult] = useState<{ success: boolean; message?: string } | null>(null);
+ const [importing, setImporting] = useState(false);
+ const [importDone, setImportDone] = useState(false);
+ const [importResult, setImportResult] = useState<Record<string, unknown> | null>(null);
+ const [importStatus, setImportStatus] = useState<Record<string, unknown> | null>(null);
+ const [dateFrom, setDateFrom] = useState('');
+ const [dateTo, setDateTo] = useState('');
+ const mountedRef = useRef(true);
+
+ useEffect(() => {
+ if (!importDone) return;
+ const t = setTimeout(() => { setImporting(false); setImportDone(false); }, 5000);
+ return () => clearTimeout(t);
+ }, [importDone]);
const loadData = useCallback(async () => {
if (!instanceId) return;
@@ -62,8 +75,21 @@ export const TrusteeAccountingSettingsView: React.FC = () => {
useEffect(() => {
loadData();
+ return () => { mountedRef.current = false; };
}, [loadData]);
+ const _loadImportStatus = useCallback(async () => {
+ if (!instanceId) return;
+ try {
+ const res = await request({ url: `/api/trustee/${instanceId}/accounting/import-status`, method: 'get' });
+ if (mountedRef.current) setImportStatus(res.data);
+ } catch { /* ignore */ }
+ }, [instanceId, request]);
+
+ useEffect(() => {
+ if (existingConfig?.configured) _loadImportStatus();
+ }, [existingConfig, _loadImportStatus]);
+
const _getSelectedConnector = (): AccountingConnectorInfo | undefined => {
return connectors.find(c => c.connectorType === selectedType);
};
@@ -291,6 +317,109 @@ export const TrusteeAccountingSettingsView: React.FC = () => {