commcoach: mobile debug overlay for speech recognition diagnosis

Made-with: Cursor
This commit is contained in:
ValueOn AG 2026-03-06 14:07:00 +01:00
parent d5a4c880a4
commit b28a7fcf33

View file

@ -52,6 +52,19 @@ export const CommcoachDossierView: React.FC = () => {
const [liveTranscript, setLiveTranscript] = useState('');
const [isTtsPlaying, setIsTtsPlaying] = useState(false);
// #region agent log
const debugLogsRef = useRef<string[]>([]);
const [debugVisible, setDebugVisible] = useState(false);
const [debugSnapshot, setDebugSnapshot] = useState<string[]>([]);
// Append a timestamped entry to the debug ring buffer (ref-backed, so no
// re-render per log line). `tag` is a short event label; `info` is optional detail.
const _dlog = useCallback((tag: string, info?: string) => {
const t = new Date();
// Pad minutes as well as seconds/millis so entries align in the monospace overlay.
const ts = `${String(t.getMinutes()).padStart(2,'0')}:${String(t.getSeconds()).padStart(2,'0')}.${String(t.getMilliseconds()).padStart(3,'0')}`;
const entry = `[${ts}] ${tag}${info ? ' ' + info : ''}`;
debugLogsRef.current.push(entry);
// Ring buffer: drop the oldest entry once we exceed 60 lines.
if (debugLogsRef.current.length > 60) debugLogsRef.current.shift();
}, []);
// #endregion
// Auto-select first context
useEffect(() => {
if (!coach.selectedContextId && coach.contexts.length > 0) {
@ -136,6 +149,10 @@ export const CommcoachDossierView: React.FC = () => {
const _sendAndClearTranscript = () => {
const fullTranscript = transcriptPartsRef.current.join(' ').trim();
// #region agent log
const wc = fullTranscript.split(/\s+/).filter(Boolean).length;
_dlog('SEND', `words=${wc} send=${wc>=MIN_WORDS_TO_INTERRUPT} "${fullTranscript.substring(0,60)}"`);
// #endregion
if (fullTranscript) {
const wordCount = fullTranscript.split(/\s+/).filter(Boolean).length;
if (wordCount >= MIN_WORDS_TO_INTERRUPT) coach.sendMessage(fullTranscript);
@ -155,6 +172,9 @@ export const CommcoachDossierView: React.FC = () => {
};
recognition.onspeechstart = () => {
// #region agent log
_dlog('SPCH-START', `tts=${coach.isTtsPlayingRef.current}`);
// #endregion
if (cancelled || coach.isTtsPlayingRef.current) return;
setIsUserSpeaking(true);
transcriptPartsRef.current = [];
@ -189,6 +209,9 @@ export const CommcoachDossierView: React.FC = () => {
};
recognition.onspeechend = () => {
// #region agent log
_dlog('SPCH-END', `tts=${coach.isTtsPlayingRef.current} parts=${transcriptPartsRef.current.length}`);
// #endregion
if (cancelled) return;
if (silenceTimerRef.current) clearTimeout(silenceTimerRef.current);
if (coach.isTtsPlayingRef.current) {
@ -202,6 +225,9 @@ export const CommcoachDossierView: React.FC = () => {
};
recognition.onend = () => {
// #region agent log
_dlog('REC-END', `cancelled=${cancelled} sameRef=${speechRecognitionRef.current===recognition}`);
// #endregion
if (cancelled) return;
setIsUserSpeaking(false);
transcriptPartsRef.current = [];
@ -212,6 +238,9 @@ export const CommcoachDossierView: React.FC = () => {
};
// Log every recognition error to the debug overlay; only surface unexpected
// ones to the console. 'no-speech' and 'aborted' occur routinely and are benign.
recognition.onerror = (event: any) => {
// #region agent log
_dlog('REC-ERR', event.error);
// #endregion
const isBenign = event.error === 'no-speech' || event.error === 'aborted';
if (!isBenign) {
console.warn('SpeechRecognition error:', event.error);
}
};
@ -225,6 +254,9 @@ export const CommcoachDossierView: React.FC = () => {
init();
return () => {
// #region agent log
_dlog('CLEANUP', `tab=${activeTab} sess=${coach.session?.id} muted=${coach.isMuted}`);
// #endregion
cancelled = true;
coach.stopTts();
if (silenceTimerRef.current) clearTimeout(silenceTimerRef.current);
@ -703,6 +735,19 @@ export const CommcoachDossierView: React.FC = () => {
</div>
)}
</>)}
{/* #region agent log */}
{/* Debug overlay: log entries accumulate in debugLogsRef (no re-render per
    entry); a snapshot is copied into state only when the DBG button is
    pressed, so the panel shows a frozen view as of the last toggle. */}
<div style={{position:'fixed',bottom:0,right:0,zIndex:9999}}>
<button
onClick={() => { setDebugSnapshot([...debugLogsRef.current]); setDebugVisible(v => !v); }}
style={{background:'#333',color:'#0f0',border:'none',padding:'4px 8px',fontSize:'10px',borderRadius:'4px 0 0 0'}}
>DBG ({debugLogsRef.current.length})</button>
{debugVisible && (
<div style={{background:'rgba(0,0,0,0.9)',color:'#0f0',fontSize:'9px',maxHeight:'40vh',overflow:'auto',padding:'4px',fontFamily:'monospace',whiteSpace:'pre-wrap',width:'100vw'}}>
{/* index as key is acceptable here: snapshot is replaced wholesale, never reordered */}
{debugSnapshot.map((l,i) => <div key={i}>{l}</div>)}
</div>
)}
</div>
{/* #endregion */}
</div>
);
};