/**
 * WorkspaceInput -- Prompt input with @file autocomplete, attachment bar,
 * voice toggle (generic audio capture hook), and data source selection.
 */
import React, { useState, useCallback, useRef, useEffect } from 'react';
import { ProviderMultiSelect } from '../../../components/ProviderSelector';
import type { ProviderSelection } from '../../../components/ProviderSelector';
import { getPageIcon } from '../../../config/pageRegistry';
import { useVoiceStream } from '../../../hooks/useSpeechAudioCapture';
import api from '../../../api';
import type { WorkspaceFile, DataSource, FeatureDataSource } from './useWorkspace';
import { useLanguage } from '../../../providers/language/LanguageContext';
import { useVoiceCatalog } from '../../../contexts/VoiceCatalogContext';

interface PendingFile {
  fileId: string;
  fileName: string;
  itemType?: 'file' | 'folder';
}

interface TreeItemDrop {
  id: string;
  type: 'file' | 'folder';
  name: string;
}

interface WorkspaceInputProps {
  instanceId: string;
  onSend: (
    prompt: string,
    fileIds?: string[],
    dataSourceIds?: string[],
    featureDataSourceIds?: string[],
    options?: { requireNeutralization?: boolean },
  ) => void;
  isProcessing: boolean;
  onStop: () => void;
  files: WorkspaceFile[];
  dataSources: DataSource[];
  featureDataSources?: FeatureDataSource[];
  pendingFiles?: PendingFile[];
  onRemovePendingFile?: (fileId: string) => void;
  onFileUploadClick?: () => void;
  uploading?: boolean;
  providerSelection?: ProviderSelection;
  onProviderSelectionChange?: (selection: ProviderSelection) => void;
  isMobile?: boolean;
  onTreeItemsDrop?: (items: TreeItemDrop[]) => void;
  onFeatureSourceDrop?: (params: {
    featureInstanceId: string;
    featureCode: string;
    tableName?: string;
    objectKey: string;
    label: string;
    fieldName?: string;
  }) => void;
  onDataSourceDrop?: (params: {
    connectionId: string;
    sourceType: string;
    path: string;
    label: string;
    displayPath?: string;
  }) => void;
  pendingAttachDsId?: string;
  onPendingAttachDsConsumed?: () => void;
  pendingAttachFdsId?: string;
  onPendingAttachFdsConsumed?: () => void;
  onPasteAsFile?: (file: File) => void;
  draftAppend?: string;
  onDraftAppendConsumed?: () => void;
  /**
   * Per-chat attachment persistence. When the parent loads a workflow, it
   * passes the IDs the backend has stored for that chat plus a nonce that
   * increments on every load. The chip-bar is then rehydrated, dropping
   * any IDs that no longer resolve against the available sources.
   *
   * `workflowId` is needed so that "x" detachments can be persisted via a
   * PATCH call without waiting for the next sendMessage round-trip.
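   *
   * A minimal parent-wiring sketch (hypothetical code -- the real host
   * component and the shape of its workflow-loading call may differ):
   *
   *   const [loadedNonce, setLoadedNonce] = useState(0);
   *   const [loadedDsIds, setLoadedDsIds] = useState<string[]>([]);
   *   const [loadedFdsIds, setLoadedFdsIds] = useState<string[]>([]);
   *
   *   const handleWorkflowLoaded = (wf: {
   *     attachedDataSourceIds?: string[];       // assumed payload field names
   *     attachedFeatureDataSourceIds?: string[];
   *   }) => {
   *     setLoadedDsIds(wf.attachedDataSourceIds ?? []);
   *     setLoadedFdsIds(wf.attachedFeatureDataSourceIds ?? []);
   *     setLoadedNonce(n => n + 1); // bump on every load so the chip-bar rehydrates
   *   };
   *
   *   <WorkspaceInput
   *     workflowId={workflowId}
   *     loadedAttachedDataSourceIds={loadedDsIds}
   *     loadedAttachedFeatureDataSourceIds={loadedFdsIds}
   *     loadedNonce={loadedNonce}
   *     ...
   *   />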
   */
  workflowId?: string | null;
  loadedAttachedDataSourceIds?: string[];
  loadedAttachedFeatureDataSourceIds?: string[];
  loadedNonce?: number;
}

export const WorkspaceInput: React.FC<WorkspaceInputProps> = ({
  instanceId,
  onSend,
  isProcessing,
  onStop,
  files,
  dataSources,
  featureDataSources = [],
  pendingFiles = [],
  onRemovePendingFile,
  onFileUploadClick,
  uploading = false,
  providerSelection,
  onProviderSelectionChange,
  isMobile = false,
  onTreeItemsDrop,
  onFeatureSourceDrop,
  onDataSourceDrop,
  pendingAttachDsId,
  onPendingAttachDsConsumed,
  pendingAttachFdsId,
  onPendingAttachFdsConsumed,
  onPasteAsFile,
  draftAppend,
  onDraftAppendConsumed,
  workflowId,
  loadedAttachedDataSourceIds,
  loadedAttachedFeatureDataSourceIds,
  loadedNonce,
}) => {
  const { t } = useLanguage();
  const { languages: voiceCatalogLanguages } = useVoiceCatalog();
  const [prompt, setPrompt] = useState('');
  const [showAutocomplete, setShowAutocomplete] = useState(false);
  const [autocompleteFilter, setAutocompleteFilter] = useState('');
  const [treeDropOver, setTreeDropOver] = useState(false);
  const [voiceActive, setVoiceActive] = useState(false);
  const [voiceLanguage, setVoiceLanguage] = useState('de-DE');
  const [showLangPicker, setShowLangPicker] = useState(false);
  const _sttPrefsLoaded = useRef(false);
  const [attachedFileIds, setAttachedFileIds] = useState<string[]>([]);
  const [attachedDataSourceIds, setAttachedDataSourceIds] = useState<string[]>([]);
  const [attachedFeatureDataSourceIds, setAttachedFeatureDataSourceIds] = useState<string[]>([]);
  const [neutralizeActive, setNeutralizeActive] = useState(false);
  const textareaRef = useRef<HTMLTextAreaElement | null>(null);

  // Append externally provided draft text to the prompt, then tell the
  // parent it has been consumed.
  useEffect(() => {
    if (draftAppend) {
      setPrompt(prev => prev + (prev ? '\n' : '') + draftAppend);
      onDraftAppendConsumed?.();
    }
  }, [draftAppend, onDraftAppendConsumed]);

  // Persist a changed attachment list to the backend so the next chat
  // reload reflects the current state. Defined early so the
  // pendingAttachDsId / pendingAttachFdsId effects below can also persist
  // immediately after a 💬-click or drag-drop attach.
  const _persistAttachments = useCallback((dsIds: string[], fdsIds: string[]) => {
    if (!instanceId || !workflowId) return;
    api.patch(`/api/workspace/${instanceId}/workflows/${workflowId}/attachments`, {
      dataSourceIds: dsIds,
      featureDataSourceIds: fdsIds,
    }).catch(err => console.warn('Failed to persist chat attachments:', err));
  }, [instanceId, workflowId]);

  // 💬-click or drag-drop attach: parent sets pendingAttachDsId after
  // creating/finding the DataSource. Add to the chip bar AND persist
  // immediately so a chat reload before the user sends a message still
  // shows the chip.
  useEffect(() => {
    if (!pendingAttachDsId) return;
    setAttachedDataSourceIds(prev => {
      if (prev.includes(pendingAttachDsId)) return prev;
      const next = [...prev, pendingAttachDsId];
      _persistAttachments(next, attachedFeatureDataSourceIds);
      return next;
    });
    onPendingAttachDsConsumed?.();
  }, [pendingAttachDsId, onPendingAttachDsConsumed, _persistAttachments, attachedFeatureDataSourceIds]);

  useEffect(() => {
    if (!pendingAttachFdsId) return;
    setAttachedFeatureDataSourceIds(prev => {
      if (prev.includes(pendingAttachFdsId)) return prev;
      const next = [...prev, pendingAttachFdsId];
      _persistAttachments(attachedDataSourceIds, next);
      return next;
    });
    onPendingAttachFdsConsumed?.();
  }, [pendingAttachFdsId, onPendingAttachFdsConsumed, _persistAttachments, attachedDataSourceIds]);
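
  // pendingAttachDsId / pendingAttachFdsId form a one-shot handshake with the
  // parent: the parent sets the ID once the DataSource exists, this component
  // adds the chip and persists it, then signals consumption. A rough sketch of
  // the parent side (hypothetical names, not the actual host implementation):
  //
  //   const [pendingAttachDsId, setPendingAttachDsId] = useState<string | undefined>();
  //   const attachDataSource = async (dropParams) => {
  //     const ds = await createOrFindDataSource(dropParams); // parent-side helper (assumed)
  //     setPendingAttachDsId(ds.id);                         // WorkspaceInput picks it up
  //   };
  //   // ...and on the element:
  //   //   pendingAttachDsId={pendingAttachDsId}
  //   //   onPendingAttachDsConsumed={() => setPendingAttachDsId(undefined)}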
  // Rehydrate the chip-bar whenever the parent re-loads a chat (loadedNonce
  // bumps on every loadWorkflow call). We trust the loaded IDs initially;
  // a separate one-shot reconciliation below drops IDs that don't resolve
  // once the source lists have arrived from the backend.
  useEffect(() => {
    if (loadedNonce === undefined) return;
    setAttachedFileIds([]);
    setAttachedDataSourceIds(Array.isArray(loadedAttachedDataSourceIds) ? [...loadedAttachedDataSourceIds] : []);
    setAttachedFeatureDataSourceIds(Array.isArray(loadedAttachedFeatureDataSourceIds) ? [...loadedAttachedFeatureDataSourceIds] : []);
  }, [loadedNonce]);

  // Drop persisted attachment IDs that no longer resolve to an existing
  // source (e.g. the DataSource was deleted while the chat was closed).
  //
  // CRITICAL: this MUST run only once per chat-load (per `loadedNonce`),
  // and only after the source lists have actually arrived. A continuous
  // filter would race with `_handleDataSourceDrop` /
  // `_handleSendToChat_FeatureSource` in the parent: the drop sets the
  // chip via `pendingAttachDsId` *before* `refreshDataSources()` has
  // returned, so a continuous filter would briefly evict the freshly
  // dropped ID and the chip would visibly flash in and out.
  const _reconciledDsForNonce = useRef<number | undefined>(undefined);
  const _reconciledFdsForNonce = useRef<number | undefined>(undefined);

  useEffect(() => {
    if (loadedNonce === undefined) return;
    if (_reconciledDsForNonce.current === loadedNonce) return;
    if (dataSources.length === 0) return; // wait for the list to arrive
    _reconciledDsForNonce.current = loadedNonce;
    const validIds = new Set(dataSources.map(d => d.id));
    setAttachedDataSourceIds(prev => {
      const filtered = prev.filter(id => validIds.has(id));
      return filtered.length === prev.length ? prev : filtered;
    });
  }, [loadedNonce, dataSources]);

  useEffect(() => {
    if (loadedNonce === undefined) return;
    if (_reconciledFdsForNonce.current === loadedNonce) return;
    if (featureDataSources.length === 0) return;
    _reconciledFdsForNonce.current = loadedNonce;
    const validIds = new Set(featureDataSources.map(d => d.id));
    setAttachedFeatureDataSourceIds(prev => {
      const filtered = prev.filter(id => validIds.has(id));
      return filtered.length === prev.length ? prev : filtered;
    });
  }, [loadedNonce, featureDataSources]);

  // Voice capture buffers: while dictating, the prompt is rebuilt from
  // (text before dictation started) + (finalized transcript) + (current interim).
  const promptBeforeVoiceRef = useRef('');
  const finalizedTextRef = useRef('');
  const currentInterimRef = useRef('');

  // Load the user's preferred STT language once.
  useEffect(() => {
    if (_sttPrefsLoaded.current) return;
    _sttPrefsLoaded.current = true;
    fetch('/api/voice/preferences', { credentials: 'include' })
      .then(r => r.ok ? r.json() : null)
      .then(data => { if (data?.sttLanguage) setVoiceLanguage(data.sttLanguage); })
      .catch(() => {});
  }, []);

  // Resolve inline @fileName references (e.g. "summarize @report.pdf") to file
  // IDs; matching is case-insensitive and duplicates are dropped.
  const _extractFileRefs = useCallback(
    (text: string): string[] => {
      const pattern = /@([\w.\-]+)/g;
      const matched: string[] = [];
      let match;
      while ((match = pattern.exec(text)) !== null) {
        const ref = match[1];
        const file = files.find(
          f => f.fileName === ref || f.fileName.toLowerCase() === ref.toLowerCase(),
        );
        if (file && !matched.includes(file.id)) {
          matched.push(file.id);
        }
      }
      return matched;
    },
    [files],
  );

  const _handleSend = useCallback(() => {
    const trimmed = prompt.trim();
    if (!trimmed || isProcessing) return;
    const inlineFileIds = _extractFileRefs(trimmed);
    const allFileIds = [...new Set([...attachedFileIds, ...inlineFileIds])];
    const options = neutralizeActive ? { requireNeutralization: true } : undefined;
    onSend(trimmed, allFileIds, attachedDataSourceIds, attachedFeatureDataSourceIds, options);
    setPrompt('');
    setShowAutocomplete(false);
    setAttachedFileIds([]);
  }, [prompt, isProcessing, _extractFileRefs, attachedFileIds, attachedDataSourceIds, attachedFeatureDataSourceIds, neutralizeActive, onSend]);

  // Enter sends, Shift+Enter inserts a newline.
  const _handleKeyDown = useCallback(
    (e: React.KeyboardEvent) => {
      if (e.key === 'Enter' && !e.shiftKey) {
        e.preventDefault();
        _handleSend();
      }
    },
    [_handleSend],
  );

  // Open the @file autocomplete when the text before the cursor ends in "@"
  // (optionally followed by a partial file name).
  const _handleChange = useCallback(
    (e: React.ChangeEvent<HTMLTextAreaElement>) => {
      const value = e.target.value;
      setPrompt(value);
      const cursorPos = e.target.selectionStart;
      const textBeforeCursor = value.slice(0, cursorPos);
      const atMatch = textBeforeCursor.match(/@([\w.\-]*)$/);
      if (atMatch) {
        setAutocompleteFilter(atMatch[1].toLowerCase());
        setShowAutocomplete(true);
      } else {
        setShowAutocomplete(false);
      }
    },
    [],
  );

  // Replace the partially typed "@..." token at the cursor with the chosen file name.
  const _insertFileRef = useCallback(
    (fileName: string) => {
      const textarea = textareaRef.current;
      if (!textarea) return;
      const cursorPos = textarea.selectionStart;
      const textBefore = prompt.slice(0, cursorPos);
      const textAfter = prompt.slice(cursorPos);
      const atStart = textBefore.lastIndexOf('@');
      const newText = textBefore.slice(0, atStart) + `@${fileName} ` + textAfter;
      setPrompt(newText);
      setShowAutocomplete(false);
      textarea.focus();
    },
    [prompt],
  );

  const _removeAttachedFile = useCallback((fileId: string) => {
    setAttachedFileIds(prev => prev.filter(id => id !== fileId));
  }, []);

  const _removeAttachedDataSource = useCallback((dsId: string) => {
    setAttachedDataSourceIds(prev => {
      const next = prev.filter(id => id !== dsId);
      _persistAttachments(next, attachedFeatureDataSourceIds);
      return next;
    });
  }, [_persistAttachments, attachedFeatureDataSourceIds]);

  const _toggleFeatureDataSource = useCallback((fdsId: string) => {
    setAttachedFeatureDataSourceIds(prev => {
      const next = prev.includes(fdsId) ? prev.filter(id => id !== fdsId) : [...prev, fdsId];
      _persistAttachments(attachedDataSourceIds, next);
      return next;
    });
  }, [_persistAttachments, attachedDataSourceIds]);

  const _buildPromptFromRefs = useCallback(() => {
    const parts = [
      promptBeforeVoiceRef.current,
      finalizedTextRef.current,
      currentInterimRef.current,
    ].filter(Boolean);
    return parts.join(' ');
  }, []);

  const voiceStream = useVoiceStream({
    onFinal: (text) => {
      finalizedTextRef.current = finalizedTextRef.current ? `${finalizedTextRef.current} ${text}` : text;
      currentInterimRef.current = '';
      setPrompt(_buildPromptFromRefs());
    },
    onInterim: (text) => {
      currentInterimRef.current = text;
      setPrompt(_buildPromptFromRefs());
    },
    onError: (error) => {
      console.warn('Workspace voice stream error', error);
    },
  });

  const _stopVoiceCapture = useCallback(() => {
    // Promote any dangling interim text to finalized text before stopping.
    if (currentInterimRef.current) {
      finalizedTextRef.current = finalizedTextRef.current
        ? `${finalizedTextRef.current} ${currentInterimRef.current}`
        : currentInterimRef.current;
      currentInterimRef.current = '';
    }
    setPrompt(_buildPromptFromRefs());
    voiceStream.stop();
    setVoiceActive(false);
  }, [voiceStream, _buildPromptFromRefs]);

  const _toggleVoice = useCallback(async () => {
    if (voiceActive) {
      _stopVoiceCapture();
      return;
    }
    promptBeforeVoiceRef.current = prompt;
    finalizedTextRef.current = '';
    currentInterimRef.current = '';
    try {
      setVoiceActive(true);
      await voiceStream.start(voiceLanguage);
    } catch {
      setVoiceActive(false);
    }
  }, [voiceActive, prompt, voiceStream, voiceLanguage, _stopVoiceCapture]);

  const filteredFiles = showAutocomplete
    ? files.filter(f => f.fileName.toLowerCase().includes(autocompleteFilter))
    : [];
  const hasAttachments =
    attachedFileIds.length > 0 ||
    attachedDataSourceIds.length > 0 ||
    attachedFeatureDataSourceIds.length > 0;
  const _horizontalPadding = isMobile ? 12 : 24;
  const _controlSize = isMobile ? 38 : 40;

  // Large pasted text (>= 1000 chars) is converted into a .txt upload instead
  // of flooding the prompt.
  const _handlePaste = useCallback((e: React.ClipboardEvent) => {
    if (!onPasteAsFile) return;
    const text = e.clipboardData.getData('text/plain');
    if (text && text.length >= 1000) {
      e.preventDefault();
      const blob = new Blob([text], { type: 'text/plain' });
      const file = new File([blob], `pasted-text-${Date.now()}.txt`, { type: 'text/plain' });
      onPasteAsFile(file);
    }
  }, [onPasteAsFile]);

  const _handlePromptDragOver = useCallback((e: React.DragEvent) => {
    if (
      e.dataTransfer.types.includes('application/tree-items') ||
      e.dataTransfer.types.includes('application/chat-id') ||
      e.dataTransfer.types.includes('application/feature-source') ||
      e.dataTransfer.types.includes('application/datasource')
    ) {
      e.preventDefault();
      e.dataTransfer.dropEffect = 'copy';
      setTreeDropOver(true);
    }
  }, []);

  const _handlePromptDragLeave = useCallback(() => setTreeDropOver(false), []);

  const _handlePromptDrop = useCallback((e: React.DragEvent) => {
    setTreeDropOver(false);
    const chatId = e.dataTransfer.getData('application/chat-id');
    if (chatId) {
      e.preventDefault();
      e.stopPropagation();
      const chatLabel = e.dataTransfer.getData('text/plain');
      const ref = chatLabel ? `[Chat: ${chatLabel}]` : `[Chat: ${chatId.slice(0, 8)}]`;
      setPrompt(prev => (prev ? `${prev} ${ref}` : ref));
      return;
    }
    const featureSourceJson = e.dataTransfer.getData('application/feature-source');
    if (featureSourceJson && onFeatureSourceDrop) {
      e.preventDefault();
      e.stopPropagation();
      const params = JSON.parse(featureSourceJson);
      onFeatureSourceDrop(params);
      return;
    }
    const dataSourceJson = e.dataTransfer.getData('application/datasource');
    if (dataSourceJson && onDataSourceDrop) {
      e.preventDefault();
      e.stopPropagation();
      const params = JSON.parse(dataSourceJson);
      onDataSourceDrop(params);
      return;
    }
    const treeItemsJson = e.dataTransfer.getData('application/tree-items');
    if (treeItemsJson && onTreeItemsDrop) {
      e.preventDefault();
      e.stopPropagation();
      const items: TreeItemDrop[] = JSON.parse(treeItemsJson);
      onTreeItemsDrop(items);
    }
  }, [onTreeItemsDrop, onFeatureSourceDrop, onDataSourceDrop]);
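
  // The prompt area accepts four custom dataTransfer payloads (see
  // _handlePromptDragOver / _handlePromptDrop above). A sketch of how a drag
  // source elsewhere in the app could populate them -- field values are
  // illustrative only; payload shapes follow the onTreeItemsDrop /
  // onDataSourceDrop / onFeatureSourceDrop prop types:
  //
  //   e.dataTransfer.setData('application/tree-items',
  //     JSON.stringify([{ id: 'f-123', type: 'file', name: 'report.pdf' }]));
  //   e.dataTransfer.setData('application/datasource',
  //     JSON.stringify({ connectionId: 'c-1', sourceType: 'table', path: 'public.orders', label: 'Orders' }));
  //   e.dataTransfer.setData('application/feature-source',
  //     JSON.stringify({ featureInstanceId: 'fi-1', featureCode: 'crm', objectKey: 'contacts', label: 'Contacts' }));
  //   e.dataTransfer.setData('application/chat-id', chat.id);
  //   e.dataTransfer.setData('text/plain', chat.title); // used as the [Chat: ...] label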
  return (
    <div>
      {/* Pending uploaded files */}
      {pendingFiles.length > 0 && (
        <div>
          {pendingFiles.map(pf => (
            <span key={pf.fileId}>
              {pf.itemType === 'folder' ? '📁' : '📎'}
              {pf.fileName.length > 25 ? pf.fileName.slice(0, 25) + '...' : pf.fileName}
              {onRemovePendingFile && (
                <button onClick={() => onRemovePendingFile(pf.fileId)}>×</button>
              )}
            </span>
          ))}
        </div>
      )}

      {/* Attachment bar */}
      {hasAttachments && (
        <div>
          {attachedFileIds.map(fId => {
            const file = files.find(f => f.id === fId);
            return (
              <span key={fId} onClick={() => _removeAttachedFile(fId)}>
                📄 {file?.fileName || fId}
              </span>
            );
          })}
          {attachedDataSourceIds.map(dsId => {
            const ds = dataSources.find(d => d.id === dsId);
            return (
              <span key={dsId} onClick={() => _removeAttachedDataSource(dsId)}>
                🔗 {ds?.label || ds?.path || dsId}
              </span>
            );
          })}
          {attachedFeatureDataSourceIds.map(fdsId => {
            const fds = featureDataSources.find(d => d.id === fdsId);
            const fdsIcon = fds ? getPageIcon(`feature.${fds.featureCode}`) : null;
            return (
              <span key={fdsId} onClick={() => _toggleFeatureDataSource(fdsId)}>
                {fdsIcon || '\uD83D\uDDC3\uFE0F'} {fds?.label || fdsId} – {fds?.tableName || ''}
              </span>
            );
          })}
        </div>
      )}

      {/* Autocomplete dropdown */}
      {showAutocomplete && filteredFiles.length > 0 && (
        <div>
          {filteredFiles.slice(0, 10).map(f => (
            <div
              key={f.id}
              onClick={() => _insertFileRef(f.fileName)}
              style={{
                padding: '8px 12px',
                cursor: 'pointer',
                fontSize: 13,
                borderBottom: '1px solid #f0f0f0',
              }}
              onMouseEnter={e => (e.currentTarget.style.background = '#f5f5f5')}
              onMouseLeave={e => (e.currentTarget.style.background = '')}
            >
              @{f.fileName} {f.mimeType} · {(f.fileSize / 1024).toFixed(1)}KB
            </div>
          ))}
        </div>
      )}

      {/* Main input row */}