frontend_nyla/src/hooks/useFiles.ts
2026-03-16 11:38:29 +01:00

971 lines
No EOL
35 KiB
TypeScript

import { useState, useEffect, useCallback } from 'react';
import api from '../api';
import { useToast } from '../contexts/ToastContext';
import { useLanguage } from '../providers/language/LanguageContext';
import { getUserDataCache } from '../utils/userCache';
import { useApiRequest } from './useApi';
import { usePermissions, type UserPermissions } from './usePermissions';
import {
fetchFileAttributes as _fetchFileAttributes,
fetchFiles as fetchFilesApi,
fetchFileById as fetchFileByIdApi,
updateFile as updateFileApi,
deleteFile as deleteFileApi,
deleteFiles as deleteFilesApi
} from '../api/fileApi';
// File interfaces - exactly matching backend FileItem model.
// Used by handleFileUpdate when rebuilding the complete object for PUT requests.
export interface FileInfo {
  id: string;
  mandateId: string; // Required in backend
  fileName: string; // Required in backend
  mimeType: string;
  fileHash: string; // content hash — fallback value elsewhere is 40 hex zeros (SHA-1 length), presumably SHA-1; TODO confirm
  fileSize: number; // size in bytes — TODO confirm unit against backend
  creationDate: number; // Backend uses float for UTC timestamp in seconds
}
// UserFile is now just the backend response - no mapping needed.
// Field names come directly from backend attributes, so the row shape is only
// known at runtime.
// NOTE(review): `any` disables all type checking on file rows; consider
// `Record<string, any>` (or a generated type) once the backend contract is
// pinned down — verify against the attribute definitions first.
export type UserFile = any;
// Attribute definition interface (local definition, not imported to avoid conflicts).
// Describes one column/field of the FileItem entity as reported by
// GET /api/attributes/FileItem; drives table columns and edit-form generation.
export interface AttributeDefinition {
  name: string; // backend field name, e.g. "fileName"
  label: string; // human-readable label for the UI
  type: 'string' | 'number' | 'date' | 'boolean' | 'enum';
  sortable?: boolean;
  filterable?: boolean;
  searchable?: boolean;
  width?: number; // column sizing hints — presumably pixels; TODO confirm
  minWidth?: number;
  maxWidth?: number;
  filterOptions?: string[]; // For enum types
}
// Pagination parameters (local definition, not imported to avoid conflicts).
// Passed by the table component to fetchFiles; all fields optional so callers
// can request "defaults" with no argument.
export interface PaginationParams {
  page?: number;
  pageSize?: number;
  sort?: Array<{ field: string; direction: 'asc' | 'desc' }>;
  filters?: Record<string, any>; // field name -> filter value; shape depends on backend — TODO confirm
  search?: string; // free-text search term
}
// Files list hook
/**
 * Files list hook.
 *
 * Loads the file list, the FileItem attribute metadata and the current user's
 * permissions. Attributes and permissions are fetched on mount; the file list
 * itself is NOT fetched on mount — the table component drives pagination via
 * `refetch`. Also refreshes the list when a global `fileUploaded` event fires.
 */
export function useUserFiles() {
  const [files, setFiles] = useState<UserFile[]>([]);
  const [attributes, setAttributes] = useState<AttributeDefinition[]>([]);
  const [permissions, setPermissions] = useState<UserPermissions | null>(null);
  const [pagination, setPagination] = useState<{
    currentPage: number;
    pageSize: number;
    totalItems: number;
    totalPages: number;
  } | null>(null);
  const { request, isLoading: loading, error } = useApiRequest<null, UserFile[]>();
  const { checkPermission } = usePermissions();

  // Fetch attribute definitions for FileItem from the backend.
  // Uses api.get directly because the response format varies:
  // sometimes { attributes: [...] }, sometimes a bare array, sometimes an
  // object with an array under some other key.
  const fetchAttributes = useCallback(async (): Promise<AttributeDefinition[]> => {
    try {
      const response = await api.get('/api/attributes/FileItem');
      let attrs: AttributeDefinition[] = [];
      if (response.data?.attributes && Array.isArray(response.data.attributes)) {
        attrs = response.data.attributes;
      } else if (Array.isArray(response.data)) {
        attrs = response.data;
      } else if (response.data && typeof response.data === 'object') {
        // Fall back to the first array-valued property found in the payload.
        for (const key of Object.keys(response.data)) {
          if (Array.isArray(response.data[key])) {
            attrs = response.data[key];
            break;
          }
        }
      }
      setAttributes(attrs);
      return attrs;
    } catch (error: any) {
      console.error('Error fetching attributes:', error);
      setAttributes([]);
      return [];
    }
  }, []);

  // Fetch the user's DATA permissions for the FileItem entity.
  // On failure falls back to an all-denied permission set so the UI hides
  // privileged actions rather than exposing them.
  const fetchPermissions = useCallback(async () => {
    try {
      const perms = await checkPermission('DATA', 'FileItem');
      setPermissions(perms);
      return perms;
    } catch (error: any) {
      console.error('Error fetching permissions:', error);
      const defaultPerms: UserPermissions = {
        view: false,
        read: 'n',
        create: 'n',
        update: 'n',
        delete: 'n',
      };
      setPermissions(defaultPerms);
      return defaultPerms;
    }
  }, [checkPermission]);

  // Fetch the file list. Supports both the paginated response shape
  // ({ items, pagination }) and the legacy bare-array shape.
  const fetchFiles = useCallback(async (params?: PaginationParams) => {
    // Skip the request entirely when no user is authenticated.
    const cachedUser = getUserDataCache();
    if (!cachedUser) {
      setFiles([]);
      // Note: loading and error are managed by useApiRequest hook
      return;
    }
    try {
      const data = await fetchFilesApi(request, params);
      if (data && typeof data === 'object' && 'items' in data) {
        // Paginated response
        const items = Array.isArray(data.items) ? data.items : [];
        // Backend data is used directly — field names come from attributes.
        setFiles(items);
        // Keep the previous pagination object when the response omits one.
        if (data.pagination) {
          setPagination(data.pagination);
        }
      } else {
        // Non-paginated response (backward compatibility)
        setFiles(Array.isArray(data) ? data : []);
        setPagination(null);
      }
    } catch (error: any) {
      // Error is already surfaced by useApiRequest; just reset local state.
      setFiles([]);
      setPagination(null);
    }
  }, [request]);

  // Optimistically remove a file from the local state.
  // Memoized so the returned function has a stable identity and can be used
  // safely in consumers' dependency arrays.
  const removeFileOptimistically = useCallback((fileId: string) => {
    setFiles(prevFiles => prevFiles.filter(file => file.id !== fileId));
  }, []);

  // Optimistically merge updated fields into a file row in local state.
  const updateFileOptimistically = useCallback((fileId: string, updateData: any) => {
    setFiles(prevFiles =>
      prevFiles.map(file => (file.id === fileId ? { ...file, ...updateData } : file))
    );
  }, []);

  // Fetch a single file by ID.
  const fetchFileById = useCallback(async (fileId: string): Promise<UserFile | null> => {
    return await fetchFileByIdApi(request, fileId);
  }, [request]);

  /**
   * Generate edit-form field descriptors from the loaded attributes.
   * Non-editable backend fields (id, mandateId, fileHash, creationDate,
   * source) are excluded; fileName gets a required/non-empty/<=255-chars
   * validator. Returns [] until attributes have loaded.
   */
  const generateEditFieldsFromAttributes = useCallback((): Array<{
    key: string;
    label: string;
    type: 'string' | 'boolean' | 'email' | 'textarea' | 'date' | 'enum' | 'readonly';
    editable?: boolean;
    required?: boolean;
    validator?: (value: any) => string | null;
    minRows?: number;
    maxRows?: number;
  }> => {
    if (!attributes || attributes.length === 0) {
      return [];
    }
    const nonEditableFields = ['id', 'mandateId', 'fileHash', 'creationDate', 'source'];
    return attributes
      .filter(attr => !nonEditableFields.includes(attr.name))
      .map(attr => {
        // Map attribute type to form field type (default: plain string input).
        let fieldType: 'string' | 'boolean' | 'email' | 'textarea' | 'date' | 'enum' | 'readonly' = 'string';
        if (attr.type === 'boolean') {
          fieldType = 'boolean';
        } else if (attr.type === 'date') {
          fieldType = 'date';
        } else if (attr.type === 'enum' && attr.filterOptions) {
          fieldType = 'enum';
        } else if (attr.name === 'fileName' || attr.name === 'file_name') {
          fieldType = 'string';
        }
        // fileName is the only required/validated field.
        let required = false;
        let validator: ((value: any) => string | null) | undefined = undefined;
        if (attr.name === 'fileName' || attr.name === 'file_name') {
          required = true;
          validator = (value: any) => {
            if (!value || (typeof value === 'string' && value.trim() === '')) {
              return 'File name cannot be empty';
            }
            if (typeof value === 'string' && value.length > 255) {
              return 'File name cannot exceed 255 characters';
            }
            return null;
          };
        }
        return {
          key: attr.name,
          label: attr.label || attr.name,
          type: fieldType,
          editable: true,
          required,
          validator
        };
      });
  }, [attributes]);

  // Ensure attributes are loaded — can be called lazily (e.g. by EditActionButton).
  const ensureAttributesLoaded = useCallback(async () => {
    if (attributes && attributes.length > 0) {
      return attributes;
    }
    return await fetchAttributes();
  }, [attributes, fetchAttributes]);

  // Fetch attributes and permissions on mount.
  // Note: do NOT fetch files here — the table component controls pagination.
  useEffect(() => {
    fetchAttributes();
    fetchPermissions();
  }, [fetchAttributes, fetchPermissions]);

  // Refresh the list when another component dispatches a `fileUploaded` event.
  useEffect(() => {
    let refreshTimer: ReturnType<typeof setTimeout> | undefined;
    const handleFileUploaded = (event: CustomEvent) => {
      console.log('📁 File uploaded event received, refreshing list...', event.detail);
      // Small delay so the backend has persisted the file before we refetch.
      refreshTimer = setTimeout(() => {
        fetchFiles();
      }, 100);
    };
    window.addEventListener('fileUploaded', handleFileUploaded as EventListener);
    return () => {
      window.removeEventListener('fileUploaded', handleFileUploaded as EventListener);
      // Cancel a pending refetch so it cannot fire after unmount.
      if (refreshTimer !== undefined) {
        clearTimeout(refreshTimer);
      }
    };
  }, [fetchFiles]);

  return {
    data: files,
    loading,
    error,
    refetch: fetchFiles,
    removeFileOptimistically,
    updateFileOptimistically,
    attributes,
    permissions,
    pagination,
    fetchFileById,
    generateEditFieldsFromAttributes,
    ensureAttributesLoaded
  };
}
// File operations hook
/**
 * File operations hook: download, delete (single and bulk), upload, update
 * and preview handlers, each with per-file busy sets and its own error state.
 *
 * The preview handler contains intricate fallback logic for the backend's
 * varying response encodings (nested base64/JSON); statement order there is
 * significant — do not reorder casually.
 */
export function useFileOperations() {
  // Per-file busy indicators keyed by file id, so the UI can show per-row spinners.
  const [downloadingFiles, setDownloadingFiles] = useState<Set<string>>(new Set());
  const [deletingFiles, setDeletingFiles] = useState<Set<string>>(new Set());
  const [editingFiles, setEditingFiles] = useState<Set<string>>(new Set());
  const [uploadingFile, setUploadingFile] = useState(false);
  // NOTE(review): no setter is destructured, so isLoading is permanently false.
  // It is still returned below — presumably kept for interface compatibility.
  const [isLoading] = useState(false);
  const [downloadError, setDownloadError] = useState<string | null>(null);
  const [deleteError, setDeleteError] = useState<string | null>(null);
  const [uploadError, setUploadError] = useState<string | null>(null);
  const [previewingFiles, setPreviewingFiles] = useState<Set<string>>(new Set());
  const [previewError, setPreviewError] = useState<string | null>(null);
  // Toast for notifications
  const { showWarning } = useToast();
  // Language context
  const { t } = useLanguage();
  // API request function for API layer calls
  const { request } = useApiRequest();

  /**
   * Download a file as a blob and trigger a browser "save as" via a temporary
   * anchor element. Returns true on success, false on failure (error text is
   * stored in downloadError).
   */
  const handleFileDownload = async (fileId: string, fileName: string) => {
    setDownloadError(null);
    setDownloadingFiles(prev => new Set(prev).add(fileId));
    try {
      // Try to get the file download
      const response = await api.get(`/api/files/${fileId}/download`, {
        responseType: 'blob',
        validateStatus: function (status: number) {
          return status >= 200 && status < 300; // default
        }
      });
      const blob = response.data;
      // Create a download link and trigger the download
      const url = window.URL.createObjectURL(blob);
      const link = document.createElement('a');
      link.href = url;
      link.setAttribute('download', fileName);
      document.body.appendChild(link);
      link.click();
      document.body.removeChild(link);
      // Release the object URL immediately after the click has been dispatched.
      window.URL.revokeObjectURL(url);
      return true;
    } catch (error: any) {
      console.error(`❌ Download failed for ${fileName}:`, error);
      let errorMessage = error.message;
      if (error.response?.status === 404) {
        errorMessage = `File "${fileName}" not found or has been deleted.`;
      } else if (error.response?.status === 403) {
        errorMessage = `No permission to download "${fileName}".`;
      }
      setDownloadError(errorMessage);
      return false;
    } finally {
      // Always clear the per-file busy flag, success or failure.
      setDownloadingFiles(prev => {
        const newSet = new Set(prev);
        newSet.delete(fileId);
        return newSet;
      });
    }
  };

  /**
   * Delete a single file. The optional callback lets the caller remove the
   * row from the UI optimistically BEFORE the request completes; on failure
   * the caller is expected to refetch to restore it.
   * Returns true on success (a 404 is treated as success — already gone).
   */
  const handleFileDelete = async (fileId: string, onOptimisticDelete?: () => void) => {
    setDeleteError(null);
    setDeletingFiles(prev => new Set(prev).add(fileId));
    // Optimistically remove from UI if callback provided
    if (onOptimisticDelete) {
      onOptimisticDelete();
    }
    try {
      await deleteFileApi(request, fileId);
      // Add a small delay to ensure backend has time to process
      await new Promise(resolve => setTimeout(resolve, 300));
      return true;
    } catch (error: any) {
      console.error(`❌ Delete failed for file ID ${fileId}:`, error);
      let errorMessage = error.message;
      if (error.response?.status === 404) {
        // NOTE(review): this assignment is dead — we return before
        // setDeleteError is ever called with it.
        errorMessage = `File not found or has already been deleted.`;
        // If file doesn't exist, consider it successfully "deleted"
        return true;
      } else if (error.response?.status === 403) {
        errorMessage = `No permission to delete this file.`;
      }
      setDeleteError(errorMessage);
      // If deletion failed and we optimistically removed it, we should refetch to restore the file
      return false;
    } finally {
      setDeletingFiles(prev => {
        const newSet = new Set(prev);
        newSet.delete(fileId);
        return newSet;
      });
    }
  };

  /**
   * Bulk delete. Partial failures are logged but the call still resolves true
   * as long as the bulk API itself did not throw.
   */
  const handleFileDeleteMultiple = async (fileIds: string[], onOptimisticDelete?: (fileIds: string[]) => void) => {
    setDeleteError(null);
    setDeletingFiles(prev => {
      const newSet = new Set(prev);
      fileIds.forEach(id => newSet.add(id));
      return newSet;
    });
    // Optimistically remove from UI if callback provided
    if (onOptimisticDelete) {
      onOptimisticDelete(fileIds);
    }
    try {
      // Use API function for bulk delete
      const results = await deleteFilesApi(request, fileIds);
      // Check if any deletions failed
      const failures = results.filter(result => !result.success);
      if (failures.length > 0) {
        console.error(`${failures.length} out of ${fileIds.length} files failed to delete`);
        // For now, we'll consider it successful if at least some files were deleted
        // In a more robust implementation, you might want to handle partial failures differently
      }
      // Add a small delay to ensure backend has time to process
      await new Promise(resolve => setTimeout(resolve, 300));
      return true;
    } catch (error: any) {
      console.error(`❌ Bulk delete failed:`, error);
      setDeleteError(error.message || 'Bulk delete failed');
      return false;
    } finally {
      setDeletingFiles(prev => {
        const newSet = new Set(prev);
        fileIds.forEach(id => newSet.delete(id));
        return newSet;
      });
    }
  };

  /**
   * File upload function - backend bug has been fixed!
   *
   * BACKEND FIXES APPLIED:
   * - Fixed file.fileName → file.filename in routeDataFiles.py
   * - Removed workflowId from FileItem creation in interfaceComponentObjects.py
   * - Upload should now work correctly
   *
   * Shows a translated warning toast when the backend flags a duplicate, and
   * dispatches a global `fileUploaded` event so list hooks can refetch.
   */
  const handleFileUpload = async (file: globalThis.File, workflowId?: string, featureInstanceId?: string) => {
    setUploadError(null);
    setUploadingFile(true);
    try {
      // Validate file before upload
      if (!file || !file.name || file.name.trim() === '') {
        throw new Error('Invalid file: File must have a valid name');
      }
      if (file.size === 0) {
        throw new Error('Invalid file: File cannot be empty');
      }
      const formData = new FormData();
      formData.append('file', file);
      if (workflowId) {
        formData.append('workflowId', workflowId);
      }
      if (featureInstanceId) {
        formData.append('featureInstanceId', featureInstanceId);
      }
      // FormData is now correctly configured for backend
      const response = await api.post('/api/files/upload', formData, {
        headers: {
          'Content-Type': 'multipart/form-data',
        }
      });
      const fileData = response.data;
      // Check if the response indicates a duplicate file
      if (fileData && fileData.isDuplicate && fileData.message) {
        const fileName = fileData.originalFileName || file.name;
        const messageTemplate = t('warning.duplicate_file.message');
        const message = messageTemplate.replace('{fileName}', fileName);
        showWarning(t('warning.duplicate_file.title'), message);
      }
      // Dispatch event to notify other components about the new file
      window.dispatchEvent(new CustomEvent('fileUploaded', { detail: fileData }));
      return { success: true, fileData };
    } catch (error: any) {
      console.error('Upload failed:', error);
      let errorMessage = error.message;
      if (error.response?.status === 500) {
        // NOTE(review): errorMessage can be undefined when error.message is
        // missing, in which case .includes would throw — confirm upstream.
        if (errorMessage.includes('validation')) {
          errorMessage = 'File validation failed. Please ensure the file is valid and try again.';
        } else {
          errorMessage = 'Server error during upload. Please try again later.';
        }
      }
      setUploadError(errorMessage);
      return { success: false, error: errorMessage };
    } finally {
      setUploadingFile(false);
    }
  };

  /**
   * Update a file (currently only fileName). The backend expects a COMPLETE
   * FileItem object on PUT, so one is reconstructed from originalFileData with
   * placeholder fallbacks for missing fields.
   */
  const handleFileUpdate = async (fileId: string, updateData: Partial<{ fileName: string }>, originalFileData?: any) => {
    setUploadError(null); // Reuse upload error state for update operations
    setEditingFiles(prev => new Set(prev).add(fileId));
    try {
      // Use PUT request with complete file object
      // Always use current timestamp for creationDate to avoid validation issues
      const currentTimestamp = Math.floor(Date.now() / 1000);
      const creationDate = currentTimestamp;
      // NOTE(review): originalFileData is read via `mime_type` and `size`,
      // which do not match the FileInfo interface fields `mimeType`/`fileSize`.
      // If the row actually uses camelCase, these fallbacks ALWAYS apply —
      // verify the backend row shape.
      const completeFileObject = {
        id: fileId,
        mandateId: originalFileData?.mandateId || "00000000-0000-0000-0000-000000000000",
        fileName: updateData.fileName,
        mimeType: originalFileData?.mime_type || "application/octet-stream",
        fileHash: originalFileData?.fileHash || "0000000000000000000000000000000000000000",
        fileSize: originalFileData?.size || 0,
        creationDate: Math.floor(creationDate) // Ensure it's an integer
      };
      const updatedFile = await updateFileApi(request, fileId, completeFileObject);
      return { success: true, fileData: updatedFile };
    } catch (error: any) {
      console.error(`Update failed for file ID ${fileId}:`, error);
      let errorMessage = error.message;
      if (error.response?.status === 404) {
        errorMessage = `File not found or has been deleted.`;
      } else if (error.response?.status === 403) {
        errorMessage = `No permission to update this file.`;
      } else if (error.response?.status === 400) {
        errorMessage = `Invalid file update data: ${error.response?.data?.detail || error.response?.data || errorMessage}`;
      } else if (error.response?.status === 422) {
        errorMessage = `Validation error: ${error.response?.data?.detail || errorMessage}`;
      } else if (error.response?.status === 500) {
        errorMessage = `Server error: ${error.response?.data?.detail || errorMessage}`;
      }
      setUploadError(errorMessage);
      return { success: false, error: errorMessage };
    } finally {
      setEditingFiles(prev => {
        const newSet = new Set(prev);
        newSet.delete(fileId);
        return newSet;
      });
    }
  };

  /**
   * Build a preview (object URL + blob) for a file. Handles three response
   * shapes from /api/files/{id}/preview:
   *   1. PDFs:   JSON with base64 content (possibly DOUBLE-encoded: base64 of
   *              a JSON envelope whose `content` is itself base64 PDF bytes),
   *              falling back to a raw blob request on any decode failure.
   *   2. Images: same JSON-first strategy, with magic-byte sniffing
   *              (PNG/JPEG/GIF/RIFF) to distinguish nested JSON from raw bytes.
   *   3. Other:  JSON `content`/`result` fields (optionally base64), else the
   *              whole JSON pretty-printed, else a raw blob.
   * NOTE(review): the /^[A-Za-z0-9+\/=]+$/ test is only a heuristic — plain
   * text without spaces also matches and will be atob'd; the nested decode is
   * wrapped in try/catch to absorb that.
   * Callers are responsible for revoking the returned previewUrl.
   */
  const handleFilePreview = async (fileId: string, fileName: string, mimeType?: string) => {
    setPreviewError(null);
    setPreviewingFiles(prev => new Set(prev).add(fileId));
    try {
      // For PDF files, try JSON response first (API returns base64-encoded PDF)
      if (mimeType === 'application/pdf') {
        try {
          const response = await api.get(`/api/files/${fileId}/preview`, {
            responseType: 'json',
            validateStatus: function (status: number) {
              return status >= 200 && status < 300;
            }
          });
          const jsonResponse = response.data;
          // Check if response has base64-encoded PDF content
          if (jsonResponse && typeof jsonResponse === 'object' && 'content' in jsonResponse) {
            let content = jsonResponse.content;
            // The content field contains base64-encoded JSON, so decode it first
            if (typeof content === 'string' && /^[A-Za-z0-9+/=]+$/.test(content)) {
              try {
                const decodedJsonString = atob(content);
                // Parse the decoded JSON string
                const nestedJson = JSON.parse(decodedJsonString);
                if (nestedJson && typeof nestedJson === 'object' && 'content' in nestedJson) {
                  const innerContent = nestedJson.content;
                  const isBase64 = /^[A-Za-z0-9+/=]+$/.test(innerContent);
                  if (isBase64) {
                    // It's base64-encoded PDF content
                    content = innerContent;
                  } else {
                    // It's plain text content, not a PDF
                    // Return the text content for the FilePreview to handle as text
                    return {
                      success: true,
                      previewUrl: null,
                      blob: null,
                      isJsonContent: true,
                      decodedContent: innerContent,
                      isTextContent: true
                    };
                  }
                }
              } catch (decodeError) {
                // Decode/parse failed: fall through and try `content` as-is below.
                console.warn('⚠️ Failed to decode base64 content or parse JSON:', decodeError);
              }
            }
            // Decode base64 content
            let decodedContent;
            try {
              decodedContent = atob(content);
              // Verify it's actually a PDF
              const isPDF = decodedContent.startsWith('%PDF');
              if (!isPDF) {
                // Warn only — still attempt to preview it as a PDF blob.
                console.warn('⚠️ Decoded content does not appear to be a valid PDF');
              }
            } catch (decodeError) {
              console.error('❌ Failed to decode base64 PDF content:', decodeError);
              // Thrown error is caught by the outer catch, triggering the blob fallback.
              throw new Error('Failed to decode PDF content');
            }
            // Create a blob from the decoded PDF content
            // Convert string to Uint8Array for proper binary handling
            const uint8Array = new Uint8Array(decodedContent.length);
            for (let i = 0; i < decodedContent.length; i++) {
              uint8Array[i] = decodedContent.charCodeAt(i);
            }
            const blob = new Blob([uint8Array], { type: 'application/pdf' });
            const url = window.URL.createObjectURL(blob);
            return { success: true, previewUrl: url, blob: blob, isJsonContent: true, decodedContent: decodedContent };
          } else {
            throw new Error('No content field in PDF response');
          }
        } catch (jsonError) {
          // Fallback to blob response
          const response = await api.get(`/api/files/${fileId}/preview`, {
            responseType: 'blob',
            validateStatus: function (status: number) {
              return status >= 200 && status < 300;
            }
          });
          const previewData = response.data;
          const url = window.URL.createObjectURL(previewData);
          return { success: true, previewUrl: url, blob: previewData, isJsonContent: false };
        }
      }
      // For image files, try JSON response first (API returns base64-encoded images)
      if (mimeType?.startsWith('image/')) {
        try {
          const response = await api.get(`/api/files/${fileId}/preview`, {
            responseType: 'json',
            validateStatus: function (status: number) {
              return status >= 200 && status < 300;
            }
          });
          const jsonResponse = response.data;
          // Check if response has base64-encoded image content
          if (jsonResponse && typeof jsonResponse === 'object' && 'content' in jsonResponse) {
            let content = jsonResponse.content;
            // Prefer the MIME type reported by the server over the caller's.
            const responseMimeType = jsonResponse.mimeType || mimeType;
            // The content field contains base64-encoded data, decode it first
            if (typeof content === 'string' && /^[A-Za-z0-9+/=]+$/.test(content)) {
              try {
                const decodedString = atob(content);
                // Check if it's JSON (nested structure) or direct image data
                if (decodedString.startsWith('{')) {
                  // It's JSON, parse it
                  const nestedJson = JSON.parse(decodedString);
                  if (nestedJson && typeof nestedJson === 'object' && 'content' in nestedJson) {
                    const innerContent = nestedJson.content;
                    const isBase64 = /^[A-Za-z0-9+/=]+$/.test(innerContent);
                    if (isBase64) {
                      // It's base64-encoded image content
                      content = innerContent;
                    } else {
                      throw new Error('Inner content is not base64-encoded');
                    }
                  }
                } else if (decodedString.startsWith('\x89PNG') || decodedString.startsWith('\xFF\xD8\xFF') || decodedString.startsWith('GIF8') || decodedString.startsWith('RIFF')) {
                  // Magic bytes: PNG / JPEG / GIF / RIFF (WebP container).
                  // It's direct image data, use it as is
                  content = btoa(decodedString); // Re-encode as base64 for processing
                } else {
                  throw new Error('Decoded content is neither JSON nor image data');
                }
              } catch (decodeError) {
                // Unlike the PDF path, image decode failures are rethrown so the
                // outer catch switches to the blob fallback.
                console.warn('⚠️ Failed to decode base64 content:', decodeError);
                throw decodeError;
              }
            }
            // Decode base64 content
            let decodedContent;
            try {
              decodedContent = atob(content);
              // Verify it's actually an image by checking for common image headers
              const isJPEG = decodedContent.startsWith('\xFF\xD8\xFF');
              const isPNG = decodedContent.startsWith('\x89PNG\r\n\x1a\n');
              const isGIF = decodedContent.startsWith('GIF8');
              const isWebP = decodedContent.startsWith('RIFF') && decodedContent.includes('WEBP');
              if (!isJPEG && !isPNG && !isGIF && !isWebP) {
                console.warn('⚠️ Decoded content does not appear to be a valid image');
              }
            } catch (decodeError) {
              console.error('❌ Failed to decode base64 image content:', decodeError);
              throw new Error('Failed to decode image content');
            }
            // Create a blob from the decoded image content
            // Convert string to Uint8Array for proper binary handling
            const uint8Array = new Uint8Array(decodedContent.length);
            for (let i = 0; i < decodedContent.length; i++) {
              uint8Array[i] = decodedContent.charCodeAt(i);
            }
            const blob = new Blob([uint8Array], { type: responseMimeType });
            const url = window.URL.createObjectURL(blob);
            return { success: true, previewUrl: url, blob: blob, isJsonContent: true, decodedContent: decodedContent };
          } else {
            throw new Error('No content field in image response');
          }
        } catch (jsonError) {
          // Fallback to blob response
          const response = await api.get(`/api/files/${fileId}/preview`, {
            responseType: 'blob',
            validateStatus: function (status: number) {
              return status >= 200 && status < 300;
            }
          });
          const previewData = response.data;
          const url = window.URL.createObjectURL(previewData);
          return { success: true, previewUrl: url, blob: previewData, isJsonContent: false };
        }
      }
      // For other files, first try to get JSON response (for text-based files)
      try {
        const response = await api.get(`/api/files/${fileId}/preview`, {
          responseType: 'json',
          validateStatus: function (status: number) {
            return status >= 200 && status < 300;
          }
        });
        const jsonResponse = response.data;
        // Check if response has content field (structured response)
        if (jsonResponse && typeof jsonResponse === 'object' && 'content' in jsonResponse) {
          const content = jsonResponse.content;
          // Shadows the parameter intentionally: server-reported type wins.
          const mimeType = jsonResponse.mimeType || 'text/plain';
          // Check if content is base64 encoded (common pattern)
          let decodedContent = content;
          try {
            // Try to decode as base64 if it looks like base64
            if (content && typeof content === 'string' && /^[A-Za-z0-9+/=]+$/.test(content)) {
              decodedContent = atob(content);
            }
          } catch (decodeError) {
            // Not valid base64 after all — keep the raw content.
            decodedContent = content;
          }
          // Create a blob from the (possibly decoded) content
          const blob = new Blob([decodedContent], { type: mimeType });
          const url = window.URL.createObjectURL(blob);
          return { success: true, previewUrl: url, blob: blob, isJsonContent: true, decodedContent: decodedContent };
        } else if (jsonResponse && typeof jsonResponse === 'object' && 'result' in jsonResponse) {
          // Handle base64 encoded content in 'result' field
          try {
            // Decode base64 content
            const decodedContent = atob(jsonResponse.result);
            const mimeType = jsonResponse.mimeType || 'application/json';
            // Create a blob from the decoded content
            const blob = new Blob([decodedContent], { type: mimeType });
            const url = window.URL.createObjectURL(blob);
            return { success: true, previewUrl: url, blob: blob, isJsonContent: true, decodedContent: decodedContent };
          } catch (decodeError) {
            console.error('❌ Failed to decode base64 content:', decodeError);
            // Fallback to treating as raw JSON
            const blob = new Blob([JSON.stringify(jsonResponse, null, 2)], { type: 'application/json' });
            const url = window.URL.createObjectURL(blob);
            return { success: true, previewUrl: url, blob: blob, isJsonContent: true };
          }
        } else {
          // If it's not structured JSON, treat as raw content
          const blob = new Blob([JSON.stringify(jsonResponse, null, 2)], { type: 'application/json' });
          const url = window.URL.createObjectURL(blob);
          return { success: true, previewUrl: url, blob: blob, isJsonContent: true };
        }
      } catch (jsonError) {
        // Fallback to blob response for binary files
        const response = await api.get(`/api/files/${fileId}/preview`, {
          responseType: 'blob',
          validateStatus: function (status: number) {
            return status >= 200 && status < 300;
          }
        });
        const previewData = response.data;
        // Create a blob URL for preview
        const url = window.URL.createObjectURL(previewData);
        return { success: true, previewUrl: url, blob: previewData, isJsonContent: false };
      }
    } catch (error: any) {
      console.error(`❌ Preview failed for ${fileName}:`, error);
      let errorMessage = error.message;
      if (error.response?.status === 404) {
        errorMessage = `File "${fileName}" not found or has been deleted.`;
      } else if (error.response?.status === 403) {
        errorMessage = `No permission to preview "${fileName}".`;
      } else if (error.response?.status === 415) {
        errorMessage = `File type "${fileName}" is not supported for preview.`;
      }
      setPreviewError(errorMessage);
      return { success: false, error: errorMessage };
    } finally {
      setPreviewingFiles(prev => {
        const newSet = new Set(prev);
        newSet.delete(fileId);
        return newSet;
      });
    }
  };

  // Generic inline update handler for FormGeneratorTable.
  // Converts handleFileUpdate's result-object style into throw-on-failure,
  // which the table component expects.
  const handleInlineUpdate = async (fileId: string, changes: Partial<{ fileName: string }>, existingRow?: any) => {
    if (!existingRow) {
      throw new Error('Existing row data required for inline update');
    }
    const result = await handleFileUpdate(fileId, changes, existingRow);
    if (!result.success) {
      throw new Error(result.error || 'Failed to update');
    }
    return result;
  };

  return {
    downloadingFiles,
    deletingFiles,
    editingFiles,
    uploadingFile,
    downloadError,
    deleteError,
    uploadError,
    previewingFiles,
    previewError,
    handleFileDownload,
    handleFileDelete,
    handleFileDeleteMultiple,
    handleFileUpload,
    handleFileUpdate,
    handleFilePreview,
    handleInlineUpdate,
    isLoading
  };
}