# (extraction artifact) Original source metadata: 945 lines, no trailing EOL, 36 KiB, Python
import os
|
|
import json
|
|
from fastapi import APIRouter, HTTPException, Depends, Body, Path, Query
|
|
from typing import List, Dict, Any, Optional
|
|
from fastapi import status
|
|
import asyncio
|
|
import uuid
|
|
from datetime import datetime
|
|
import logging
|
|
|
|
# Import auth module
|
|
from auth import get_current_active_user, get_user_context
|
|
|
|
# Import interfaces
|
|
from modules.lucydom_interface import get_lucydom_interface
|
|
from modules.agentservice_workflow_manager import get_workflow_manager
|
|
|
|
# Import für AI-Service
|
|
from connectors.connector_aichat_openai import ChatService as OpenAIChatService
|
|
from connectors.connector_aichat_anthropic import ChatService as AnthropicChatService
|
|
|
|
# Import models
|
|
import modules.lucydom_model as lucydom_model
|
|
|
|
# Determine all plain data attributes of a model class (excluding
# internal/special attributes and well-known ORM helper names).
def get_model_attributes(model_class):
    """Return the names of all non-callable public attributes of *model_class*.

    Callables, names starting with an underscore, and the ORM helper
    attributes 'metadata', 'query', 'query_class', 'label' and
    'field_labels' are skipped.
    """
    excluded_names = {'metadata', 'query', 'query_class', 'label', 'field_labels'}
    attributes = []
    for name in dir(model_class):
        if name.startswith('_') or name in excluded_names:
            continue
        if callable(getattr(model_class, name)):
            continue
        attributes.append(name)
    return attributes
|
|
|
|
# Workflow model attribute names, computed once at import time; used by the
# create/update endpoints below to copy request fields dynamically.
workflow_attributes = get_model_attributes(lucydom_model.Workflow)


# Module-level logger
logger = logging.getLogger(__name__)


# Router for all workflow endpoints (mounted under /api/workflows)
router = APIRouter(
    prefix="/api/workflows",
    tags=["Workflow"],
    responses={404: {"description": "Not found"}}
)
|
|
|
|
# Helper that builds the AI chat service from the application configuration.
def get_ai_service(mandate_id: int, user_id: int):
    """Return the configured AI chat service instance.

    Reads AI_PROVIDER from the 'Module_AgentserviceInterface' config section:
    "anthropic" selects the Anthropic connector, anything else falls back to
    OpenAI. The mandate/user arguments are currently unused but kept for
    interface stability.
    """
    import configload

    config = configload.load_config()
    provider = config.get('Module_AgentserviceInterface', 'AI_PROVIDER').lower()
    service_cls = AnthropicChatService if provider == "anthropic" else OpenAIChatService
    return service_cls()
|
|
|
|
@router.get("", response_model=List[Dict[str, Any]])
async def list_workflows(
    current_user: Dict[str, Any] = Depends(get_current_active_user)
):
    """List all workflows belonging to the current user."""
    mandate_id, user_id = await get_user_context(current_user)

    # Manager is scoped to the caller's mandate/user context.
    manager = get_workflow_manager(mandate_id, user_id)

    # Delegates listing (and its filtering) entirely to the manager.
    return await manager.list_workflows(mandate_id, user_id)
|
|
|
|
# Strong references to in-flight background tasks. Without this the event loop
# only holds a weak reference to tasks created via asyncio.create_task, and a
# running workflow task could be garbage-collected before completion.
_background_tasks: set = set()


@router.post("", response_model=Dict[str, Any])
async def create_workflow(
    workflow_request: lucydom_model.WorkflowCreateRequest = Body(...),
    current_user: Dict[str, Any] = Depends(get_current_active_user)
):
    """
    Create a new workflow and start executing it in the background.

    Validates all referenced files (404 if any is missing), persists the
    workflow record, launches execution asynchronously and returns
    immediately with the new workflow's id and a "running" status.
    """
    mandate_id, user_id = await get_user_context(current_user)

    # Debug logging for the incoming request
    logger.debug(f"Creating workflow with request: {workflow_request}")
    logger.debug(f"Files in request: {workflow_request.files}")

    # LucyDOM interface scoped to the caller's context
    lucy_interface = get_lucydom_interface(mandate_id, user_id)

    # Resolve and validate every referenced file up front
    files = []
    for file_id in workflow_request.files:
        logger.debug(f"Looking up file with ID: {file_id}")

        file = lucy_interface.get_file(file_id)
        if not file:
            logger.warning(f"File with ID {file_id} not found in database")
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail=f"Datei mit ID {file_id} nicht gefunden"
            )

        logger.debug(f"Found file: {file.get('name', 'unknown')} (ID: {file_id})")
        files.append(file)

    # Generate the workflow id
    workflow_id = str(uuid.uuid4())

    # AI service and workflow manager for this user context
    ai_service = get_ai_service(mandate_id, user_id)
    workflow_manager = get_workflow_manager(mandate_id, user_id, ai_service)

    # Base workflow record
    workflow_data = {
        "id": workflow_id,
        "mandate_id": mandate_id,
        "user_id": user_id,
        "name": workflow_request.name or f"Workflow {workflow_id}",
        "status": "running",
        "started_at": datetime.now().isoformat(),
        "last_activity": datetime.now().isoformat(),
        "prompt": workflow_request.prompt
    }

    # Copy any model attributes that were supplied in the request
    for attr in workflow_attributes:
        if hasattr(workflow_request, attr) and getattr(workflow_request, attr) is not None:
            workflow_data[attr] = getattr(workflow_request, attr)

    # Persist the workflow in the database
    if lucy_interface:
        try:
            lucy_interface.create_workflow(workflow_data)
        except Exception as e:
            # BUGFIX: a persistence failure is a server-side error, not an
            # authorization problem — previously this raised 403 FORBIDDEN.
            raise HTTPException(
                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                detail=f"Fehler beim Speichern des Workflows in der Datenbank: {str(e)}"
            )

    # Log files before executing the workflow
    logger.info(f"Executing workflow with {len(files)} files:")
    for file in files:
        logger.debug(f"File: {file.get('name', 'unknown')} (ID: {file.get('id', 'unknown')})")

    # Start workflow execution asynchronously
    workflow_task = asyncio.create_task(
        workflow_manager.execute_workflow(
            message={"content": workflow_request.prompt, "role": "user"},
            workflow_id=workflow_id,
            files=files
        )
    )
    # BUGFIX: keep a strong reference so the task cannot be garbage-collected
    # while still running; drop it automatically when the task finishes.
    _background_tasks.add(workflow_task)
    workflow_task.add_done_callback(_background_tasks.discard)

    # Apply the caller-provided name if the manager already tracks the workflow
    if workflow_request.name:
        if workflow_id in workflow_manager.workflows:
            workflow_manager.workflows[workflow_id]["name"] = workflow_request.name
            workflow_manager._save_workflow(workflow_manager.workflows[workflow_id])

    # Respond immediately; execution continues in the background
    return {
        "workflow_id": workflow_id,
        "status": "running",
        "message": "Workflow wurde gestartet"
    }
|
|
|
|
|
|
@router.get("/{workflow_id}", response_model=Dict[str, Any])
async def get_workflow(
    workflow_id: str,
    current_user: Dict[str, Any] = Depends(get_current_active_user)
):
    """Return the full details of a single workflow (404 if unknown)."""
    mandate_id, user_id = await get_user_context(current_user)

    manager = get_workflow_manager(mandate_id, user_id)

    workflow = await manager.load_workflow(workflow_id)
    if workflow:
        return workflow

    raise HTTPException(
        status_code=status.HTTP_404_NOT_FOUND,
        detail=f"Workflow mit ID {workflow_id} nicht gefunden"
    )
|
|
|
|
@router.put("/{workflow_id}", response_model=Dict[str, Any])
async def update_workflow(
    workflow_id: str,
    workflow_data: Dict[str, Any] = Body(...),
    current_user: Dict[str, Any] = Depends(get_current_active_user)
):
    """Update the metadata of an existing workflow (404 if unknown)."""
    mandate_id, user_id = await get_user_context(current_user)

    manager = get_workflow_manager(mandate_id, user_id)

    workflow = await manager.load_workflow(workflow_id)
    if not workflow:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Workflow mit ID {workflow_id} nicht gefunden"
        )

    # Database interface for mirroring the update
    lucy_interface = get_lucydom_interface(mandate_id, user_id)

    # Accept only attributes that exist on the Workflow model
    update_data = {
        attr: workflow_data[attr]
        for attr in workflow_attributes
        if attr in workflow_data
    }

    # Touch last_activity unless the caller supplied it explicitly
    update_data.setdefault("last_activity", datetime.now().isoformat())

    # Apply the changes and persist
    workflow.update(update_data)
    manager._save_workflow(workflow)

    if lucy_interface:
        lucy_interface.update_workflow(workflow_id, update_data)

    return {
        "workflow_id": workflow_id,
        "message": "Workflow wurde aktualisiert"
    }
|
|
|
|
@router.delete("/{workflow_id}", response_model=Dict[str, Any])
async def delete_workflow(
    workflow_id: str,
    current_user: Dict[str, Any] = Depends(get_current_active_user)
):
    """Delete a workflow (404 if it does not exist)."""
    mandate_id, user_id = await get_user_context(current_user)

    manager = get_workflow_manager(mandate_id, user_id)

    # Manager returns False when the workflow was not found
    if not await manager.delete_workflow(workflow_id):
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Workflow mit ID {workflow_id} nicht gefunden"
        )

    return {
        "workflow_id": workflow_id,
        "message": "Workflow wurde gelöscht"
    }
|
|
|
|
@router.get("/{workflow_id}/status", response_model=Dict[str, Any])
async def get_workflow_status(
    workflow_id: str,
    current_user: Dict[str, Any] = Depends(get_current_active_user)
):
    """Return the current status of a running workflow (404 if unknown)."""
    mandate_id, user_id = await get_user_context(current_user)

    # WorkflowManager scoped to the caller's context
    workflow_manager = get_workflow_manager(mandate_id, user_id)

    # BUGFIX: the local used to be named `status`, shadowing the imported
    # fastapi `status` module — the not-found branch then crashed with
    # AttributeError (None.HTTP_404_NOT_FOUND) instead of raising 404.
    workflow_status = workflow_manager.get_workflow_status(workflow_id)
    if not workflow_status:
        # Not tracked in memory — try loading the workflow from storage
        workflow = await workflow_manager.load_workflow(workflow_id)
        if not workflow:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail=f"Workflow mit ID {workflow_id} nicht gefunden"
            )

        # Build the status from the freshly loaded workflow
        workflow_status = workflow_manager.get_workflow_status(workflow_id)

    return workflow_status
|
|
|
|
@router.get("/{workflow_id}/logs", response_model=List[Dict[str, Any]])
async def get_workflow_logs(
    workflow_id: str,
    current_user: Dict[str, Any] = Depends(get_current_active_user)
):
    """Return the log entries of a workflow (404 if unknown)."""
    mandate_id, user_id = await get_user_context(current_user)

    manager = get_workflow_manager(mandate_id, user_id)

    logs = manager.get_workflow_logs(workflow_id)
    if logs is not None:
        return logs

    # Not tracked in memory — fall back to the persisted workflow
    workflow = await manager.load_workflow(workflow_id)
    if not workflow:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Workflow mit ID {workflow_id} nicht gefunden"
        )

    return workflow.get("logs", [])
|
|
|
|
@router.get("/{workflow_id}/messages", response_model=List[Dict[str, Any]])
async def get_workflow_messages(
    workflow_id: str,
    current_user: Dict[str, Any] = Depends(get_current_active_user)
):
    """Return the messages of a workflow (404 if unknown)."""
    mandate_id, user_id = await get_user_context(current_user)

    manager = get_workflow_manager(mandate_id, user_id)

    messages = manager.get_workflow_messages(workflow_id)
    if messages is not None:
        return messages

    # Not tracked in memory — fall back to the persisted workflow
    workflow = await manager.load_workflow(workflow_id)
    if not workflow:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Workflow mit ID {workflow_id} nicht gefunden"
        )

    return workflow.get("messages", [])
|
|
|
|
@router.post("/{workflow_id}/stop", response_model=Dict[str, Any])
async def stop_workflow(
    workflow_id: str = Path(..., description="ID des zu stoppenden Workflows"),
    current_user: Dict[str, Any] = Depends(get_current_active_user)
):
    """Stop a running workflow (404 if unknown or already finished)."""
    mandate_id, user_id = await get_user_context(current_user)

    manager = get_workflow_manager(mandate_id, user_id)

    # Manager returns a falsy result when there is nothing to stop
    if not await manager.stop_workflow(workflow_id):
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Workflow mit ID {workflow_id} nicht gefunden oder bereits beendet"
        )

    return {
        "workflow_id": workflow_id,
        "status": "stopped",
        "message": "Workflow wurde gestoppt"
    }
|
|
|
|
@router.post("/{workflow_id}/user-input", response_model=Dict[str, Any])
async def submit_user_input(
    workflow_id: str = Path(..., description="ID des Workflows"),
    user_input: Dict[str, Any] = Body(...),
    current_user: Dict[str, Any] = Depends(get_current_active_user)
):
    """
    Allow the user to submit input for a running workflow.

    Used when the user agent is addressed within the workflow. The body may
    contain a "message" (string or dict with "content") and an optional
    "additional_files" list of file ids. Invalid/foreign files are skipped,
    not rejected. Raises 404 if the workflow does not exist, 500 on any
    other processing failure.
    """
    mandate_id, user_id = await get_user_context(current_user)

    # Log receipt and a short preview of the input
    logger.info(f"User input received for workflow {workflow_id}")
    logger.debug(f"Input content: {user_input.get('message', '')[:50]}...")

    # LucyDOM interface scoped to the caller's context
    lucy_interface = get_lucydom_interface(mandate_id, user_id)

    # Normalize the message: accept either a plain string or a
    # {"content": ...} dict from the client.
    message_content = user_input.get("message", "")
    if isinstance(message_content, dict) and "content" in message_content:
        message_content = message_content["content"]

    # Empty/None input is replaced with a default continuation prompt.
    # NOTE(review): assumes message_content is str or None here — a non-str,
    # non-dict value would make .strip() raise; confirm client contract.
    if message_content is None or message_content.strip() == "":
        logger.warning(f"Empty message received for workflow {workflow_id}, using default")
        message_content = "Fortsetzung des Workflows"

    # Collect additional files attached to this input
    additional_files = []
    additional_file_ids = user_input.get("additional_files", [])
    logger.info(f"Processing {len(additional_file_ids)} additional files")

    # Validate each file id; invalid entries are skipped rather than failing
    # the whole request (best-effort semantics).
    for file_id in additional_file_ids:
        try:
            file = lucy_interface.get_file(file_id)
            if not file:
                logger.warning(f"File with ID {file_id} not found")
                continue

            # Cross-mandate access check: skip files of other mandates
            if file.get("mandate_id") != mandate_id:
                logger.warning(f"File {file_id} does not belong to mandate {mandate_id}")
                continue

            additional_files.append(file)
            logger.info(f"Added file {file.get('name', 'unnamed')} (ID: {file_id})")
        except Exception as e:
            logger.error(f"Error processing file {file_id}: {str(e)}")
            # Continue with remaining files instead of failing
            continue

    # Create the AI service for this context
    ai_service = get_ai_service(mandate_id, user_id)

    # WorkflowManager scoped to the caller's context
    workflow_manager = get_workflow_manager(mandate_id, user_id, ai_service)

    # Load and verify the workflow, then continue execution with the input
    try:
        workflow = await workflow_manager.load_workflow(workflow_id)
        if not workflow:
            logger.error(f"Workflow {workflow_id} not found")
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail=f"Workflow mit ID {workflow_id} nicht gefunden"
            )

        # A still-running workflow is stopped in the background before the
        # new input is processed (fire-and-forget; intentionally not awaited)
        if workflow.get("status") == "running":
            logger.info(f"Workflow {workflow_id} is still running but will be continued with new input")
            asyncio.create_task(workflow_manager.stop_workflow(workflow_id))

        # Wrap the normalized content in the message structure the manager expects
        message_object = {
            "content": message_content,
            "role": "user"
        }

        # Execute the workflow with the user's input (awaited — the request
        # returns only after execution completes)
        logger.info(f"Executing workflow {workflow_id} with user input")
        response = await workflow_manager.execute_workflow(
            message=message_object,
            workflow_id=workflow_id,
            files=additional_files,
            is_user_input=True
        )

        logger.info(f"Workflow execution completed: {response.get('status', 'unknown')}")

        return {
            "workflow_id": workflow_id,
            "status": "processing",
            "message": "Benutzereingabe wurde empfangen und wird verarbeitet"
        }
    except HTTPException:
        # Re-raise HTTP exceptions unchanged
        raise
    except Exception as e:
        # Any other failure is surfaced as a 500 with the original message
        logger.error(f"Error processing user input: {str(e)}", exc_info=True)
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Fehler bei der Verarbeitung der Benutzereingabe: {str(e)}"
        )
|
|
|
|
|
|
@router.get("/{workflow_id}/data-statistics", response_model=Dict[str, Any])
async def get_workflow_data_statistics(
    workflow_id: str,
    current_user: Dict[str, Any] = Depends(get_current_active_user)
):
    """
    Return statistics about the data volumes transferred for a workflow.

    Raises 404 if the workflow is unknown; returns an all-zero baseline
    when no statistics have been recorded yet.
    """
    mandate_id, user_id = await get_user_context(current_user)

    # WorkflowManager scoped to the caller's context
    workflow_manager = get_workflow_manager(mandate_id, user_id)

    # BUGFIX: the local used to be named `status`, shadowing the imported
    # fastapi `status` module — the not-found branch then crashed with
    # AttributeError (None.HTTP_404_NOT_FOUND) instead of raising 404.
    workflow_status = workflow_manager.get_workflow_status(workflow_id)
    if not workflow_status:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Workflow mit ID {workflow_id} nicht gefunden"
        )

    # Return only the data statistics portion of the status
    if "data_stats" in workflow_status:
        return {
            "workflow_id": workflow_id,
            "data_stats": workflow_status["data_stats"]
        }

    # No stats recorded yet — return a zeroed baseline
    return {
        "workflow_id": workflow_id,
        "data_stats": {
            "total_processing_time": 0.0,
            "total_token_count": 0,
            "total_bytes_sent": 0,
            "total_bytes_received": 0
        }
    }
|
|
|
|
@router.post("/{workflow_id}/export", response_model=Dict[str, Any])
async def export_workflow(
    workflow_id: str,
    export_format: str = Query("json", description="Exportformat ('json', 'csv', 'pdf')"),
    current_user: Dict[str, Any] = Depends(get_current_active_user)
):
    """
    Export a workflow in one of several formats.

    'json' writes the whole workflow dict; 'csv' writes two files (messages
    and logs) and returns both paths as a list; 'pdf' builds a report via
    reportlab (501 if reportlab is not installed). Raises 404 for an unknown
    workflow and 400 for an unknown format. Files are written into the
    manager's results directory; only the path(s) are returned, not the
    content.
    """
    mandate_id, user_id = await get_user_context(current_user)

    # WorkflowManager scoped to the caller's context
    workflow_manager = get_workflow_manager(mandate_id, user_id)

    # Load the workflow
    workflow = await workflow_manager.load_workflow(workflow_id)
    if not workflow:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Workflow mit ID {workflow_id} nicht gefunden"
        )

    # Perform the export depending on the requested format
    export_path = None

    if export_format == "json":
        # JSON export is simply the workflow dict itself
        export_path = os.path.join(workflow_manager.results_dir, f"workflow_{workflow_id}_export.json")
        with open(export_path, 'w', encoding='utf-8') as f:
            json.dump(workflow, f, indent=2, ensure_ascii=False)

    elif export_format == "csv":
        # CSV export of messages and logs (two separate files)
        from csv import writer

        export_path = os.path.join(workflow_manager.results_dir, f"workflow_{workflow_id}_messages.csv")
        with open(export_path, 'w', newline='', encoding='utf-8') as f:
            csv_writer = writer(f)
            # Write header row
            csv_writer.writerow(["ID", "Sequence", "Role", "Content", "Agent Type", "Created At"])
            # Write message rows
            for msg in workflow.get("messages", []):
                csv_writer.writerow([
                    msg.get("id", ""),
                    msg.get("sequence_no", ""),
                    msg.get("role", ""),
                    msg.get("content", ""),
                    msg.get("agent_type", ""),
                    msg.get("started_at", "")
                ])

        # Export the logs into a second CSV file
        logs_path = os.path.join(workflow_manager.results_dir, f"workflow_{workflow_id}_logs.csv")
        with open(logs_path, 'w', newline='', encoding='utf-8') as f:
            csv_writer = writer(f)
            # Write header row
            csv_writer.writerow(["ID", "Type", "Message", "Timestamp", "Agent ID", "Agent Name"])
            # Write log rows
            for log in workflow.get("logs", []):
                csv_writer.writerow([
                    log.get("id", ""),
                    log.get("type", ""),
                    log.get("message", ""),
                    log.get("timestamp", ""),
                    log.get("agent_id", ""),
                    log.get("agent_name", "")
                ])

        export_path = [export_path, logs_path]  # Return both files

    elif export_format == "pdf":
        # PDF export requires the optional reportlab dependency
        try:
            from reportlab.lib.pagesizes import letter
            from reportlab.platypus import SimpleDocTemplate, Paragraph, Spacer, Table, TableStyle
            from reportlab.lib.styles import getSampleStyleSheet

            export_path = os.path.join(workflow_manager.results_dir, f"workflow_{workflow_id}_export.pdf")

            doc = SimpleDocTemplate(export_path, pagesize=letter)
            styles = getSampleStyleSheet()
            elements = []

            # Title
            elements.append(Paragraph(f"Workflow: {workflow.get('name', workflow_id)}", styles['Title']))
            elements.append(Spacer(1, 12))

            # Workflow info section
            elements.append(Paragraph("Workflow Information", styles['Heading2']))
            elements.append(Paragraph(f"ID: {workflow_id}", styles['Normal']))
            elements.append(Paragraph(f"Status: {workflow.get('status', 'unknown')}", styles['Normal']))
            elements.append(Paragraph(f"Started: {workflow.get('started_at', '')}", styles['Normal']))
            if workflow.get('completed_at'):
                elements.append(Paragraph(f"Completed: {workflow.get('completed_at')}", styles['Normal']))
            elements.append(Spacer(1, 12))

            # Messages section
            elements.append(Paragraph("Messages", styles['Heading2']))

            # Message table, sorted by sequence number
            message_data = [["Sequence", "Role", "Agent Type", "Content"]]
            for msg in sorted(workflow.get("messages", []), key=lambda x: x.get("sequence_no", 0)):
                content = msg.get("content", "")
                if len(content) > 500:  # Truncate overly long contents
                    content = content[:500] + "..."
                message_data.append([
                    str(msg.get("sequence_no", "")),
                    msg.get("role", ""),
                    msg.get("agent_type", ""),
                    content
                ])

            if len(message_data) > 1:  # Only if messages exist
                table = Table(message_data, colWidths=[40, 70, 70, 350])
                table.setStyle(TableStyle([
                    ('BACKGROUND', (0, 0), (-1, 0), (0.9, 0.9, 0.9)),
                    ('TEXTCOLOR', (0, 0), (-1, 0), (0, 0, 0)),
                    ('ALIGN', (0, 0), (-1, -1), 'LEFT'),
                    ('FONTNAME', (0, 0), (-1, 0), 'Helvetica-Bold'),
                    ('BOTTOMPADDING', (0, 0), (-1, 0), 12),
                    ('BACKGROUND', (0, 1), (-1, -1), (0.95, 0.95, 0.95)),
                    ('GRID', (0, 0), (-1, -1), 1, (0.5, 0.5, 0.5))
                ]))
                elements.append(table)
            else:
                elements.append(Paragraph("No messages found", styles['Normal']))

            elements.append(Spacer(1, 12))

            # Logs section
            elements.append(Paragraph("Logs", styles['Heading2']))

            # Log table, sorted by timestamp
            log_data = [["Type", "Timestamp", "Agent", "Message"]]
            for log in sorted(workflow.get("logs", []), key=lambda x: x.get("timestamp", "")):
                log_data.append([
                    log.get("type", ""),
                    log.get("timestamp", ""),
                    log.get("agent_name", ""),
                    log.get("message", "")
                ])

            if len(log_data) > 1:  # Only if logs exist
                table = Table(log_data, colWidths=[60, 120, 80, 270])
                table.setStyle(TableStyle([
                    ('BACKGROUND', (0, 0), (-1, 0), (0.9, 0.9, 0.9)),
                    ('TEXTCOLOR', (0, 0), (-1, 0), (0, 0, 0)),
                    ('ALIGN', (0, 0), (-1, -1), 'LEFT'),
                    ('FONTNAME', (0, 0), (-1, 0), 'Helvetica-Bold'),
                    ('BOTTOMPADDING', (0, 0), (-1, 0), 12),
                    ('BACKGROUND', (0, 1), (-1, -1), (0.95, 0.95, 0.95)),
                    ('GRID', (0, 0), (-1, -1), 1, (0.5, 0.5, 0.5))
                ]))
                elements.append(table)
            else:
                elements.append(Paragraph("No logs found", styles['Normal']))

            # Generate the PDF
            doc.build(elements)

        except ImportError:
            raise HTTPException(
                status_code=status.HTTP_501_NOT_IMPLEMENTED,
                detail="PDF-Export nicht verfügbar (reportlab nicht installiert)"
            )

    else:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=f"Unbekanntes Exportformat: {export_format}"
        )

    if not export_path:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Export konnte nicht durchgeführt werden"
        )

    # Return the resulting path(s); for CSV this is a two-element list
    export_result = {
        "workflow_id": workflow_id,
        "format": export_format,
        "path": export_path
    }

    return export_result
|
|
|
|
@router.delete("/{workflow_id}/messages/{message_id}", response_model=Dict[str, Any])
async def delete_workflow_message(
    workflow_id: str = Path(..., description="ID des Workflows"),
    message_id: str = Path(..., description="ID der zu löschenden Nachricht"),
    current_user: Dict[str, Any] = Depends(get_current_active_user)
):
    """
    Delete a single message from a workflow.

    Removes the message from the persisted workflow and, best-effort,
    from the database as well. Raises 404 when workflow or message is
    missing and 500 on any other failure.
    """
    mandate_id, user_id = await get_user_context(current_user)

    manager = get_workflow_manager(mandate_id, user_id)

    try:
        # Load the workflow
        workflow = await manager.load_workflow(workflow_id)
        if not workflow:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail=f"Workflow mit ID {workflow_id} nicht gefunden"
            )

        # The workflow must carry a messages collection at all
        if "messages" not in workflow:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail=f"Keine Nachrichten im Workflow {workflow_id} gefunden"
            )

        # Locate the message by exact id match
        message_index = -1
        for idx, candidate in enumerate(workflow["messages"]):
            if candidate.get("id") == message_id:
                message_index = idx
                break

        if message_index == -1:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail=f"Nachricht mit ID {message_id} im Workflow {workflow_id} nicht gefunden"
            )

        # Remove the message from the workflow
        deleted_message = workflow["messages"].pop(message_index)

        # Record the deletion in the workflow log
        manager._add_log(
            workflow,
            f"Nachricht gelöscht: {deleted_message.get('role', 'unknown')} - {message_id[:8]}...",
            "info"
        )

        # Persist the modified workflow
        manager._save_workflow(workflow)

        # Mirror the deletion into the database (best effort)
        if manager.lucydom_interface:
            try:
                # Must be implemented by the LucyDOM interface
                manager.lucydom_interface.delete_workflow_message(workflow_id, message_id)
            except Exception as e:
                # Log the DB failure but do not fail the request
                logger.warning(f"Nachricht aus Workflow entfernt, aber Fehler beim Löschen aus der Datenbank: {str(e)}")

        return {
            "workflow_id": workflow_id,
            "message_id": message_id,
            "success": True,
            "message": "Nachricht erfolgreich gelöscht"
        }

    except HTTPException:
        # Pass known HTTP errors through unchanged
        raise
    except Exception as e:
        # Wrap anything else as a 500
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Fehler beim Löschen der Nachricht: {str(e)}"
        )
|
|
|
|
|
|
@router.delete("/{workflow_id}/messages/{message_id}/files/{file_id}", response_model=Dict[str, Any])
async def delete_file_from_message(
    workflow_id: str = Path(..., description="ID des Workflows"),
    message_id: str = Path(..., description="ID der Nachricht"),
    file_id: str = Path(..., description="ID der zu löschenden Datei"),
    current_user: Dict[str, Any] = Depends(get_current_active_user)
):
    """
    Delete a single file from a message within a workflow.

    Message lookup tolerates case differences and id prefixes; the file is
    matched against both the document id and its source id (exact or
    substring). Raises 404 at each lookup stage, 500 on any other failure.
    Database update is best-effort and does not fail the request.
    """
    mandate_id, user_id = await get_user_context(current_user)

    # Detailed request logging
    logger.debug(f"DELETE request: Remove file {file_id} from message {message_id} in workflow {workflow_id}")

    # WorkflowManager scoped to the caller's context
    workflow_manager = get_workflow_manager(mandate_id, user_id)

    try:
        # Load the workflow
        workflow = await workflow_manager.load_workflow(workflow_id)
        if not workflow:
            logger.error(f"Workflow {workflow_id} not found")
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail=f"Workflow mit ID {workflow_id} nicht gefunden"
            )

        # Log workflow info
        logger.debug(f"Workflow found: {workflow.get('name', workflow_id)}")

        # Dump the message structure for debugging
        if "messages" in workflow:
            logger.debug(f"Workflow has {len(workflow['messages'])} messages")
            for i, msg in enumerate(workflow['messages']):
                logger.debug(f"Message {i+1}: ID={msg.get('id')}, Type={msg.get('agent_type')}")

        # Find the message — several increasingly lenient strategies
        message = None

        # First try exact id match
        message = next((m for m in workflow.get("messages", []) if m.get("id") == message_id), None)

        # If not found, try case-insensitive match
        if not message and isinstance(message_id, str):
            message = next((m for m in workflow.get("messages", [])
                          if isinstance(m.get("id"), str) and m.get("id").lower() == message_id.lower()), None)

        # If still not found, treat message_id as an id prefix
        if not message and isinstance(message_id, str):
            message = next((m for m in workflow.get("messages", [])
                          if isinstance(m.get("id"), str) and m.get("id").startswith(message_id)), None)

        if not message:
            logger.error(f"Message {message_id} not found in workflow {workflow_id}")
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail=f"Nachricht mit ID {message_id} im Workflow {workflow_id} nicht gefunden"
            )

        # Log message info
        logger.debug(f"Message found: {message.get('id')}, type: {message.get('agent_type')}")

        # The message must have a non-empty documents list
        if "documents" not in message or not message["documents"]:
            logger.error(f"No documents in message {message_id}")
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail=f"Keine Dateien in der Nachricht {message_id} gefunden"
            )

        # Debug-log every attached document
        logger.debug(f"Message has {len(message['documents'])} documents")
        for i, doc in enumerate(message["documents"]):
            source = doc.get("source", {})
            logger.debug(f"Document {i+1}: ID={doc.get('id')}, Source ID={source.get('id')}")

        # Search for the file with flexible matching
        found_file = False
        file_index = -1
        found_doc = None

        # Try all possible variations of file references: exact match on the
        # document id or its source id, then substring containment on either.
        for i, doc in enumerate(message["documents"]):
            doc_id = doc.get("id")
            source = doc.get("source", {})
            source_id = source.get("id")

            # Try matching different ID formats
            if ((doc_id and doc_id == file_id) or
                (source_id and source_id == file_id) or
                (doc_id and isinstance(doc_id, str) and file_id in doc_id) or
                (source_id and isinstance(source_id, str) and file_id in source_id)):
                file_index = i
                found_file = True
                found_doc = doc
                logger.debug(f"Found file at index {i}: doc_id={doc_id}, source_id={source_id}")
                break

        if not found_file:
            logger.error(f"File {file_id} not found in message {message_id}")
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail=f"Datei mit ID {file_id} in der Nachricht {message_id} nicht gefunden"
            )

        # Remove the file reference from the message in place
        deleted_file = message["documents"].pop(file_index)

        # Log the removal (fall back to the raw id when no name is present)
        file_name = deleted_file.get("source", {}).get("name", file_id)
        logger.info(f"Removed file {file_name} from message {message_id}")

        # Record the removal in the workflow log
        workflow_manager._add_log(
            workflow,
            f"Datei aus Nachricht entfernt: {file_name} (ID: {file_id})",
            "info",
            message.get("agent_id"),
            message.get("agent_type")
        )

        # Persist the updated workflow state
        workflow_manager._save_workflow(workflow)

        # Mirror the change into the database when available (best effort)
        if workflow_manager.lucydom_interface:
            try:
                # Pass the whole modified message to the LucyDOM interface
                workflow_manager.lucydom_interface.update_workflow_message(message["id"], message)
                logger.debug(f"Database updated for message {message_id}")
            except Exception as e:
                logger.warning(f"Database update warning: {str(e)}")

        return {
            "workflow_id": workflow_id,
            "message_id": message_id,
            "file_id": file_id,
            "success": True,
            "message": "Datei erfolgreich aus der Nachricht gelöscht"
        }

    except HTTPException:
        # Re-raise HTTP exceptions unchanged
        raise
    except Exception as e:
        # Log the full traceback, then surface a 500
        logger.error(f"Error deleting file: {str(e)}")
        import traceback
        traceback_str = traceback.format_exc()
        logger.error(f"Traceback: {traceback_str}")

        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Fehler beim Löschen der Datei aus der Nachricht: {str(e)}"
        )