From 7d21146b6aaea0a23f78f8ec54d1559c38182e0c Mon Sep 17 00:00:00 2001
From: ValueOn AG
Date: Sun, 17 Aug 2025 20:41:37 +0200
Subject: [PATCH] Workflow end-to-end integration with Outlook
---
modules/chat/documents/documentGeneration.py | 62 +-
modules/chat/handling/handlingTasks.py | 78 ++
modules/connectors/connectorDbJson.py | 16 +
modules/interfaces/interfaceAppAccess.py | 3 +
modules/interfaces/interfaceAppObjects.py | 56 +-
modules/interfaces/interfaceChatObjects.py | 42 +-
.../interfaces/interfaceComponentObjects.py | 65 +-
modules/methods/methodOutlook.py | 966 ++++++++++++------
modules/routes/routeDataConnections.py | 15 +-
modules/routes/routeSecurityGoogle.py | 3 +
modules/routes/routeSecurityMsft.py | 3 +
modules/security/auth.py | 18 +
12 files changed, 1007 insertions(+), 320 deletions(-)
diff --git a/modules/chat/documents/documentGeneration.py b/modules/chat/documents/documentGeneration.py
index 1a83ebf5..9f9630de 100644
--- a/modules/chat/documents/documentGeneration.py
+++ b/modules/chat/documents/documentGeneration.py
@@ -41,8 +41,21 @@ class DocumentGenerator:
if mime_type == "application/octet-stream":
content = getattr(doc, 'content', '')
mime_type = detectMimeTypeFromContent(content, doc.filename, self.service)
+
+ # Add result label to filename for document objects too
+ base_filename = doc.filename
+ if hasattr(action, 'execResultLabel') and action.execResultLabel:
+ result_label = action.execResultLabel.strip()
+ if result_label:
+ # Check if filename already starts with resultLabel to avoid duplication
+ if not base_filename.startswith(f"{result_label}-"):
+ base_filename = f"{result_label}-{base_filename}"
+ logger.info(f"Added resultLabel '{result_label}' as prefix to document object filename: {base_filename}")
+ else:
+ logger.info(f"Document object filename already has resultLabel prefix: {base_filename}")
+
return {
- 'filename': doc.filename,
+ 'filename': base_filename,
'fileSize': getattr(doc, 'fileSize', 0),
'mimeType': mime_type,
'content': getattr(doc, 'content', ''),
@@ -50,8 +63,33 @@ class DocumentGenerator:
}
elif isinstance(doc, dict):
# Dictionary format document - handle both 'documentName' and 'filename' keys
- filename = doc.get('documentName', doc.get('filename', \
- f"{action.execMethod}_{action.execAction}_{datetime.now(UTC).strftime('%Y%m%d_%H%M%S')}"))
+ base_filename = doc.get('documentName', doc.get('filename', ''))
+
+ # Debug logging for resultLabel
+ if hasattr(action, 'execResultLabel'):
+ logger.info(f"Action {action.execMethod}.{action.execAction} has execResultLabel: '{action.execResultLabel}' (type: {type(action.execResultLabel)})")
+ else:
+ logger.info(f"Action {action.execMethod}.{action.execAction} has NO execResultLabel attribute")
+
+ # If no filename provided, generate one with action info
+ if not base_filename:
+ base_filename = f"{action.execMethod}_{action.execAction}_{datetime.now(UTC).strftime('%Y%m%d_%H%M%S')}"
+
+ # ALWAYS add result label to filename for better document selection
+ # This ensures consistent naming regardless of whether filename was provided or generated
+ if hasattr(action, 'execResultLabel') and action.execResultLabel:
+ result_label = action.execResultLabel.strip()
+ if result_label:
+ # Check if filename already starts with resultLabel to avoid duplication
+ if not base_filename.startswith(f"{result_label}-"):
+ base_filename = f"{result_label}-{base_filename}"
+ logger.info(f"Added resultLabel '{result_label}' as prefix to filename: {base_filename}")
+ else:
+ logger.info(f"Filename already has resultLabel prefix: {base_filename}")
+ else:
+ logger.info(f"No resultLabel available for action {action.execMethod}.{action.execAction}")
+
+ filename = base_filename
mimeType = doc.get('mimeType', 'application/octet-stream')
# Handle documentData structure - it might be a dict with 'content' key or direct content
@@ -85,7 +123,23 @@ class DocumentGenerator:
else:
# Unknown document type
logger.warning(f"Unknown document type for action {action.execMethod}.{action.execAction}: {type(doc)}")
- filename = f"{action.execMethod}_{action.execAction}_{datetime.now(UTC).strftime('%Y%m%d_%H%M%S')}"
+ base_filename = f"{action.execMethod}_{action.execAction}_{datetime.now(UTC).strftime('%Y%m%d_%H%M%S')}"
+
+ # ALWAYS add result label to filename for better document selection
+ # This ensures consistent naming regardless of document type
+ if hasattr(action, 'execResultLabel') and action.execResultLabel:
+ result_label = action.execResultLabel.strip()
+ if result_label:
+ # Check if filename already starts with resultLabel to avoid duplication
+ if not base_filename.startswith(f"{result_label}-"):
+ base_filename = f"{result_label}-{base_filename}"
+ logger.info(f"Added resultLabel '{result_label}' as prefix to fallback filename: {base_filename}")
+ else:
+ logger.info(f"Fallback filename already has resultLabel prefix: {base_filename}")
+ else:
+ logger.info(f"No resultLabel available for action {action.execMethod}.{action.execAction}")
+
+ filename = base_filename
mimeType = detectMimeTypeFromContent(doc, filename, self.service)
return {
'filename': filename,
diff --git a/modules/chat/handling/handlingTasks.py b/modules/chat/handling/handlingTasks.py
index c588e0a7..27e1cabe 100644
--- a/modules/chat/handling/handlingTasks.py
+++ b/modules/chat/handling/handlingTasks.py
@@ -368,6 +368,45 @@ class HandlingTasks:
continue
else:
logger.error(f"=== TASK {task_index or '?'} FAILED: {task_step.objective} after {attempt+1} attempts ===")
+
+ # Create user-facing error message for task failure
+ error_message = f"❌ Task {task_index or '?'} - '{task_step.objective}' failed after {attempt+1} attempts\n\n"
+ error_message += f"Objective: {task_step.objective}\n\n"
+
+ # Add specific error details if available
+ if error:
+ error_message += f"Error: {error}\n\n"
+
+ # Add retry information
+ error_message += f"Attempts: {attempt+1}\n"
+ error_message += f"Status: Will retry automatically\n\n"
+ error_message += "The system will attempt to retry this task. Please wait..."
+
+ # Create workflow message for user
+ message_data = {
+ "workflowId": workflow.id,
+ "role": "assistant",
+ "message": error_message,
+ "status": "step",
+ "sequenceNr": len(workflow.messages) + 1,
+ "publishedAt": datetime.now(UTC).isoformat(),
+ "actionId": None,
+ "actionMethod": "task",
+ "actionName": "task_retry",
+ "documentsLabel": None,
+ "documents": []
+ }
+
+ try:
+ message = self.chatInterface.createWorkflowMessage(message_data)
+ if message:
+ workflow.messages.append(message)
+ logger.info(f"Created user-facing retry message for failed task: {task_step.objective}")
+ else:
+ logger.error(f"Failed to create user-facing retry message for failed task: {task_step.objective}")
+ except Exception as e:
+ logger.error(f"Error creating user-facing retry message: {str(e)}")
+
return TaskResult(
taskId=task_step.id,
status=TaskStatus.FAILED,
@@ -376,6 +415,45 @@ class HandlingTasks:
error=error
)
logger.error(f"=== TASK {task_index or '?'} FAILED AFTER ALL RETRIES: {task_step.objective} ===")
+
+ # Create user-facing error message for task failure
+ error_message = f"❌ Task {task_index or '?'} - '{task_step.objective}' failed after all retries\n\n"
+ error_message += f"Objective: {task_step.objective}\n\n"
+
+ # Add specific error details if available
+ if error and error != "Task failed after all retries.":
+ error_message += f"Error: {error}\n\n"
+
+ # Add retry information
+ error_message += f"Retries attempted: {retry_context.retry_count if retry_context else 'Unknown'}\n"
+ error_message += f"Status: Task failed permanently\n\n"
+ error_message += "Please check the connection and try again, or contact support if the issue persists."
+
+ # Create workflow message for user
+ message_data = {
+ "workflowId": workflow.id,
+ "role": "assistant",
+ "message": error_message,
+ "status": "step",
+ "sequenceNr": len(workflow.messages) + 1,
+ "publishedAt": datetime.now(UTC).isoformat(),
+ "actionId": None,
+ "actionMethod": "task",
+ "actionName": "task_failure",
+ "documentsLabel": None,
+ "documents": []
+ }
+
+ try:
+ message = self.chatInterface.createWorkflowMessage(message_data)
+ if message:
+ workflow.messages.append(message)
+ logger.info(f"Created user-facing error message for failed task: {task_step.objective}")
+ else:
+ logger.error(f"Failed to create user-facing error message for failed task: {task_step.objective}")
+ except Exception as e:
+ logger.error(f"Error creating user-facing error message: {str(e)}")
+
return TaskResult(
taskId=task_step.id,
status=TaskStatus.FAILED,
diff --git a/modules/connectors/connectorDbJson.py b/modules/connectors/connectorDbJson.py
index d6ebc877..27b87b2d 100644
--- a/modules/connectors/connectorDbJson.py
+++ b/modules/connectors/connectorDbJson.py
@@ -383,6 +383,22 @@ class DatabaseConnector:
self._tablesCache = {}
self._tableMetadataCache = {}
+ def clearTableCache(self, table: str) -> None:
+ """Clears cache for a specific table to ensure fresh data."""
+ if table in self._tablesCache:
+ del self._tablesCache[table]
+ logger.debug(f"Cleared cache for table: {table}")
+
+ if table in self._tableMetadataCache:
+ del self._tableMetadataCache[table]
+ logger.debug(f"Cleared metadata cache for table: {table}")
+
+ def clearAllCache(self) -> None:
+ """Clears all cache to ensure completely fresh data."""
+ self._tablesCache.clear()
+ self._tableMetadataCache.clear()
+ logger.debug("Cleared all database cache")
+
# Public API
def getTables(self) -> List[str]:
diff --git a/modules/interfaces/interfaceAppAccess.py b/modules/interfaces/interfaceAppAccess.py
index 262ab3eb..b910c002 100644
--- a/modules/interfaces/interfaceAppAccess.py
+++ b/modules/interfaces/interfaceAppAccess.py
@@ -235,6 +235,9 @@ class AppAccess:
"lastActivity": datetime.now()
})
+ # Clear cache to ensure fresh data
+ self.db.clearTableCache("sessions")
+
return True
except Exception as e:
diff --git a/modules/interfaces/interfaceAppObjects.py b/modules/interfaces/interfaceAppObjects.py
index 56397be9..a3de6f69 100644
--- a/modules/interfaces/interfaceAppObjects.py
+++ b/modules/interfaces/interfaceAppObjects.py
@@ -194,7 +194,11 @@ class AppObjects:
Boolean indicating permission
"""
return self.access.canModify(table, recordId)
-
+
+ def _clearTableCache(self, table: str) -> None:
+ """Clears the cache for a specific table to ensure fresh data."""
+ self.db.clearTableCache(table)
+
def getInitialId(self, table: str) -> Optional[str]:
"""Returns the initial ID for a table."""
return self.db.getInitialId(table)
@@ -352,6 +356,9 @@ class AppObjects:
# Save to connections table
self.db.recordCreate("connections", connection.to_dict())
+ # Clear cache to ensure fresh data
+ self._clearTableCache("connections")
+
return connection
except Exception as e:
@@ -372,6 +379,9 @@ class AppObjects:
# Delete connection
self.db.recordDelete("connections", connectionId)
+ # Clear cache to ensure fresh data
+ self._clearTableCache("connections")
+
except Exception as e:
logger.error(f"Error removing user connection: {str(e)}")
raise ValueError(f"Failed to remove user connection: {str(e)}")
@@ -379,8 +389,7 @@ class AppObjects:
def authenticateLocalUser(self, username: str, password: str) -> Optional[User]:
"""Authenticates a user by username and password using local authentication."""
# Clear the users table from cache and reload it
- if "users" in self.db._tablesCache:
- del self.db._tablesCache["users"]
+ self._clearTableCache("users")
# Get user by username
user = self.getUserByUsername(username)
@@ -445,6 +454,9 @@ class AppObjects:
if not createdRecord or not createdRecord.get("id"):
raise ValueError("Failed to create user record")
+ # Clear cache to ensure fresh data
+ self._clearTableCache("users")
+
# Add external connection if provided
if externalId and externalUsername:
self.addUserConnection(
@@ -460,11 +472,8 @@ class AppObjects:
if not createdUser or len(createdUser) == 0:
raise ValueError("Failed to retrieve created user")
- # Clear both table and metadata caches
- if hasattr(self.db, '_tablesCache') and "users" in self.db._tablesCache:
- del self.db._tablesCache["users"]
- if hasattr(self.db, '_tableMetadataCache') and "users" in self.db._tableMetadataCache:
- del self.db._tableMetadataCache["users"]
+ # Clear cache to ensure fresh data (already done above)
+ # No need for additional cache clearing since _clearTableCache("users") was called
return User.from_dict(createdUser[0])
@@ -491,6 +500,9 @@ class AppObjects:
# Update user record
self.db.recordModify("users", userId, updatedUser.to_dict())
+ # Clear cache to ensure fresh data
+ self._clearTableCache("users")
+
# Get updated user
updatedUser = self.getUser(userId)
if not updatedUser:
@@ -562,11 +574,8 @@ class AppObjects:
if not success:
raise ValueError(f"Failed to delete user {userId}")
- # Clear both table and metadata caches
- if hasattr(self.db, '_tablesCache') and "users" in self.db._tablesCache:
- del self.db._tablesCache["users"]
- if hasattr(self.db, '_tableMetadataCache') and "users" in self.db._tableMetadataCache:
- del self.db._tableMetadataCache["users"]
+ # Clear cache to ensure fresh data
+ self._clearTableCache("users")
logger.info(f"User {userId} successfully deleted")
return True
@@ -611,6 +620,9 @@ class AppObjects:
if not createdRecord or not createdRecord.get("id"):
raise ValueError("Failed to create mandate record")
+ # Clear cache to ensure fresh data
+ self._clearTableCache("mandates")
+
return Mandate.from_dict(createdRecord)
def updateMandate(self, mandateId: str, updateData: Dict[str, Any]) -> Mandate:
@@ -637,6 +649,9 @@ class AppObjects:
# Update mandate record
self.db.recordModify("mandates", mandateId, updatedMandate.to_dict())
+ # Clear cache to ensure fresh data
+ self._clearTableCache("mandates")
+
# Get updated mandate
updatedMandate = self.getMandate(mandateId)
if not updatedMandate:
@@ -665,7 +680,12 @@ class AppObjects:
raise ValueError(f"Cannot delete mandate {mandateId} with existing users")
# Delete mandate
- return self.db.recordDelete("mandates", mandateId)
+ success = self.db.recordDelete("mandates", mandateId)
+
+ # Clear cache to ensure fresh data
+ self._clearTableCache("mandates")
+
+ return success
except Exception as e:
logger.error(f"Error deleting mandate: {str(e)}")
@@ -747,6 +767,9 @@ class AppObjects:
# Save to database
self.db.recordCreate("tokens", token_dict)
+ # Clear cache to ensure fresh data
+ self._clearTableCache("tokens")
+
logger.debug(f"Token saved for user {self.currentUser.id} with authority {token.authority}")
except Exception as e:
@@ -792,7 +815,10 @@ class AppObjects:
# Delete each token
for token in tokens:
self.db.recordDelete("tokens", token["id"])
-
+
+ # Clear cache to ensure fresh data
+ self._clearTableCache("tokens")
+
except Exception as e:
logger.error(f"Error deleting token: {str(e)}")
raise
diff --git a/modules/interfaces/interfaceChatObjects.py b/modules/interfaces/interfaceChatObjects.py
index 7105f5de..7101ecd6 100644
--- a/modules/interfaces/interfaceChatObjects.py
+++ b/modules/interfaces/interfaceChatObjects.py
@@ -121,6 +121,10 @@ class ChatObjects:
"""Delegate to access control module."""
return self.access.canModify(table, recordId)
+ def _clearTableCache(self, table: str) -> None:
+ """Clears the cache for a specific table to ensure fresh data."""
+ self.db.clearTableCache(table)
+
# Utilities
def getInitialId(self, table: str) -> Optional[str]:
@@ -196,6 +200,9 @@ class ChatObjects:
# Create workflow in database
created = self.db.recordCreate("workflows", workflowData)
+ # Clear cache to ensure fresh data
+ self._clearTableCache("workflows")
+
# Convert to ChatWorkflow model
return ChatWorkflow(
id=created["id"],
@@ -226,6 +233,9 @@ class ChatObjects:
# Update workflow in database
updated = self.db.recordModify("workflows", workflowId, workflowData)
+ # Clear cache to ensure fresh data
+ self._clearTableCache("workflows")
+
# Convert to ChatWorkflow model
return ChatWorkflow(
id=updated["id"],
@@ -256,7 +266,12 @@ class ChatObjects:
raise PermissionError(f"No permission to delete workflow {workflowId}")
# Delete workflow
- return self.db.recordDelete("workflows", workflowId)
+ success = self.db.recordDelete("workflows", workflowId)
+
+ # Clear cache to ensure fresh data
+ self._clearTableCache("workflows")
+
+ return success
# Workflow Messages
@@ -328,6 +343,9 @@ class ChatObjects:
# Create message in database
createdMessage = self.db.recordCreate("workflowMessages", messageData)
+ # Clear cache to ensure fresh data
+ self._clearTableCache("workflowMessages")
+
# Convert to ChatMessage model
return ChatMessage(
id=createdMessage["id"],
@@ -411,6 +429,9 @@ class ChatObjects:
updatedMessage = self.db.recordModify("workflowMessages", messageId, messageData)
if updatedMessage:
logger.debug(f"Message {messageId} updated successfully")
+
+ # Clear cache to ensure fresh data
+ self._clearTableCache("workflowMessages")
else:
logger.warning(f"Failed to update message {messageId}")
@@ -440,7 +461,12 @@ class ChatObjects:
return False
# Delete the message from the database
- return self.db.recordDelete("workflowMessages", messageId)
+ success = self.db.recordDelete("workflowMessages", messageId)
+
+ # Clear cache to ensure fresh data
+ self._clearTableCache("workflowMessages")
+
+ return success
except Exception as e:
logger.error(f"Error deleting message {messageId}: {str(e)}")
return False
@@ -709,6 +735,9 @@ class ChatObjects:
# Create log in database
createdLog = self.db.recordCreate("workflowLogs", log_model.to_dict())
+ # Clear cache to ensure fresh data
+ self._clearTableCache("workflowLogs")
+
# Return validated ChatLog instance
return ChatLog(**createdLog)
@@ -1078,6 +1107,9 @@ class ChatObjects:
# Create task in database
createdTask = self.db.recordCreate("tasks", taskData)
+ # Clear cache to ensure fresh data
+ self._clearTableCache("tasks")
+
# Convert to TaskItem model
task = TaskItem(
id=createdTask["id"],
@@ -1130,6 +1162,9 @@ class ChatObjects:
# Update task in database
updatedTask = self.db.recordModify("tasks", taskId, taskData)
+ # Clear cache to ensure fresh data
+ self._clearTableCache("tasks")
+
# Convert to TaskItem model
return TaskItem(
id=updatedTask["id"],
@@ -1178,6 +1213,9 @@ class ChatObjects:
if taskId in workflowTasks:
workflowTasks.remove(taskId)
self.updateWorkflow(task.workflowId, {"tasks": workflowTasks})
+
+ # Clear cache to ensure fresh data
+ self._clearTableCache("tasks")
return True
return False
diff --git a/modules/interfaces/interfaceComponentObjects.py b/modules/interfaces/interfaceComponentObjects.py
index 0041da96..09a149c4 100644
--- a/modules/interfaces/interfaceComponentObjects.py
+++ b/modules/interfaces/interfaceComponentObjects.py
@@ -235,6 +235,10 @@ class ComponentObjects:
"""Delegate to access control module."""
return self.access.canModify(table, recordId)
+ def _clearTableCache(self, table: str) -> None:
+ """Clears the cache for a specific table to ensure fresh data."""
+ self.db.clearTableCache(table)
+
# Utilities
def getInitialId(self, table: str) -> Optional[str]:
@@ -279,6 +283,9 @@ class ComponentObjects:
if not createdRecord or not createdRecord.get("id"):
raise ValueError("Failed to create prompt record")
+ # Clear cache to ensure fresh data
+ self._clearTableCache("prompts")
+
return createdRecord
def updatePrompt(self, promptId: str, updateData: Dict[str, Any]) -> Dict[str, Any]:
@@ -292,6 +299,9 @@ class ComponentObjects:
# Update prompt record directly with the update data
self.db.recordModify("prompts", promptId, updateData)
+ # Clear cache to ensure fresh data
+ self._clearTableCache("prompts")
+
# Get updated prompt
updatedPrompt = self.getPrompt(promptId)
if not updatedPrompt:
@@ -313,7 +323,13 @@ class ComponentObjects:
if not self._canModify("prompts", promptId):
raise PermissionError(f"No permission to delete prompt {promptId}")
- return self.db.recordDelete("prompts", promptId)
+ # Delete prompt
+ success = self.db.recordDelete("prompts", promptId)
+
+ # Clear cache to ensure fresh data
+ self._clearTableCache("prompts")
+
+ return success
# File Utilities
@@ -528,6 +544,10 @@ class ComponentObjects:
# Store in database
self.db.recordCreate("files", fileItem.to_dict())
+
+ # Clear cache to ensure fresh data
+ self._clearTableCache("files")
+
return fileItem
def updateFile(self, fileId: str, updateData: Dict[str, Any]) -> Dict[str, Any]:
@@ -545,7 +565,12 @@ class ComponentObjects:
updateData["filename"] = self._generateUniqueFilename(updateData["filename"], fileId)
# Update file
- return self.db.recordModify("files", fileId, updateData)
+ success = self.db.recordModify("files", fileId, updateData)
+
+ # Clear cache to ensure fresh data
+ self._clearTableCache("files")
+
+ return success
def deleteFile(self, fileId: str) -> bool:
"""Deletes a file if user has access."""
@@ -576,7 +601,12 @@ class ComponentObjects:
logger.warning(f"Error deleting FileData for file {fileId}: {str(e)}")
# Delete the FileItem entry
- return self.db.recordDelete("files", fileId)
+ success = self.db.recordDelete("files", fileId)
+
+ # Clear cache to ensure fresh data
+ self._clearTableCache("files")
+
+ return success
except FileNotFoundError as e:
raise
@@ -634,6 +664,10 @@ class ComponentObjects:
}
self.db.recordCreate("fileData", fileDataObj)
+
+ # Clear cache to ensure fresh data
+ self._clearTableCache("fileData")
+
logger.debug(f"Successfully stored data for file {fileId} (base64Encoded: {base64Encoded})")
return True
except Exception as e:
@@ -668,8 +702,25 @@ class ComponentObjects:
# Decode base64 to bytes
return base64.b64decode(data)
else:
- # Convert text to bytes
- return data.encode('utf-8')
+ # Check if this is supposed to be a binary file based on mime type
+ mimeType = file.mimeType
+ isTextFormat = self.isTextMimeType(mimeType)
+
+ if isTextFormat:
+ # This is a text file, encode to bytes as expected
+ return data.encode('utf-8')
+ else:
+ # This is a binary file that was incorrectly stored as text
+ # Try to decode it as if it was base64 (common fallback scenario)
+ try:
+ logger.warning(f"Binary file {fileId} ({mimeType}) was stored as text, attempting base64 decode")
+ return base64.b64decode(data)
+ except Exception as base64_error:
+ logger.error(f"Failed to decode binary file {fileId} as base64: {str(base64_error)}")
+ # Last resort: return the data as-is (might be corrupted)
+ logger.warning(f"Returning raw data for file {fileId} - file may be corrupted")
+ return data.encode('utf-8') if isinstance(data, str) else data
+
except Exception as e:
logger.error(f"Error processing file data for {fileId}: {str(e)}")
return None
@@ -810,7 +861,11 @@ class ComponentObjects:
self.db.recordCreate("fileData", dataUpdate)
logger.debug(f"Created new file data for file ID {fileId} (base64Encoded: {base64Encoded})")
+ # Clear cache to ensure fresh data
+ self._clearTableCache("fileData")
+
return True
+
except Exception as e:
logger.error(f"Error updating data for file {fileId}: {str(e)}")
return False
diff --git a/modules/methods/methodOutlook.py b/modules/methods/methodOutlook.py
index 8727c00e..5a2c7f12 100644
--- a/modules/methods/methodOutlook.py
+++ b/modules/methods/methodOutlook.py
@@ -1,6 +1,78 @@
"""
-Outlook method module.
-Handles Outlook operations using the Outlook service.
+Microsoft Outlook Email Operations Module
+
+This module provides actions for composing and sending emails via Microsoft Outlook using the Microsoft Graph API.
+
+ACTION CONTRACT DEFINITION:
+==========================
+
+1. COMPOSE EMAIL ACTION (composeEmail):
+ ====================================
+
+ Purpose: Use AI to compose professional email content
+
+ Input Parameters:
+ - context (str): Email context/requirements
+ - recipient (str, optional): Recipient information
+ - attachments (List[str], optional): Available documents to reference
+ - tone (str, optional): Email tone (formal, casual, etc.)
+ - expectedDocumentFormats (list, optional): Ignored - always produces JSON
+
+ Output Contract:
+ The action produces a JSON document with this EXACT structure:
+ {
+ "context": "original context",
+ "recipient": "recipient info",
+ "tone": "email tone",
+ "timestamp": "ISO timestamp",
+ "usage": "usage description",
+ "to": ["email@example.com"],
+ "subject": "Email subject",
+ "body": "Email body content",
+ "cc": [],
+ "bcc": [],
+ "attachments": ["docItem:uuid:filename.pdf"]
+ }
+
+ Key Points:
+ - Email fields (to, subject, body, cc, bcc, attachments) are at ROOT LEVEL
+ - NOT wrapped in a "composedEmail" field
+ - Always produces .json format regardless of expectedDocumentFormats
+ - AI response is validated and parsed before output
+
+2. SEND EMAIL ACTION (sendEmail):
+ ==============================
+
+ Purpose: Send the composed email via Outlook (creates draft)
+
+ Input Parameters:
+ - connectionReference (str): Microsoft connection reference
+ - composedEmail (str): Reference to composed email document (docItem:...)
+ - expectedDocumentFormats (list, optional): Expected output formats
+
+ Input Contract:
+ The composedEmail document MUST have this EXACT structure:
+ {
+ "to": ["email@example.com"],
+ "subject": "Email subject",
+ "body": "Email body content",
+ "cc": [],
+ "bcc": [],
+ "attachments": ["docItem:uuid:filename.pdf"]
+ }
+
+ Key Points:
+ - Email fields must be at ROOT LEVEL
+ - NOT wrapped in a nested structure
+ - Reads file content from database using fileId
+ - Creates email draft in Outlook Drafts folder
+ - Returns success/failure status
+
+DATA FLOW:
+==========
+composeEmail → JSON Document → sendEmail → Outlook Draft
+
+The contract ensures that composeEmail outputs exactly what sendEmail expects to consume.
"""
import logging
@@ -29,24 +101,27 @@ class MethodOutlook(MethodBase):
"""
try:
# Get the connection from the service
- userConnection = self.service.getUserConnection(connectionReference)
+ userConnection = self.service.getUserConnectionFromConnectionReference(connectionReference)
if not userConnection:
logger.error(f"Connection not found: {connectionReference}")
return None
# Get the token for this connection
- token = self.service.getTokenForConnection(userConnection.id)
+ token = self.service.interfaceApp.getToken(userConnection.authority.value)
if not token:
logger.error(f"Token not found for connection: {userConnection.id}")
return None
- # Check if token is valid
- if not token.isValid():
- logger.error(f"Token is invalid for connection: {userConnection.id}")
- return None
+ # Check if token is expired
+ if hasattr(token, 'expiresAt') and token.expiresAt:
+ import time
+ current_time = time.time()
+ if current_time > token.expiresAt:
+ logger.error(f"Token for connection {userConnection.id} is expired (expiresAt: {token.expiresAt}, current: {current_time})")
+ return None
# Check if connection is active
- if userConnection.status != ConnectionStatus.active:
+ if userConnection.status.value != "active":
logger.error(f"Connection is not active: {userConnection.id}, status: {userConnection.status.value}")
return None
@@ -310,41 +385,19 @@ class MethodOutlook(MethodBase):
logger.info(f"Successfully retrieved {len(emails_data.get('value', []))} emails from {folder}")
except ImportError:
- logger.error("requests module not available, falling back to simulation")
- # Fallback to simulation if requests module is not available
- email_prompt = f"""
- Simulate reading emails from Microsoft Outlook.
-
- Connection: {connection['id']}
- Folder: {folder}
- Limit: {limit}
- Filter: {filter or 'None'}
-
- Please provide:
- 1. List of emails with subject, sender, date, and content
- 2. Summary of email statistics
- 3. Important or urgent emails highlighted
- 4. Email categorization if possible
- """
- email_data = await self.service.interfaceAiCalls.callAiTextAdvanced(email_prompt)
+ logger.error("requests module not available")
+ return self._createResult(
+ success=False,
+ data={},
+ error="requests module not available"
+ )
except Exception as e:
logger.error(f"Error reading emails from Microsoft Graph API: {str(e)}")
- # Fallback to simulation on API error
- email_prompt = f"""
- Simulate reading emails from Microsoft Outlook.
-
- Connection: {connection['id']}
- Folder: {folder}
- Limit: {limit}
- Filter: {filter or 'None'}
-
- Please provide:
- 1. List of emails with subject, sender, date, and content
- 2. Summary of email statistics
- 3. Important or urgent emails highlighted
- 4. Email categorization if possible
- """
- email_data = await self.service.interfaceAiCalls.callAiTextAdvanced(email_prompt)
+ return self._createResult(
+ success=False,
+ data={},
+ error=f"Failed to read emails: {str(e)}"
+ )
# Create result data
result_data = {
@@ -398,33 +451,39 @@ class MethodOutlook(MethodBase):
@action
async def sendEmail(self, parameters: Dict[str, Any]) -> ActionResult:
"""
- Create email draft in Outlook for sending out
+ Send email via Outlook using composed email content
+
+ This action takes a composed email document and sends it via Outlook.
+ The composed email must contain all necessary email details (recipients, subject, body, attachments).
Parameters:
connectionReference (str): Reference to the Microsoft connection
- to (List[str]): List of recipient email addresses
- subject (str): Email subject
- body (str): Email body content
- cc (List[str], optional): CC recipients
- bcc (List[str], optional): BCC recipients
- attachments (List[str], optional): List of document references to attach
+ composedEmail (str): Reference to the composed email document (docItem:...)
expectedDocumentFormats (list, optional): Expected document formats with extension, mimeType, description
+
+ Input Contract:
+ The composedEmail document must have the following structure:
+ {
+ "to": ["email@example.com"],
+ "subject": "Email subject",
+ "body": "Email body content",
+ "cc": [],
+ "bcc": [],
+ "attachments": ["docItem:uuid:filename.pdf"]
+ }
+
+ Note: Email fields must be at root level, not wrapped in a nested structure.
"""
try:
connectionReference = parameters.get("connectionReference")
- to = parameters.get("to")
- subject = parameters.get("subject")
- body = parameters.get("body")
- cc = parameters.get("cc", [])
- bcc = parameters.get("bcc", [])
- attachments = parameters.get("attachments", [])
+ composed_email_ref = parameters.get("composedEmail")
expectedDocumentFormats = parameters.get("expectedDocumentFormats", [])
- if not connectionReference or not to or not subject or not body:
+ if not connectionReference or not composed_email_ref:
return self._createResult(
success=False,
data={},
- error="Connection reference, to, subject, and body are required"
+ error="Connection reference and composed email reference are required"
)
# Get Microsoft connection
@@ -435,16 +494,166 @@ class MethodOutlook(MethodBase):
data={},
error="Failed to get Microsoft connection"
)
-
- # Check permissions before proceeding
- permissions_ok = await self._checkPermissions(connection)
- if not permissions_ok:
+
+ # Get the composed email document
+ logger.info(f"Getting composed email document: {composed_email_ref}")
+ composed_email_docs = self.service.getChatDocumentsFromDocumentList([composed_email_ref])
+ if not composed_email_docs or len(composed_email_docs) == 0:
+ logger.error(f"Could not find composed email document: {composed_email_ref}")
return self._createResult(
success=False,
data={},
- error="Connection lacks necessary permissions. Please re-authenticate with updated permissions."
+ error=f"Could not find composed email document: {composed_email_ref}"
)
+ logger.info(f"Found {len(composed_email_docs)} composed email documents")
+ composed_email_doc = composed_email_docs[0]
+ logger.info(f"Composed email document: {composed_email_doc}")
+
+ # Extract email details from the composed email document
+ try:
+ logger.info(f"Extracting email details from document...")
+
+ # Get the actual file content from the database
+ # The document object has fileId, but we need to read the actual file content
+ file_id = getattr(composed_email_doc, 'fileId', None)
+ if not file_id:
+ logger.error("Document has no fileId attribute")
+ return self._createResult(
+ success=False,
+ data={},
+ error="Composed email document has no fileId"
+ )
+
+ logger.info(f"Reading file content from fileId: {file_id}")
+
+ # Read the actual file content from the database
+ try:
+ # Use the correct service interface to read file data
+ file_content = self.service.getFileData(file_id)
+ if not file_content:
+ logger.error(f"Failed to read file content for fileId: {file_id}")
+ return self._createResult(
+ success=False,
+ data={},
+ error="Failed to read composed email file content"
+ )
+
+ logger.info(f"Successfully read file content, length: {len(str(file_content))}")
+
+ # Convert bytes to string if needed
+ if isinstance(file_content, bytes):
+ email_data = file_content.decode('utf-8')
+ logger.info(f"Converted bytes to string, content length: {len(email_data)}")
+ else:
+ email_data = str(file_content)
+ logger.info(f"Content is already string, length: {len(email_data)}")
+
+ # Debug: show first 200 characters of content
+ preview = email_data[:200] + "..." if len(email_data) > 200 else email_data
+ logger.info(f"Content preview: {repr(preview)}")
+
+ except Exception as e:
+ logger.error(f"Error reading file content: {str(e)}")
+ return self._createResult(
+ success=False,
+ data={},
+ error=f"Failed to read file content: {str(e)}"
+ )
+
+ # Parse the email data (should be JSON)
+ if isinstance(email_data, str):
+ import json
+ try:
+ # First try to parse as direct JSON
+ parsed_email_data = json.loads(email_data)
+ logger.info("Successfully parsed email data as direct JSON")
+ email_data = parsed_email_data # Now email_data is the parsed dictionary
+ except json.JSONDecodeError as e:
+ logger.error(f"JSON parsing error: {str(e)}")
+ logger.error(f"Content that failed to parse: {repr(email_data[:500])}")
+
+ # If that fails, try to extract JSON from HTML content
+ logger.info("Direct JSON parsing failed, trying to extract from HTML content...")
+ import re
+
+ # Look for JSON content within HTML tags or as a script
+ json_pattern = r'\{[^{}]*"to"[^{}]*"subject"[^{}]*"body"[^{}]*\}'
+ json_match = re.search(json_pattern, email_data, re.DOTALL)
+
+ if json_match:
+ try:
+ extracted_json = json_match.group(0)
+ parsed_email_data = json.loads(extracted_json)
+ logger.info("Successfully extracted and parsed JSON from HTML content")
+ email_data = parsed_email_data # Now email_data is the parsed dictionary
+ except json.JSONDecodeError as e2:
+ logger.error(f"Failed to parse extracted JSON: {str(e2)}")
+ logger.error(f"Extracted content: {repr(extracted_json)}")
+ return self._createResult(
+ success=False,
+ data={},
+ error="Could not parse JSON content from composed email document"
+ )
+ else:
+ logger.error("No JSON content found in HTML document")
+ return self._createResult(
+ success=False,
+ data={},
+ error="Composed email document content is not valid JSON and no JSON could be extracted"
+ )
+ else:
+ logger.error(f"Unexpected email_data type: {type(email_data)}")
+ return self._createResult(
+ success=False,
+ data={},
+ error=f"Unexpected email data type: {type(email_data)}, expected string"
+ )
+
+ # At this point, email_data should be a parsed dictionary
+ logger.info(f"Final email_data type: {type(email_data)}")
+ if isinstance(email_data, dict):
+ logger.info(f"Available keys: {list(email_data.keys())}")
+
+ # Extract email fields - now they should be at root level
+ to = email_data.get("to", [])
+ subject = email_data.get("subject", "")
+ body = email_data.get("body", "")
+ cc = email_data.get("cc", [])
+ bcc = email_data.get("bcc", [])
+ attachments = email_data.get("attachments", [])
+
+ # Validate required fields
+ if not to or not subject or not body:
+ logger.error(f"Missing required fields. Available keys: {list(email_data.keys())}")
+ return self._createResult(
+ success=False,
+ data={},
+ error="Composed email must contain 'to', 'subject', and 'body' fields"
+ )
+
+ logger.info(f"Extracted email details: to={to}, subject='{subject}', body length={len(body)}, attachments={len(attachments)}")
+
+ except Exception as e:
+ logger.error(f"Error parsing composed email document: {str(e)}")
+ return self._createResult(
+ success=False,
+ data={},
+ error=f"Failed to parse composed email document: {str(e)}"
+ )
+
+ # Check permissions before proceeding
+ logger.info("Checking Microsoft Graph API permissions...")
+ permissions_ok = await self._checkPermissions(connection)
+ if not permissions_ok:
+ logger.error("Permission check failed")
+ return self._createResult(
+ success=False,
+ data={},
+ error="Connection lacks necessary permissions for Outlook operations"
+ )
+ logger.info("Permission check passed")
+
# Create email draft using Microsoft Graph API
try:
import requests
@@ -456,12 +665,27 @@ class MethodOutlook(MethodBase):
"Content-Type": "application/json"
}
+ # Clean and format body content
+ cleaned_body = body.strip()
+
+ # Check if body is already HTML
+ if cleaned_body.startswith('<html>') or cleaned_body.startswith('<p>') or '<br>' in cleaned_body:
+ # Body is already HTML, use as-is
+ html_body = cleaned_body
+ logger.info("Body content is already HTML formatted")
+ else:
+ # Convert plain text to proper HTML formatting
+ # Replace newlines with <br> tags and wrap in proper HTML structure
+ html_body = cleaned_body.replace('\n', '<br>')
+ html_body = f"<html><body>{html_body}</body></html>"
+ logger.info("Converted plain text to HTML format")
+
# Build the email message
message = {
"subject": subject,
"body": {
"contentType": "HTML",
- "content": body
+ "content": html_body
},
"toRecipients": [{"emailAddress": {"address": email}} for email in to],
"ccRecipients": [{"emailAddress": {"address": email}} for email in cc] if cc else [],
@@ -472,18 +696,47 @@ class MethodOutlook(MethodBase):
if attachments:
message["attachments"] = []
for attachment_ref in attachments:
+ logger.info(f"Processing attachment: {attachment_ref}")
+
# Get attachment document from service center
attachment_docs = self.service.getChatDocumentsFromDocumentList([attachment_ref])
if attachment_docs:
for doc in attachment_docs:
- # Create attachment object for Graph API
- attachment = {
- "@odata.type": "#microsoft.graph.fileAttachment",
- "name": doc.filename,
- "contentType": doc.mimeType,
- "contentBytes": doc.data if hasattr(doc, 'data') else ""
- }
- message["attachments"].append(attachment)
+ logger.info(f"Found attachment document: {doc.filename}, fileId: {getattr(doc, 'fileId', 'None')}")
+
+ # Get the actual file content using fileId
+ file_id = getattr(doc, 'fileId', None)
+ if file_id:
+ try:
+ # Read the actual file content
+ file_content = self.service.getFileData(file_id)
+ if file_content:
+ # Convert to base64 for Graph API
+ import base64
+ if isinstance(file_content, bytes):
+ content_bytes = file_content
+ else:
+ content_bytes = str(file_content).encode('utf-8')
+
+ base64_content = base64.b64encode(content_bytes).decode('utf-8')
+
+ # Create attachment object for Graph API
+ attachment = {
+ "@odata.type": "#microsoft.graph.fileAttachment",
+ "name": doc.filename,
+ "contentType": doc.mimeType or "application/octet-stream",
+ "contentBytes": base64_content
+ }
+ message["attachments"].append(attachment)
+ logger.info(f"✅ Successfully added attachment: {doc.filename} (size: {len(content_bytes)} bytes)")
+ else:
+ logger.warning(f"⚠️ No content found for attachment: {doc.filename}")
+ except Exception as e:
+ logger.error(f"❌ Error reading attachment file {doc.filename}: {str(e)}")
+ else:
+ logger.warning(f"⚠️ Attachment document has no fileId: {doc.filename}")
+ else:
+ logger.warning(f"⚠️ No attachment documents found for reference: {attachment_ref}")
# Create the draft message
# First, get the Drafts folder ID to ensure the draft is created there
@@ -493,124 +746,90 @@ class MethodOutlook(MethodBase):
# Create draft in the Drafts folder specifically
api_url = f"{graph_url}/me/mailFolders/{drafts_folder_id}/messages"
logger.info(f"Creating draft in Drafts folder (ID: {drafts_folder_id})")
+ logger.info(f"Target folder: Drafts (Entwürfe)")
+ logger.info(f"Mailbox account: {connection.get('userEmail', 'Unknown')}")
else:
# Fallback: create in default location
api_url = f"{graph_url}/me/messages"
logger.warning("Could not find Drafts folder, creating draft in default location")
+ logger.info(f"Mailbox account: {connection.get('userEmail', 'Unknown')}")
logger.info(f"Creating draft with API URL: {api_url}")
+ logger.info(f"Email body preview: {html_body[:200]}...")
+ logger.info(f"Number of attachments: {len(message.get('attachments', []))}")
+ if message.get('attachments'):
+ for i, att in enumerate(message['attachments']):
+ logger.info(f" Attachment {i+1}: {att['name']} ({att['contentType']}) - Content size: {len(att['contentBytes'])} chars")
logger.info(f"Draft message data: {json.dumps(message, indent=2)}")
response = requests.post(api_url, headers=headers, json=message)
- response.raise_for_status()
- draft_data = response.json()
- logger.info(f"Draft creation response: {json.dumps(draft_data, indent=2)}")
-
- # Verify the draft was created in the correct folder
- created_folder_id = draft_data.get("parentFolderId")
- if created_folder_id:
- if drafts_folder_id and created_folder_id == drafts_folder_id:
- logger.info(f"✅ Draft successfully created in Drafts folder (ID: {created_folder_id})")
- else:
- logger.warning(f"⚠️ Draft created in different folder than expected. Expected: {drafts_folder_id}, Actual: {created_folder_id}")
+ if response.status_code in [200, 201]:
+ draft_data = response.json()
+ draft_id = draft_data.get("id", "Unknown")
+ logger.info(f"✅ Email draft created successfully!")
+ logger.info(f"📧 Draft ID: {draft_id}")
+ logger.info(f"📁 Stored in: Drafts folder (Entwürfe)")
+ logger.info(f"📬 Mailbox: {connection.get('userEmail', 'Unknown')}")
+ logger.info(f"🔗 Draft URL: {api_url}")
+
+ # Return success with draft information
+ return self._createResult(
+ success=True,
+ data={
+ "documents": [
+ {
+ "documentName": f"email_draft_created_{datetime.now(UTC).strftime('%Y%m%d_%H%M%S')}.json",
+ "documentData": {
+ "status": "success",
+ "message": "Email draft created successfully",
+ "draftId": draft_id,
+ "folder": "Drafts (Entwürfe)",
+ "mailbox": connection.get('userEmail', 'Unknown'),
+ "subject": subject,
+ "recipients": to,
+ "timestamp": datetime.now(UTC).isoformat()
+ },
+ "mimeType": "application/json"
+ }
+ ]
+ }
+ )
else:
- logger.warning("⚠️ Draft created but no folder ID returned")
-
- # Get the actual folder information for the created draft
- actual_folder = "Drafts"
- if drafts_folder_id:
- actual_folder = "Drafts"
- else:
- # Try to determine where the draft was actually created
- if "parentFolderId" in draft_data:
- actual_folder = f"Folder ID: {draft_data['parentFolderId']}"
- else:
- actual_folder = "Default location"
-
- draft_result = {
- "status": "draft_created",
- "messageId": draft_data.get("id", "unknown"),
- "draftId": draft_data.get("id", "unknown"),
- "recipients": to,
- "cc": cc,
- "bcc": bcc,
- "attachments": len(attachments) if attachments else 0,
- "draftLocation": actual_folder,
- "draftsFolderId": drafts_folder_id,
- "createdFolderId": created_folder_id,
- "apiResponse": response.status_code,
- "draftData": draft_data
- }
-
- logger.info(f"Successfully created email draft for {len(to)} recipients with {len(attachments) if attachments else 0} attachments in {actual_folder}")
-
- # Additional verification: try to retrieve the draft to confirm it exists
- try:
- verify_url = f"{graph_url}/me/messages/{draft_data.get('id')}"
- verify_response = requests.get(verify_url, headers=headers)
- if verify_response.status_code == 200:
- verify_data = verify_response.json()
- logger.info(f"✅ Draft verification successful - Draft ID: {verify_data.get('id')}, Subject: {verify_data.get('subject')}")
- else:
- logger.warning(f"⚠️ Draft verification failed - Status: {verify_response.status_code}")
- except Exception as e:
- logger.warning(f"⚠️ Draft verification error: {str(e)}")
+ logger.error(f"Failed to create draft. Status: {response.status_code}, Response: {response.text}")
+ return self._createResult(
+ success=False,
+ data={},
+ error=f"Failed to create email draft: {response.status_code} - {response.text}"
+ )
except ImportError:
- logger.error("requests module not available, falling back to simulation")
- # Fallback to simulation if requests module is not available
- send_prompt = f"""
- Simulate creating an email draft in Microsoft Outlook.
-
- Connection: {connection['id']}
- To: {to}
- Subject: {subject}
- Body: {body}
- CC: {cc}
- BCC: {bcc}
- Attachments: {attachments if attachments else 'None'}
-
- Please provide:
- 1. Email composition details
- 2. Validation of email addresses
- 3. Email formatting and structure
- 4. Attachment processing and validation
- 5. Draft creation confirmation
- """
- draft_result = await self.service.interfaceAiCalls.callAiTextAdvanced(send_prompt)
+ logger.error("requests module not available")
+ return self._createResult(
+ success=False,
+ data={},
+ error="requests module not available"
+ )
except Exception as e:
logger.error(f"Error creating email draft via Microsoft Graph API: {str(e)}")
- # Fallback to simulation on API error
- send_prompt = f"""
- Simulate creating an email draft in Microsoft Outlook.
-
- Connection: {connection['id']}
- To: {to}
- Subject: {subject}
- Body: {body}
- CC: {cc}
- BCC: {bcc}
- Attachments: {attachments if attachments else 'None'}
-
- Please provide:
- 1. Email composition details
- 2. Validation of email addresses
- 3. Email formatting and structure
- 4. Attachment processing and validation
- 5. Draft creation confirmation
- """
- draft_result = await self.service.interfaceAiCalls.callAiTextAdvanced(send_prompt)
+ return self._createResult(
+ success=False,
+ data={},
+ error=f"Failed to create email draft: {str(e)}"
+ )
# Create result data
result_data = {
"connectionReference": connectionReference,
- "to": to,
- "subject": subject,
- "body": body,
- "cc": cc,
- "bcc": bcc,
- "attachments": attachments,
+ "composedEmailReference": composed_email_ref,
+ "extractedEmail": {
+ "to": to,
+ "subject": subject,
+ "body": body,
+ "cc": cc,
+ "bcc": bcc,
+ "attachments": attachments
+ },
"draftResult": draft_result,
"connection": {
"id": connection["id"],
@@ -752,7 +971,6 @@ class MethodOutlook(MethodBase):
elif response.status_code == 429:
logger.error("Too Many Requests (429) - Rate limit exceeded")
- # Fall back to simulation on API error
raise Exception(f"Microsoft Graph API returned {response.status_code}: {response.text}")
response.raise_for_status()
@@ -804,41 +1022,19 @@ class MethodOutlook(MethodBase):
logger.info(f"Successfully searched emails with query '{query}', found {len(emails)} results")
except ImportError:
- logger.error("requests module not available, falling back to simulation")
- # Fallback to simulation if requests module is not available
- search_prompt = f"""
- Simulate searching emails in Microsoft Outlook.
-
- Connection: {connection['id']}
- Query: {query}
- Folder: {folder}
- Limit: {limit}
-
- Please provide:
- 1. Search results with relevant emails
- 2. Search statistics and relevance scores
- 3. Email previews and key information
- 4. Search suggestions and refinements
- """
- search_result = await self.service.interfaceAiCalls.callAiTextAdvanced(search_prompt)
+ logger.error("requests module not available")
+ return self._createResult(
+ success=False,
+ data={},
+ error="requests module not available"
+ )
except Exception as e:
logger.error(f"Error searching emails via Microsoft Graph API: {str(e)}")
- # Fallback to simulation on API error
- search_prompt = f"""
- Simulate searching emails in Microsoft Outlook.
-
- Connection: {connection['id']}
- Query: {query}
- Folder: {folder}
- Limit: {limit}
-
- Please provide:
- 1. Search results with relevant emails
- 2. Search statistics and relevance scores
- 3. Email previews and key information
- 4. Search suggestions and refinements
- """
- search_result = await self.service.interfaceAiCalls.callAiTextAdvanced(search_prompt)
+ return self._createResult(
+ success=False,
+ data={},
+ error=f"Failed to search emails: {str(e)}"
+ )
# Create result data
result_data = {
@@ -889,7 +1085,6 @@ class MethodOutlook(MethodBase):
error=str(e)
)
- @action
async def listDrafts(self, parameters: Dict[str, Any]) -> ActionResult:
"""
List email drafts in Outlook
@@ -976,37 +1171,19 @@ class MethodOutlook(MethodBase):
logger.info(f"Successfully retrieved {len(messages)} drafts from folder '{folder}'")
except ImportError:
- logger.error("requests module not available, falling back to simulation")
- # Fallback to simulation
- drafts_prompt = f"""
- Simulate listing email drafts in Microsoft Outlook.
-
- Connection: {connection['id']}
- Folder: {folder}
- Limit: {limit}
-
- Please provide:
- 1. List of email drafts with subject, recipients, and modification date
- 2. Draft status and location information
- 3. Summary of draft statistics
- """
- drafts_result = await self.service.interfaceAiCalls.callAiTextAdvanced(drafts_prompt)
+ logger.error("requests module not available")
+ return self._createResult(
+ success=False,
+ data={},
+ error="requests module not available"
+ )
except Exception as e:
logger.error(f"Error listing drafts via Microsoft Graph API: {str(e)}")
- # Fallback to simulation on API error
- drafts_prompt = f"""
- Simulate listing email drafts in Microsoft Outlook.
-
- Connection: {connection['id']}
- Folder: {folder}
- Limit: {limit}
-
- Please provide:
- 1. List of email drafts with subject, recipients, and modification date
- 2. Draft status and location information
- 3. Summary of draft statistics
- """
- drafts_result = await self.service.interfaceAiCalls.callAiTextAdvanced(drafts_prompt)
+ return self._createResult(
+ success=False,
+ data={},
+ error=f"Failed to list drafts: {str(e)}"
+ )
# Create result data
result_data = {
@@ -1056,7 +1233,6 @@ class MethodOutlook(MethodBase):
error=str(e)
)
- @action
async def findDrafts(self, parameters: Dict[str, Any]) -> ActionResult:
"""
Find email drafts across all folders in Outlook
@@ -1131,35 +1307,19 @@ class MethodOutlook(MethodBase):
logger.info(f"Successfully found {len(drafts)} drafts across all folders")
except ImportError:
- logger.error("requests module not available, falling back to simulation")
- # Fallback to simulation
- drafts_prompt = f"""
- Simulate finding email drafts in Microsoft Outlook.
-
- Connection: {connection['id']}
- Limit: {limit}
-
- Please provide:
- 1. List of email drafts with subject, recipients, and location
- 2. Draft status and folder information
- 3. Summary of draft statistics
- """
- drafts_result = await self.service.interfaceAiCalls.callAiTextAdvanced(drafts_prompt)
+ logger.error("requests module not available")
+ return self._createResult(
+ success=False,
+ data={},
+ error="requests module not available"
+ )
except Exception as e:
logger.error(f"Error finding drafts via Microsoft Graph API: {str(e)}")
- # Fallback to simulation on API error
- drafts_prompt = f"""
- Simulate finding email drafts in Microsoft Outlook.
-
- Connection: {connection['id']}
- Limit: {limit}
-
- Please provide:
- 1. List of email drafts with subject, recipients, and location
- 2. Draft status and folder information
- 3. Summary of draft statistics
- """
- drafts_result = await self.service.interfaceAiCalls.callAiTextAdvanced(drafts_prompt)
+ return self._createResult(
+ success=False,
+ data={},
+ error=f"Failed to find drafts: {str(e)}"
+ )
# Create result data
result_data = {
@@ -1237,7 +1397,6 @@ class MethodOutlook(MethodBase):
logger.warning(f"Error getting folder name for ID '{folder_id}': {str(e)}")
return f"Unknown Folder ({folder_id})"
- @action
async def checkDraftsFolder(self, parameters: Dict[str, Any]) -> ActionResult:
"""
Check the contents of the Drafts folder directly
@@ -1322,35 +1481,19 @@ class MethodOutlook(MethodBase):
logger.info(f"Successfully checked Drafts folder: found {len(drafts)} drafts")
except ImportError:
- logger.error("requests module not available, falling back to simulation")
- # Fallback to simulation
- drafts_prompt = f"""
- Simulate checking Drafts folder in Microsoft Outlook.
-
- Connection: {connection['id']}
- Limit: {limit}
-
- Please provide:
- 1. List of email drafts in the Drafts folder
- 2. Draft details and status
- 3. Summary of draft contents
- """
- drafts_result = await self.service.interfaceAiCalls.callAiTextAdvanced(drafts_prompt)
+ logger.error("requests module not available")
+ return self._createResult(
+ success=False,
+ data={},
+ error="requests module not available"
+ )
except Exception as e:
logger.error(f"Error checking Drafts folder via Microsoft Graph API: {str(e)}")
- # Fallback to simulation on API error
- drafts_prompt = f"""
- Simulate checking Drafts folder in Microsoft Outlook.
-
- Connection: {connection['id']}
- Limit: {limit}
-
- Please provide:
- 1. List of email drafts in the Drafts folder
- 2. Draft details and status
- 3. Summary of draft contents
- """
- drafts_result = await self.service.interfaceAiCalls.callAiTextAdvanced(drafts_prompt)
+ return self._createResult(
+ success=False,
+ data={},
+ error=f"Failed to check Drafts folder: {str(e)}"
+ )
# Create result data
result_data = {
@@ -1400,6 +1543,246 @@ class MethodOutlook(MethodBase):
)
@action
+ async def composeEmail(self, parameters: Dict[str, Any]) -> ActionResult:
+ """
+ Compose email content using AI based on context and requirements
+
+ This action uses AI to generate professional email content including:
+ - Subject line
+ - Body content
+ - Recipient suggestions
+ - Attachment recommendations
+
+ Parameters:
+ context (str): Email context/requirements
+ recipient (str, optional): Recipient information
+ attachments (List[str], optional): Available documents to reference
+ tone (str, optional): Email tone (formal, casual, etc.)
+ documentList (List[str], optional): List of document references to include in context and as attachments
+ expectedDocumentFormats (list, optional): Expected output formats
+
+ Output Contract:
+ The action produces a JSON document with the following structure:
+ {
+ "context": "original context",
+ "recipient": "recipient info",
+ "tone": "email tone",
+ "timestamp": "ISO timestamp",
+ "usage": "usage description",
+ "to": ["email@example.com"],
+ "subject": "Email subject",
+ "body": "Email body content with document content integrated",
+ "cc": [],
+ "bcc": [],
+ "attachments": ["docItem:uuid:filename.pdf", "docItem:uuid:filename.html"]
+ }
+
+ Note: Email fields (to, subject, body, cc, bcc, attachments) are at root level
+ for direct consumption by sendEmail action. All documents from documentList are included as attachments.
+ """
+ try:
+ context = parameters.get("context")
+ recipient = parameters.get("recipient", "")
+ attachments = parameters.get("attachments", [])
+ tone = parameters.get("tone", "professional")
+ documentList = parameters.get("documentList", [])
+ expectedDocumentFormats = parameters.get("expectedDocumentFormats", [])
+
+ if not context:
+ return self._createResult(
+ success=False,
+ data={},
+ error="Context is required for email composition"
+ )
+
+ # Process input documents to extract content for AI context
+ document_content_summary = ""
+ all_attachments = []
+
+ if documentList:
+ logger.info(f"Processing {len(documentList)} input documents for email composition")
+ try:
+ # Get document content from service center
+ docs = self.service.getChatDocumentsFromDocumentList(documentList)
+ if docs:
+ for doc in docs:
+ # Add to attachments list
+ all_attachments.append(f"docItem:{doc.id}:{doc.filename}")
+
+ # Extract content for AI context
+ try:
+ if hasattr(doc, 'fileId') and doc.fileId:
+ file_content = self.service.getFileData(doc.fileId)
+ if file_content:
+ # Convert to string if needed
+ if isinstance(file_content, bytes):
+ content_str = file_content.decode('utf-8')
+ else:
+ content_str = str(file_content)
+
+ # Truncate content for AI context (avoid token limits)
+ content_preview = content_str[:1000] + "..." if len(content_str) > 1000 else content_str
+ document_content_summary += f"\nDocument: {doc.filename}\nContent Preview: {content_preview}\n"
+ logger.info(f"Extracted content preview from {doc.filename}: {len(content_str)} characters")
+ else:
+ logger.warning(f"No content found for document {doc.filename}")
+ else:
+ logger.warning(f"Document {doc.filename} has no fileId")
+ except Exception as e:
+ logger.warning(f"Error reading content from {doc.filename}: {str(e)}")
+ # Still include as attachment even if content can't be read
+ else:
+ logger.warning("No documents found from documentList")
+ except Exception as e:
+ logger.error(f"Error processing documentList: {str(e)}")
+
+ # Add any explicit attachments to the list
+ if attachments:
+ all_attachments.extend(attachments)
+ logger.info(f"Added {len(attachments)} explicit attachments to email")
+
+ # Remove duplicates while preserving order
+ seen = set()
+ unique_attachments = []
+ for att in all_attachments:
+ if att not in seen:
+ seen.add(att)
+ unique_attachments.append(att)
+
+ logger.info(f"Total unique attachments for email: {len(unique_attachments)}")
+ if document_content_summary:
+ logger.info(f"Document content summary length: {len(document_content_summary)} characters")
+ logger.info(f"Document content preview: {document_content_summary[:200]}...")
+
+ # Build AI prompt for email composition
+ ai_prompt = f"""
+ Compose a professional email based on the following requirements:
+
+ Context: {context}
+ Recipient: {recipient if recipient else 'Not specified'}
+ Tone: {tone}
+
+ Available documents/attachments: {unique_attachments if unique_attachments else 'None'}
+
+ {f"Document Content Summary:{document_content_summary}" if document_content_summary else ""}
+
+ Instructions:
+ 1. Use the document content to create a meaningful email body that references the actual content
+ 2. Include specific details from the documents in the email body
+ 3. Make the email relevant to the documents being shared
+ 4. Ensure the email body integrates information from the documents naturally
+
+ Please provide a JSON response with the following EXACT structure:
+ {{
+ "to": ["recipient@email.com"],
+ "subject": "Email Subject",
+ "body": "Email body content with proper greeting, document content integration, and closing",
+ "cc": [],
+ "bcc": [],
+ "attachments": {unique_attachments}
+ }}
+
+ IMPORTANT: Return ONLY the JSON object above, no additional text or formatting.
+ The response must be valid JSON that can be parsed directly.
+ """
+
+ # Call AI to compose the email
+ try:
+ composed_email = await self.service.interfaceAiCalls.callAiTextAdvanced(ai_prompt)
+
+ # Parse the AI response to ensure it's valid JSON
+ try:
+ import json
+ # Clean the response and parse as JSON
+ cleaned_response = composed_email.strip()
+ if cleaned_response.startswith('```json'):
+ cleaned_response = cleaned_response[7:]
+ if cleaned_response.endswith('```'):
+ cleaned_response = cleaned_response[:-3]
+ cleaned_response = cleaned_response.strip()
+
+ # Parse to validate JSON structure
+ email_data = json.loads(cleaned_response)
+
+ # Validate required fields
+ required_fields = ["to", "subject", "body"]
+ missing_fields = [field for field in required_fields if field not in email_data]
+ if missing_fields:
+ raise ValueError(f"Missing required fields: {missing_fields}")
+
+ logger.info("AI response successfully parsed and validated")
+
+ except json.JSONDecodeError as e:
+ logger.error(f"AI response is not valid JSON: {str(e)}")
+ return self._createResult(
+ success=False,
+ data={},
+ error=f"AI response is not valid JSON: {str(e)}"
+ )
+ except ValueError as e:
+ logger.error(f"AI response missing required fields: {str(e)}")
+ return self._createResult(
+ success=False,
+ data={},
+ error=f"AI response missing required fields: {str(e)}"
+ )
+
+ # Create result data - output the email data directly, not wrapped
+ result_data = {
+ "context": context,
+ "recipient": recipient,
+ "tone": tone,
+ "timestamp": datetime.now(UTC).isoformat(),
+ "usage": "This document contains a composed email that can be used with the sendEmail action",
+ # Include the email data directly at root level for sendEmail to consume
+ **email_data # This spreads the email fields (to, subject, body, cc, bcc, attachments) directly
+ }
+
+ # Ensure attachments are properly set from our processed list
+ if unique_attachments:
+ result_data["attachments"] = unique_attachments
+ logger.info(f"Final email attachments: {unique_attachments}")
+
+ # Determine output format - ALWAYS use JSON for email composition
+ # This action must produce JSON for sendEmail to parse correctly
+ output_extension = ".json"
+ output_mime_type = "application/json"
+
+ # Ignore any expectedDocumentFormats - this action has a fixed output format
+ if expectedDocumentFormats and len(expectedDocumentFormats) > 0:
+ logger.info(f"Ignoring expected format '{expectedDocumentFormats[0].get('extension', 'unknown')}' - composeEmail always produces JSON")
+
+ logger.info(f"composeEmail action always produces: {output_extension} ({output_mime_type})")
+
+ return self._createResult(
+ success=True,
+ data={
+ "documents": [
+ {
+ "documentName": f"composed_email_{datetime.now(UTC).strftime('%Y%m%d_%H%M%S')}{output_extension}",
+ "documentData": result_data,
+ "mimeType": output_mime_type
+ }
+ ]
+ }
+ )
+
+ except Exception as e:
+ logger.error(f"Error calling AI for email composition: {str(e)}")
+ return self._createResult(
+ success=False,
+ data={},
+ error=f"Failed to compose email: {str(e)}"
+ )
+
+ except Exception as e:
+ logger.error(f"Error composing email: {str(e)}")
+ return self._createResult(
+ success=False,
+ data={},
+ error=str(e)
+ )
+
async def checkPermissions(self, parameters: Dict[str, Any]) -> ActionResult:
"""
Check if the current Microsoft connection has the necessary permissions for Outlook operations.
@@ -1458,4 +1841,5 @@ class MethodOutlook(MethodBase):
success=False,
data={},
error=str(e)
- )
\ No newline at end of file
+ )
+
diff --git a/modules/routes/routeDataConnections.py b/modules/routes/routeDataConnections.py
index 3a140cb9..8d63a8cd 100644
--- a/modules/routes/routeDataConnections.py
+++ b/modules/routes/routeDataConnections.py
@@ -34,8 +34,7 @@ async def get_connections(
interface = getInterface(currentUser)
# Clear connections cache to ensure fresh data
- if "connections" in interface.db._tablesCache:
- del interface.db._tablesCache["connections"]
+ interface.db.clearTableCache("connections")
if currentUser.privilege in ['admin', 'sysadmin']:
# Admins can see all connections
@@ -107,6 +106,9 @@ async def create_connection(
# Save connection record
interface.db.recordModify("connections", connection.id, connection_dict)
+ # Clear cache to ensure fresh data
+ interface.db.clearTableCache("connections")
+
return connection
except HTTPException:
@@ -174,9 +176,13 @@ async def update_connection(
elif isinstance(connection_dict[field], (int, float)):
connection_dict[field] = datetime.fromtimestamp(connection_dict[field]).isoformat()
- # Update connection record
+ # Update connection
interface.db.recordModify("connections", connectionId, connection_dict)
+ # Clear cache to ensure fresh data
+ interface.db.clearTableCache("connections")
+
+    # Return the in-memory connection object (already updated above; not re-fetched from DB)
return connection
except HTTPException:
@@ -306,6 +312,9 @@ async def disconnect_service(
# Update connection record
interface.db.recordModify("connections", connectionId, connection.to_dict())
+ # Clear cache to ensure fresh data
+ interface.db.clearTableCache("connections")
+
return {"message": "Service disconnected successfully"}
except HTTPException:
diff --git a/modules/routes/routeSecurityGoogle.py b/modules/routes/routeSecurityGoogle.py
index ae396994..b47885e7 100644
--- a/modules/routes/routeSecurityGoogle.py
+++ b/modules/routes/routeSecurityGoogle.py
@@ -332,6 +332,9 @@ async def auth_callback(code: str, state: str, request: Request) -> HTMLResponse
# Update connection record directly
rootInterface.db.recordModify("connections", connection_id, connection.to_dict())
+ # Clear cache to ensure fresh data
+ rootInterface.db.clearTableCache("connections")
+
# Save token
token = Token(
userId=user.id, # Use local user's ID
diff --git a/modules/routes/routeSecurityMsft.py b/modules/routes/routeSecurityMsft.py
index bf721fb6..5ea83064 100644
--- a/modules/routes/routeSecurityMsft.py
+++ b/modules/routes/routeSecurityMsft.py
@@ -314,6 +314,9 @@ async def auth_callback(code: str, state: str, request: Request) -> HTMLResponse
# Update connection record directly
rootInterface.db.recordModify("connections", connection_id, connection.to_dict())
+ # Clear cache to ensure fresh data
+ rootInterface.db.clearTableCache("connections")
+
# Save token
token = Token(
userId=user.id, # Use local user's ID
diff --git a/modules/security/auth.py b/modules/security/auth.py
index 4eb405f0..c69086db 100644
--- a/modules/security/auth.py
+++ b/modules/security/auth.py
@@ -163,6 +163,9 @@ def createUserSession(userId: str, tokenId: str, request: Request) -> Session:
# Save session to database
appInterface.db.recordCreate("sessions", session.to_dict())
+ # Clear cache to ensure fresh data
+ appInterface.db.clearTableCache("sessions")
+
# Log auth event
event = AuthEvent(
userId=userId,
@@ -173,6 +176,9 @@ def createUserSession(userId: str, tokenId: str, request: Request) -> Session:
)
appInterface.db.recordCreate("auth_events", event.to_dict())
+ # Clear cache to ensure fresh data
+ appInterface.db.clearTableCache("auth_events")
+
return session
def logAuthEvent(userId: str, eventType: str, details: Dict[str, Any], request: Request) -> None:
@@ -189,6 +195,9 @@ def logAuthEvent(userId: str, eventType: str, details: Dict[str, Any], request:
# Save event to database
appInterface.db.recordCreate("auth_events", event.to_dict())
+
+ # Clear cache to ensure fresh data
+ appInterface.db.clearTableCache("auth_events")
def validateSession(sessionId: str) -> bool:
"""Validate a user session."""
@@ -207,6 +216,9 @@ def validateSession(sessionId: str) -> bool:
"lastActivity": datetime.now(timezone.utc)
})
+ # Clear cache to ensure fresh data
+ appInterface.db.clearTableCache("sessions")
+
return True
def revokeSession(sessionId: str) -> None:
@@ -215,6 +227,9 @@ def revokeSession(sessionId: str) -> None:
# Delete session
appInterface.db.recordDelete("sessions", sessionId)
+
+ # Clear cache to ensure fresh data
+ appInterface.db.clearTableCache("sessions")
def revokeAllUserSessions(userId: str) -> None:
"""Revoke all sessions for a user."""
@@ -226,3 +241,6 @@ def revokeAllUserSessions(userId: str) -> None:
# Delete each session
for session in sessions:
appInterface.db.recordDelete("sessions", session["id"])
+
+ # Clear cache to ensure fresh data
+ appInterface.db.clearTableCache("sessions")