From cf94b1115b3107a4f79c314e69c141c3640a8968 Mon Sep 17 00:00:00 2001
From: ValueOn AG
Date: Mon, 26 May 2025 07:04:30 +0200
Subject: [PATCH] ref
---
app.py | 42 +-
env_dev.env | 30 +-
env_prod.env | 30 +-
modules/agents/agentAnalyst.py | 20 +-
modules/agents/agentDocumentation.py | 14 +-
modules/agents/agentEmail.py | 2 +-
modules/agents/agentSharepoint.py | 348 +++++++++
modules/agents/agentWebcrawler.py | 12 +-
modules/interfaces/gatewayModel.py | 151 ----
modules/interfaces/googleAccess.py | 113 ---
modules/interfaces/googleInterface.py | 287 -------
modules/interfaces/googleModel.py | 35 -
modules/interfaces/lucydomModel.py | 246 ------
modules/interfaces/msftAccess.py | 113 ---
modules/interfaces/msftInterface.py | 520 -------------
modules/interfaces/msftModel.py | 70 --
.../{gatewayAccess.py => serviceAppAccess.py} | 101 ++-
...gatewayInterface.py => serviceAppClass.py} | 513 +++++--------
modules/interfaces/serviceAppModel.py | 211 ++++++
modules/interfaces/serviceAppTokens.py | 52 ++
modules/interfaces/serviceChatAccess.py | 133 ++++
...ucydomInterface.py => serviceChatClass.py} | 483 +-----------
modules/interfaces/serviceChatModel.py | 130 ++++
...omAccess.py => serviceManagementAccess.py} | 6 +-
modules/interfaces/serviceManagementClass.py | 704 ++++++++++++++++++
modules/interfaces/serviceManagementModel.py | 75 ++
modules/routes/routeAdmin.py | 63 ++
modules/routes/routeAttributes.py | 35 +-
.../{routeFiles.py => routeDataFiles.py} | 94 ++-
...{routeMandates.py => routeDataMandates.py} | 72 +-
.../{routePrompts.py => routeDataPrompts.py} | 70 +-
.../{routeUsers.py => routeDataUsers.py} | 81 +-
modules/routes/routeGeneral.py | 132 ----
modules/routes/routeGoogle.py | 322 --------
modules/routes/routeMsft.py | 322 --------
modules/routes/routeSecurityGoogle.py | 164 ++++
modules/routes/routeSecurityLocal.py | 195 +++++
modules/routes/routeSecurityMsft.py | 154 ++++
modules/routes/routeWorkflows.py | 57 +-
modules/security/auth.py | 135 +++-
modules/shared/attributeUtils.py | 154 ++++
modules/shared/defAttributes.py | 123 ---
modules/workflow/agentBase.py | 30 +-
modules/workflow/agentRegistry.py | 21 +-
modules/workflow/documentProcessor.py | 2 +-
modules/workflow/workflowManager.py | 561 ++++++--------
notes/changelog.txt | 131 +++-
notes/nda.txt | 37 -
notes/releasenotes.txt | 8 +
requirements.txt | 3 +
50 files changed, 3504 insertions(+), 3903 deletions(-)
create mode 100644 modules/agents/agentSharepoint.py
delete mode 100644 modules/interfaces/gatewayModel.py
delete mode 100644 modules/interfaces/googleAccess.py
delete mode 100644 modules/interfaces/googleInterface.py
delete mode 100644 modules/interfaces/googleModel.py
delete mode 100644 modules/interfaces/lucydomModel.py
delete mode 100644 modules/interfaces/msftAccess.py
delete mode 100644 modules/interfaces/msftInterface.py
delete mode 100644 modules/interfaces/msftModel.py
rename modules/interfaces/{gatewayAccess.py => serviceAppAccess.py} (54%)
rename modules/interfaces/{gatewayInterface.py => serviceAppClass.py} (54%)
create mode 100644 modules/interfaces/serviceAppModel.py
create mode 100644 modules/interfaces/serviceAppTokens.py
create mode 100644 modules/interfaces/serviceChatAccess.py
rename modules/interfaces/{lucydomInterface.py => serviceChatClass.py} (66%)
create mode 100644 modules/interfaces/serviceChatModel.py
rename modules/interfaces/{lucydomAccess.py => serviceManagementAccess.py} (97%)
create mode 100644 modules/interfaces/serviceManagementClass.py
create mode 100644 modules/interfaces/serviceManagementModel.py
create mode 100644 modules/routes/routeAdmin.py
rename modules/routes/{routeFiles.py => routeDataFiles.py} (69%)
rename modules/routes/{routeMandates.py => routeDataMandates.py} (66%)
rename modules/routes/{routePrompts.py => routeDataPrompts.py} (56%)
rename modules/routes/{routeUsers.py => routeDataUsers.py} (64%)
delete mode 100644 modules/routes/routeGeneral.py
delete mode 100644 modules/routes/routeGoogle.py
delete mode 100644 modules/routes/routeMsft.py
create mode 100644 modules/routes/routeSecurityGoogle.py
create mode 100644 modules/routes/routeSecurityLocal.py
create mode 100644 modules/routes/routeSecurityMsft.py
create mode 100644 modules/shared/attributeUtils.py
delete mode 100644 modules/shared/defAttributes.py
delete mode 100644 notes/nda.txt
create mode 100644 notes/releasenotes.txt
diff --git a/app.py b/app.py
index 3e405e65..376afe8a 100644
--- a/app.py
+++ b/app.py
@@ -17,17 +17,41 @@ def initLogging():
logLevelName = APP_CONFIG.get("APP_LOGGING_LOG_LEVEL", "WARNING")
logLevel = getattr(logging, logLevelName)
+ # Create formatters
+ consoleFormatter = logging.Formatter(
+ fmt=APP_CONFIG.get("APP_LOGGING_FORMAT", "%(asctime)s - %(levelname)s - %(name)s - %(message)s"),
+ datefmt=APP_CONFIG.get("APP_LOGGING_DATE_FORMAT", "%Y-%m-%d %H:%M:%S")
+ )
+
+ # File formatter with more detailed error information
+ fileFormatter = logging.Formatter(
+ fmt="%(asctime)s - %(levelname)s - %(name)s - %(message)s - %(pathname)s:%(lineno)d\n%(funcName)s\n%(exc_info)s",
+ datefmt=APP_CONFIG.get("APP_LOGGING_DATE_FORMAT", "%Y-%m-%d %H:%M:%S")
+ )
+
# Configure handlers based on config
handlers = []
# Add console handler if enabled
if APP_CONFIG.get("APP_LOGGING_CONSOLE_ENABLED", True):
consoleHandler = logging.StreamHandler()
+ consoleHandler.setFormatter(consoleFormatter)
handlers.append(consoleHandler)
# Add file handler if enabled
if APP_CONFIG.get("APP_LOGGING_FILE_ENABLED", True):
+ # Get log file path and ensure it's absolute
logFile = APP_CONFIG.get("APP_LOGGING_LOG_FILE", "app.log")
+ if not os.path.isabs(logFile):
+ # If relative path, make it relative to the gateway directory
+ gatewayDir = os.path.dirname(os.path.abspath(__file__))
+ logFile = os.path.join(gatewayDir, logFile)
+
+ # Ensure log directory exists
+ logDir = os.path.dirname(logFile)
+ if logDir:
+ os.makedirs(logDir, exist_ok=True)
+
rotationSize = int(APP_CONFIG.get("APP_LOGGING_ROTATION_SIZE", 10485760)) # Default: 10MB
backupCount = int(APP_CONFIG.get("APP_LOGGING_BACKUP_COUNT", 5))
@@ -36,9 +60,10 @@ def initLogging():
maxBytes=rotationSize,
backupCount=backupCount
)
+ fileHandler.setFormatter(fileFormatter)
handlers.append(fileHandler)
- # Configure the logger
+ # Configure the root logger
logging.basicConfig(
level=logLevel,
format=APP_CONFIG.get("APP_LOGGING_FORMAT", "%(asctime)s - %(levelname)s - %(name)s - %(message)s"),
@@ -46,6 +71,7 @@ def initLogging():
handlers=handlers
)
+
# Silence noisy third-party libraries - use the same level as the root logger
noisyLoggers = ["httpx", "urllib3", "asyncio", "fastapi.security.oauth2"]
for loggerName in noisyLoggers:
@@ -96,29 +122,29 @@ app.add_middleware(
)
# Include all routers
-from modules.routes.routeGeneral import router as generalRouter
+from modules.routes.routeAdmin import router as generalRouter
app.include_router(generalRouter)
from modules.routes.routeAttributes import router as attributesRouter
app.include_router(attributesRouter)
-from modules.routes.routeMandates import router as mandateRouter
+from modules.routes.routeDataMandates import router as mandateRouter
app.include_router(mandateRouter)
-from modules.routes.routeUsers import router as userRouter
+from modules.routes.routeDataUsers import router as userRouter
app.include_router(userRouter)
-from modules.routes.routeFiles import router as fileRouter
+from modules.routes.routeDataFiles import router as fileRouter
app.include_router(fileRouter)
-from modules.routes.routePrompts import router as promptRouter
+from modules.routes.routeDataPrompts import router as promptRouter
app.include_router(promptRouter)
from modules.routes.routeWorkflows import router as workflowRouter
app.include_router(workflowRouter)
-from modules.routes.routeMsft import router as msftRouter
+from modules.routes.routeSecurityMsft import router as msftRouter
app.include_router(msftRouter)
-from modules.routes.routeGoogle import router as googleRouter
+from modules.routes.routeSecurityGoogle import router as googleRouter
app.include_router(googleRouter)
diff --git a/env_dev.env b/env_dev.env
index 8e01929b..53d85460 100644
--- a/env_dev.env
+++ b/env_dev.env
@@ -5,23 +5,23 @@ APP_ENV_TYPE = dev
APP_ENV_LABEL = Development Instance Patrick
APP_API_URL = http://localhost:8000
-# Database Configuration Gateway
-DB_GATEWAY_HOST=D:/Temp/_powerondb
-DB_GATEWAY_DATABASE=gateway
-DB_GATEWAY_USER=dev_user
-DB_GATEWAY_PASSWORD_SECRET=dev_password
+# Database Configuration for Application
+DB_APP_HOST=D:/Temp/_powerondb
+DB_APP_DATABASE=app
+DB_APP_USER=dev_user
+DB_APP_PASSWORD_SECRET=dev_password
-# Database Configuration LucyDOM
-DB_LUCYDOM_HOST=D:/Temp/_powerondb
-DB_LUCYDOM_DATABASE=lucydom
-DB_LUCYDOM_USER=dev_user
-DB_LUCYDOM_PASSWORD_SECRET=dev_password
+# Database Configuration Chat
+DB_CHAT_HOST=D:/Temp/_powerondb
+DB_CHAT_DATABASE=chat
+DB_CHAT_USER=dev_user
+DB_CHAT_PASSWORD_SECRET=dev_password
-# Database Configuration MSFT
-DB_MSFT_HOST=D:/Temp/_powerondb
-DB_MSFT_DATABASE=msft
-DB_MSFT_USER=dev_user
-DB_MSFT_PASSWORD_SECRET=dev_password
+# Database Configuration Management
+DB_MANAGEMENT_HOST=D:/Temp/_powerondb
+DB_MANAGEMENT_DATABASE=management
+DB_MANAGEMENT_USER=dev_user
+DB_MANAGEMENT_PASSWORD_SECRET=dev_password
# Security Configuration
APP_JWT_SECRET_SECRET=dev_jwt_secret_token
diff --git a/env_prod.env b/env_prod.env
index 869452d3..694a0b2e 100644
--- a/env_prod.env
+++ b/env_prod.env
@@ -5,23 +5,23 @@ APP_ENV_TYPE = prod
APP_ENV_LABEL = Production Instance
APP_API_URL = https://gateway.poweron-center.net
-# Database Configuration Gateway
-DB_GATEWAY_HOST=/home/_powerondb
-DB_GATEWAY_DATABASE=gateway
-DB_GATEWAY_USER=dev_user
-DB_GATEWAY_PASSWORD_SECRET=prod_password
+# Database Configuration Application
+DB_APP_HOST=/home/_powerondb
+DB_APP_DATABASE=app
+DB_APP_USER=dev_user
+DB_APP_PASSWORD_SECRET=prod_password
-# Database Configuration LucyDOM
-DB_LUCYDOM_HOST=/home/_powerondb
-DB_LUCYDOM_DATABASE=lucydom
-DB_LUCYDOM_USER=dev_user
-DB_LUCYDOM_PASSWORD_SECRET=prod_password
+# Database Configuration Chat
+DB_CHAT_HOST=/home/_powerondb
+DB_CHAT_DATABASE=chat
+DB_CHAT_USER=dev_user
+DB_CHAT_PASSWORD_SECRET=prod_password
-# Database Configuration MSFT
-DB_MSFT_HOST=/home/_powerondb
-DB_MSFT_DATABASE=msft
-DB_MSFT_USER=dev_user
-DB_MSFT_PASSWORD_SECRET=dev_password
+# Database Configuration Management
+DB_MANAGEMENT_HOST=/home/_powerondb
+DB_MANAGEMENT_DATABASE=management
+DB_MANAGEMENT_USER=dev_user
+DB_MANAGEMENT_PASSWORD_SECRET=dev_password
# Security Configuration
APP_JWT_SECRET_SECRET=dev_jwt_secret_token
diff --git a/modules/agents/agentAnalyst.py b/modules/agents/agentAnalyst.py
index fd0864fb..c2d392f4 100644
--- a/modules/agents/agentAnalyst.py
+++ b/modules/agents/agentAnalyst.py
@@ -13,9 +13,20 @@ from typing import Dict, Any, List, Optional
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
+from datetime import datetime
+import hashlib
+import uuid
+import re
+import shutil
+from pathlib import Path
+import traceback
+import sys
+import importlib.util
+import inspect
+from pydantic import BaseModel
from modules.workflow.agentBase import AgentBase
-from modules.interfaces.lucydomModel import ChatContent
+from modules.interfaces.serviceChatModel import ChatContent
logger = logging.getLogger(__name__)
@@ -68,7 +79,7 @@ class AgentAnalyst(AgentBase):
# Create analysis plan
if workflow:
- self.workflowManager.logAdd(workflow, "Extracting data from documents...", level="info", progress=35)
+ self.service.logAdd(workflow, "Extracting data from documents...", level="info", progress=35)
analysisPlan = await self._createAnalysisPlan(prompt)
# Check if this is truly an analysis task
@@ -80,15 +91,14 @@ class AgentAnalyst(AgentBase):
# Analyze data
if workflow:
- self.workflowManager.logAdd(workflow, "Analyzing task requirements...", level="info", progress=45)
+ self.service.logAdd(workflow, "Analyzing task requirements...", level="info", progress=45)
analysisResults = await self._analyzeData(task, analysisPlan)
# Format results into requested output documents
totalSpecs = len(outputSpecs)
for i, spec in enumerate(outputSpecs):
progress = 50 + int((i / totalSpecs) * 40) # Progress from 50% to 90%
- if self.workflowManager:
- self.workflowManager.logAdd(workflow, f"Creating output {i+1}/{totalSpecs}...", level="info", progress=progress)
+ self.service.logAdd(workflow, f"Creating output {i+1}/{totalSpecs}...", level="info", progress=progress)
documents = await self._createOutputDocuments(
prompt,
diff --git a/modules/agents/agentDocumentation.py b/modules/agents/agentDocumentation.py
index 81d3ae43..1cf3e3b2 100644
--- a/modules/agents/agentDocumentation.py
+++ b/modules/agents/agentDocumentation.py
@@ -4,14 +4,24 @@ Provides comprehensive documentation generation capabilities.
"""
import logging
-from typing import Dict, Any, List
+from typing import Dict, Any, List, Optional
import json
import re
from datetime import datetime
import os
+import hashlib
+import base64
+import uuid
+import shutil
+from pathlib import Path
+import traceback
+import sys
+import importlib.util
+import inspect
+from pydantic import BaseModel
from modules.workflow.agentBase import AgentBase
-from modules.interfaces.lucydomModel import ChatContent
+from modules.interfaces.serviceChatModel import ChatContent
logger = logging.getLogger(__name__)
diff --git a/modules/agents/agentEmail.py b/modules/agents/agentEmail.py
index b5e161f6..308ea3e6 100644
--- a/modules/agents/agentEmail.py
+++ b/modules/agents/agentEmail.py
@@ -6,7 +6,7 @@ Handles email-related tasks using Microsoft Graph API.
import logging
import json
from typing import Dict, Any, List, Optional
-from ..workflow.agentBase import AgentBase
+from modules.workflow.agentBase import AgentBase
logger = logging.getLogger(__name__)
diff --git a/modules/agents/agentSharepoint.py b/modules/agents/agentSharepoint.py
new file mode 100644
index 00000000..a0fa0b0d
--- /dev/null
+++ b/modules/agents/agentSharepoint.py
@@ -0,0 +1,348 @@
+"""
+SharePoint Agent Module.
+Handles SharePoint document search and data extraction using Microsoft Graph API.
+"""
+
+import logging
+import json
+from typing import Dict, Any, List, Optional
+from modules.workflow.agentBase import AgentBase
+
+logger = logging.getLogger(__name__)
+
+class AgentSharepoint(AgentBase):
+ """Agent for handling SharePoint document operations."""
+
+ def __init__(self):
+ """Initialize the SharePoint agent."""
+ super().__init__()
+ self.name = "sharepoint"
+ self.label = "SharePoint Agent"
+ self.description = "Searches and extracts data from SharePoint documents using Microsoft Graph API"
+ self.capabilities = [
+ "document_search",
+ "content_extraction",
+ "metadata_analysis",
+ "document_processing"
+ ]
+
+ async def processTask(self, task: Dict[str, Any]) -> Dict[str, Any]:
+ """
+ Process a SharePoint-related task.
+
+ Args:
+ task: Task object containing:
+ - prompt: Instructions for the agent
+ - inputDocuments: List of documents to process
+ - outputSpecifications: List of required output documents
+ - context: Additional context including workflow info
+
+ Returns:
+ Dictionary containing:
+ - feedback: Text response explaining what was done
+ - documents: List of created documents
+ """
+ try:
+ # Extract task information
+ prompt = task.get("prompt", "")
+ inputDocuments = task.get("inputDocuments", [])
+ outputSpecs = task.get("outputSpecifications", [])
+
+ # Check AI service
+ if not self.service.base:
+ return {
+ "feedback": "The SharePoint agent requires an AI service to function.",
+ "documents": []
+ }
+
+ # Check if Microsoft connector is available
+ if not hasattr(self.service, 'msft'):
+ return {
+ "feedback": "Microsoft connector not available. Please ensure Microsoft integration is properly configured.",
+ "documents": []
+ }
+
+ # Get Microsoft token
+ token_data = self.service.msft.getMsftToken()
+ if not token_data:
+ # Create authentication trigger document
+ auth_doc = self._createFrontendAuthTriggerDocument()
+ return {
+ "feedback": "Microsoft authentication required. Please authenticate to continue.",
+ "documents": [auth_doc]
+ }
+
+ # Parse the search query from the prompt
+ searchQuery = await self._parseSearchQuery(prompt)
+
+ # Search SharePoint documents
+ searchResults = await self._searchSharePointDocuments(searchQuery)
+
+ # Process search results
+ documents = []
+ for spec in outputSpecs:
+ label = spec.get("label", "")
+ description = spec.get("description", "")
+
+ if label.endswith(".json"):
+ # Create JSON summary of search results
+ summaryDoc = self._createSearchSummaryJson(searchResults, description)
+ documents.append(summaryDoc)
+ elif label.endswith(".csv"):
+ # Create CSV summary of search results
+ summaryDoc = self._createSearchSummaryCsv(searchResults, description)
+ documents.append(summaryDoc)
+ else:
+ # Create text summary of search results
+ summaryDoc = self._createSearchSummaryText(searchResults, description)
+ documents.append(summaryDoc)
+
+ # Prepare feedback message
+ feedback = f"Found {len(searchResults)} documents matching your search criteria. "
+ if searchResults:
+ feedback += "The results have been saved as documents."
+ else:
+ feedback += "No matching documents were found."
+
+ return {
+ "feedback": feedback,
+ "documents": documents
+ }
+
+ except Exception as e:
+ logger.error(f"Error in SharePoint agent: {str(e)}")
+ return {
+ "feedback": f"Error processing SharePoint task: {str(e)}",
+ "documents": []
+ }
+
+ def _createFrontendAuthTriggerDocument(self) -> Dict[str, Any]:
+ """Create a document that triggers Microsoft authentication in the frontend."""
+ return self.formatAgentDocumentOutput(
+ "microsoft_auth.html",
+ """
+
+
+Microsoft Authentication Required
+
+Please click the button below to authenticate with Microsoft:
+
+
+ """,
+ "text/html"
+ )
+
+ async def _parseSearchQuery(self, prompt: str) -> Dict[str, Any]:
+ """
+ Parse the search query from the prompt using AI.
+
+ Args:
+ prompt: The task prompt
+
+ Returns:
+ Dictionary containing search parameters
+ """
+ try:
+ # Use AI to parse the search query
+ response = await self.service.base.callAi([
+ {"role": "system", "content": "You are a SharePoint search query parser. Extract search parameters from the user's request."},
+ {"role": "user", "content": f"""
+ Parse the following SharePoint search request into structured parameters:
+
+ {prompt}
+
+ Return a JSON object with these fields:
+ - query: The main search query
+ - site: Optional SharePoint site name
+ - folder: Optional folder path
+ - fileTypes: List of file types to search for
+ - dateRange: Optional date range for filtering
+ - maxResults: Maximum number of results to return
+
+ Only return valid JSON. No preamble or explanations.
+ """}
+ ])
+
+ # Extract JSON from response
+ jsonStart = response.find('{')
+ jsonEnd = response.rfind('}') + 1
+
+ if jsonStart >= 0 and jsonEnd > jsonStart:
+ return json.loads(response[jsonStart:jsonEnd])
+ else:
+ # Fallback to simple query
+ return {
+ "query": prompt,
+ "maxResults": 10
+ }
+
+ except Exception as e:
+ logger.warning(f"Error parsing search query: {str(e)}")
+ return {
+ "query": prompt,
+ "maxResults": 10
+ }
+
+ async def _searchSharePointDocuments(self, searchParams: Dict[str, Any]) -> List[Dict[str, Any]]:
+ """
+ Search SharePoint documents using Microsoft Graph API.
+
+ Args:
+ searchParams: Search parameters
+
+ Returns:
+ List of search results
+ """
+ try:
+ # Get Microsoft token
+ token = self.service.msft.getMsftToken()
+ if not token:
+ return []
+
+ # Prepare search query
+ query = searchParams.get("query", "")
+ site = searchParams.get("site", "")
+ folder = searchParams.get("folder", "")
+ fileTypes = searchParams.get("fileTypes", [])
+ maxResults = searchParams.get("maxResults", 10)
+
+ # Build search URL
+ searchUrl = "https://graph.microsoft.com/v1.0/sites/root/drives"
+ if site:
+ searchUrl = f"https://graph.microsoft.com/v1.0/sites/{site}/drives"
+
+ # Get drives (document libraries)
+ response = self.service.msft.makeGraphRequest("GET", searchUrl)
+ if not response or "value" not in response:
+ return []
+
+ results = []
+ for drive in response["value"]:
+ # Search in each drive
+ driveId = drive["id"]
+ searchEndpoint = f"https://graph.microsoft.com/v1.0/drives/{driveId}/root/search(q='{query}')"
+
+ # Add file type filters if specified
+ if fileTypes:
+ typeFilter = " or ".join([f"fileType eq '{ft}'" for ft in fileTypes])
+ searchEndpoint += f"&filter={typeFilter}"
+
+ # Add folder filter if specified
+ if folder:
+ searchEndpoint += f"&filter=parentReference/path eq '/{folder}'"
+
+ # Add result limit
+ searchEndpoint += f"&top={maxResults}"
+
+ # Make the search request
+ searchResponse = self.service.msft.makeGraphRequest("GET", searchEndpoint)
+ if searchResponse and "value" in searchResponse:
+ for item in searchResponse["value"]:
+ # Get file content
+ fileContent = await self._getFileContent(driveId, item["id"])
+
+ results.append({
+ "name": item["name"],
+ "id": item["id"],
+ "driveId": driveId,
+ "webUrl": item["webUrl"],
+ "lastModified": item["lastModifiedDateTime"],
+ "size": item["size"],
+ "content": fileContent
+ })
+
+ return results
+
+ except Exception as e:
+ logger.error(f"Error searching SharePoint: {str(e)}")
+ return []
+
+ async def _getFileContent(self, driveId: str, fileId: str) -> str:
+ """
+ Get file content from SharePoint.
+
+ Args:
+ driveId: Drive ID
+ fileId: File ID
+
+ Returns:
+ File content as string
+ """
+ try:
+ # Get file content URL
+ contentUrl = f"https://graph.microsoft.com/v1.0/drives/{driveId}/items/{fileId}/content"
+
+ # Download file content
+ response = self.service.msft.makeGraphRequest("GET", contentUrl, raw=True)
+ if response:
+ return response.decode('utf-8')
+ return ""
+
+ except Exception as e:
+ logger.error(f"Error getting file content: {str(e)}")
+ return ""
+
+ def _createSearchSummaryJson(self, results: List[Dict[str, Any]], description: str) -> Dict[str, Any]:
+ """Create a JSON summary of search results."""
+ summary = {
+ "description": description,
+ "totalResults": len(results),
+ "results": []
+ }
+
+ for result in results:
+ summary["results"].append({
+ "name": result["name"],
+ "url": result["webUrl"],
+ "lastModified": result["lastModified"],
+ "size": result["size"]
+ })
+
+ return self.formatAgentDocumentOutput(
+ "sharepoint_search_results.json",
+ json.dumps(summary, indent=2),
+ "application/json"
+ )
+
+ def _createSearchSummaryCsv(self, results: List[Dict[str, Any]], description: str) -> Dict[str, Any]:
+ """Create a CSV summary of search results."""
+ csvLines = ["Name,URL,Last Modified,Size (bytes)"]
+
+ for result in results:
+ name = result["name"].replace('"', '""')
+ url = result["webUrl"].replace('"', '""')
+ lastModified = result["lastModified"].replace('"', '""')
+ size = str(result["size"])
+
+ csvLines.append(f'"{name}","{url}","{lastModified}",{size}')
+
+ return self.formatAgentDocumentOutput(
+ "sharepoint_search_results.csv",
+ "\n".join(csvLines),
+ "text/csv"
+ )
+
+ def _createSearchSummaryText(self, results: List[Dict[str, Any]], description: str) -> Dict[str, Any]:
+ """Create a text summary of search results."""
+ textLines = [
+ f"SharePoint Search Results",
+ f"Description: {description}",
+ f"Total Results: {len(results)}",
+ "\nResults:"
+ ]
+
+ for result in results:
+ textLines.extend([
+ f"\nName: {result['name']}",
+ f"URL: {result['webUrl']}",
+ f"Last Modified: {result['lastModified']}",
+ f"Size: {result['size']} bytes"
+ ])
+
+ return self.formatAgentDocumentOutput(
+ "sharepoint_search_results.txt",
+ "\n".join(textLines),
+ "text/plain"
+ )
+
+def getAgentSharepoint() -> AgentSharepoint:
+    """Factory function to create and return an AgentSharepoint instance."""
+ return AgentSharepoint()
\ No newline at end of file
diff --git a/modules/agents/agentWebcrawler.py b/modules/agents/agentWebcrawler.py
index 1e703c3a..0f9768f4 100644
--- a/modules/agents/agentWebcrawler.py
+++ b/modules/agents/agentWebcrawler.py
@@ -80,7 +80,7 @@ class AgentWebcrawler(AgentBase):
# Create research plan
if workflow:
- self.workflowManager.logAdd(workflow, "Creating research plan...", level="info", progress=35)
+ self.service.logAdd(workflow, "Creating research plan...", level="info", progress=35)
researchPlan = await self._createResearchPlan(prompt)
# Check if this is truly a web research task
@@ -92,12 +92,12 @@ class AgentWebcrawler(AgentBase):
# Gather raw material through web research
if workflow:
- self.workflowManager.logAdd(workflow, "Gathering research material...", level="info", progress=45)
+ self.service.logAdd(workflow, "Gathering research material...", level="info", progress=45)
rawResults = await self._gatherResearchMaterial(researchPlan, workflow)
# Format results into requested output documents
if workflow:
- self.workflowManager.logAdd(workflow, "Creating output documents...", level="info", progress=55)
+ self.service.logAdd(workflow, "Creating output documents...", level="info", progress=55)
documents = await self._createOutputDocuments(
prompt,
rawResults,
@@ -213,8 +213,7 @@ class AgentWebcrawler(AgentBase):
directUrls = researchPlan.get("directUrls", [])[:self.maxUrl]
for i, url in enumerate(directUrls):
progress = 45 + int((i / len(directUrls)) * 5) # Progress from 45% to 50%
- if hasattr(self, 'workflowManager') and self.workflowManager:
- self.workflowManager.logAdd(workflow, f"Processing direct URL {i+1}/{len(directUrls)}...", level="info", progress=progress)
+ self.service.logAdd(workflow, f"Processing direct URL {i+1}/{len(directUrls)}...", level="info", progress=progress)
logger.info(f"Processing direct URL: {url}")
try:
# Fetch and extract content
@@ -240,8 +239,7 @@ class AgentWebcrawler(AgentBase):
searchTerms = researchPlan.get("searchTerms", [])[:self.maxSearchTerms]
for i, term in enumerate(searchTerms):
progress = 50 + int((i / len(searchTerms)) * 5) # Progress from 50% to 55%
- if hasattr(self, 'workflowManager') and self.workflowManager:
- self.workflowManager.logAdd(workflow, f"Searching term {i+1}/{len(searchTerms)}...", level="info", progress=progress)
+ self.service.logAdd(workflow, f"Searching term {i+1}/{len(searchTerms)}...", level="info", progress=progress)
logger.info(f"Searching for: {term}")
try:
# Perform search
diff --git a/modules/interfaces/gatewayModel.py b/modules/interfaces/gatewayModel.py
deleted file mode 100644
index 9c9ea178..00000000
--- a/modules/interfaces/gatewayModel.py
+++ /dev/null
@@ -1,151 +0,0 @@
-"""
-Data models for the gateway system.
-"""
-from pydantic import BaseModel, Field
-from typing import List, Dict, Any, Optional
-from datetime import datetime
-import uuid
-
-# Get all attributes of the model
-def getModelAttributes(modelClass):
- return [attr for attr in dir(modelClass)
- if not callable(getattr(modelClass, attr))
- and not attr.startswith('_')
- and attr not in ('metadata', 'query', 'query_class', 'label', 'field_labels')]
-
-class Label(BaseModel):
- """Label for an attribute or a class with support for multiple languages"""
- default: str = Field(..., description="Default label text")
- translations: Dict[str, str] = Field(default_factory=dict, description="Translations for different languages")
-
- class Config:
- title = "Label"
- description = "A label with support for multiple languages"
- schema_extra = {
- "example": {
- "default": "User",
- "translations": {
- "en": "User",
- "fr": "Utilisateur"
- }
- }
- }
-
- def getLabel(self, language: str = None):
- """Returns the label in the specified language, or the default value if not available"""
- if language and language in self.translations:
- return self.translations[language]
- return self.default
-
-
-class Mandate(BaseModel):
- """Data model for a mandate"""
- id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Unique ID of the mandate")
- name: str = Field(description="Name of the mandate")
- language: str = Field(description="Default language of the mandate")
-
- label: Label = Field(
- default=Label(default="Mandate", translations={"en": "Mandate", "fr": "Mandat"}),
- description="Label for the class"
- )
-
- # Labels for attributes
- fieldLabels: Dict[str, Label] = {
- "id": Label(default="ID", translations={}),
- "name": Label(default="Name of the mandate", translations={"en": "Mandate name", "fr": "Nom du mandat"}),
- "language": Label(default="Language", translations={"en": "Language", "fr": "Langue"})
- }
-
-class UserConnection(BaseModel):
- """Data model for a user's connection to an external service"""
- id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Unique ID of the connection")
- authority: str = Field(description="Authentication authority (microsoft, google, etc.)")
- externalId: str = Field(description="User ID in the external system")
- externalUsername: str = Field(description="Username in the external system")
- externalEmail: Optional[str] = Field(None, description="Email in the external system")
- connectedAt: datetime = Field(default_factory=datetime.now, description="When the connection was established")
-
- label: Label = Field(
- default=Label(default="User Connection", translations={"en": "User Connection", "fr": "Connexion utilisateur"}),
- description="Label for the class"
- )
-
- # Labels for attributes
- fieldLabels: Dict[str, Label] = {
- "id": Label(default="ID", translations={}),
- "authority": Label(default="Authority", translations={"en": "Authority", "fr": "Autorité"}),
- "externalId": Label(default="External ID", translations={"en": "External ID", "fr": "ID externe"}),
- "externalUsername": Label(default="External Username", translations={"en": "External Username", "fr": "Nom d'utilisateur externe"}),
- "externalEmail": Label(default="External Email", translations={"en": "External Email", "fr": "Email externe"}),
- "connectedAt": Label(default="Connected At", translations={"en": "Connected At", "fr": "Connecté le"})
- }
-
-class User(BaseModel):
- """Data model for a user"""
- id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Unique ID of the user")
- username: str = Field(description="Username for login")
- email: Optional[str] = Field(None, description="Email address of the user")
- fullName: Optional[str] = Field(None, description="Full name of the user")
- language: str = Field(description="Preferred language of the user")
- disabled: Optional[bool] = Field(False, description="Indicates whether the user is disabled")
- privilege: str = Field(description="Permission level") #sysadmin,admin,user
- authenticationAuthority: str = Field(default="local", description="Primary authentication authority (local, microsoft)")
- mandateId: str = Field(description="ID of the mandate this user belongs to")
- connections: List[UserConnection] = Field(default_factory=list, description="List of external service connections")
-
- label: Label = Field(
- default=Label(default="User", translations={"en": "User", "fr": "Utilisateur"}),
- description="Label for the class"
- )
-
- # Labels for attributes
- fieldLabels: Dict[str, Label] = {
- "id": Label(default="ID", translations={}),
- "mandateId": Label(default="Mandate ID", translations={"en": "Mandate ID", "fr": "ID de mandat"}),
- "username": Label(default="Username", translations={"en": "Username", "fr": "Nom d'utilisateur"}),
- "email": Label(default="Email", translations={"en": "Email", "fr": "E-mail"}),
- "fullName": Label(default="Full name", translations={"en": "Full name", "fr": "Nom complet"}),
- "language": Label(default="Language", translations={"en": "Language", "fr": "Langue"}),
- "disabled": Label(default="Disabled", translations={"en": "Disabled", "fr": "Désactivé"}),
- "privilege": Label(default="Permission level", translations={"en": "Access level", "fr": "Niveau d'accès"}),
- "authenticationAuthority": Label(default="Authentication Authority", translations={"en": "Authentication Authority", "fr": "Autorité d'authentification"}),
- "connections": Label(default="External Connections", translations={"en": "External Connections", "fr": "Connexions externes"})
- }
-
-
-class UserInDB(User):
- """Extended user class with password hash"""
- hashedPassword: str = Field(description="Hash of the user password")
-
- label: Label = Field(
- default=Label(default="User Access", translations={"en": "User Access", "fr": "Accès de l'utilisateur"}),
- description="Label for the class"
- )
-
- # Additional label for the password field
- fieldLabels: Dict[str, Label] = {
- "hashedPassword": Label(default="Password hash", translations={"en": "Password hash", "fr": "Hachage de mot de passe"})
- }
-
-
-class Token(BaseModel):
- """Data model for an authentication token"""
- accessToken: str = Field(description="The issued access token")
- tokenType: str = Field(description="Type of token (usually 'bearer')")
- label: Label = Field(
- default=Label(default="Token", translations={"en": "Token", "fr": "Jeton"}),
- description="Label for the class"
- )
-
- # Labels for attributes
- fieldLabels: Dict[str, Label] = {
- "accessToken": Label(default="Access token", translations={"en": "Access token", "fr": "Jeton d'accès"}),
- "tokenType": Label(default="Token type", translations={"en": "Token type", "fr": "Type de jeton"})
- }
-
-
-class TokenData(BaseModel):
- """Data for token decoding and validation"""
- username: Optional[str] = None
- mandateId: Optional[str] = None
- exp: Optional[datetime] = None
\ No newline at end of file
diff --git a/modules/interfaces/googleAccess.py b/modules/interfaces/googleAccess.py
deleted file mode 100644
index 5ad21087..00000000
--- a/modules/interfaces/googleAccess.py
+++ /dev/null
@@ -1,113 +0,0 @@
-"""
-Access control module for Google interface.
-Handles user access management and permission checks for Google tokens.
-"""
-
-from typing import Dict, Any, List, Optional
-
-class GoogleAccess:
- """
- Access control class for Google interface.
- Handles user access management and permission checks for Google tokens.
- """
-
- def __init__(self, currentUser: Dict[str, Any], db):
- """Initialize with user context."""
- self.currentUser = currentUser
- self._mandateId = currentUser.get("_mandateId")
- self._userId = currentUser.get("id")
-
- if not self._mandateId or not self._userId:
- raise ValueError("Invalid user context: _mandateId and id are required")
-
- self.db = db
-
- def uam(self, table: str, recordset: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
- """
- Unified user access management function that filters data based on user privileges
- and adds access control attributes.
-
- Args:
- table: Name of the table
- recordset: Recordset to filter based on access rules
-
- Returns:
- Filtered recordset with access control attributes
- """
- userPrivilege = self.currentUser.get("privilege", "user")
- filtered_records = []
-
- # Apply filtering based on privilege
- if userPrivilege == "sysadmin":
- filtered_records = recordset # System admins see all records
- elif userPrivilege == "admin":
- # Admins see records in their mandate
- filtered_records = [r for r in recordset if r.get("_mandateId") == self._mandateId]
- else: # Regular users
- # Users only see their own Google tokens
- filtered_records = [r for r in recordset
- if r.get("_mandateId") == self._mandateId and r.get("_userId") == self._userId]
-
- # Add access control attributes to each record
- for record in filtered_records:
- record_id = record.get("id")
-
- # Set access control flags based on user permissions
- if table == "googleTokens":
- record["_hideView"] = False # Everyone can view their own tokens
- record["_hideEdit"] = not self.canModify("googleTokens", record_id)
- record["_hideDelete"] = not self.canModify("googleTokens", record_id)
- else:
- # Default access control for other tables
- record["_hideView"] = False
- record["_hideEdit"] = not self.canModify(table, record_id)
- record["_hideDelete"] = not self.canModify(table, record_id)
-
- return filtered_records
-
- def canModify(self, table: str, recordId: Optional[str] = None) -> bool:
- """
- Checks if the current user can modify (create/update/delete) records in a table.
-
- Args:
- table: Name of the table
- recordId: Optional record ID for specific record check
-
- Returns:
- Boolean indicating permission
- """
- userPrivilege = self.currentUser.get("privilege", "user")
-
- # System admins can modify anything
- if userPrivilege == "sysadmin":
- return True
-
- # Check specific record permissions
- if recordId is not None:
- # Get the record to check ownership
- records = self.db.getRecordset(table, recordFilter={"id": recordId})
- if not records:
- return False
-
- record = records[0]
-
- # Admins can modify anything in their mandate
- if userPrivilege == "admin" and record.get("_mandateId") == self._mandateId:
- return True
-
- # Users can only modify their own Google tokens
- if (record.get("_mandateId") == self._mandateId and
- record.get("_userId") == self._userId):
- return True
-
- return False
- else:
- # For general table modify permission (e.g., create)
- # Admins can create anything in their mandate
- if userPrivilege == "admin":
- return True
-
- # Regular users can create their own Google tokens
- if table == "googleTokens":
- return True
- return False
\ No newline at end of file
diff --git a/modules/interfaces/googleInterface.py b/modules/interfaces/googleInterface.py
deleted file mode 100644
index 7f60a53c..00000000
--- a/modules/interfaces/googleInterface.py
+++ /dev/null
@@ -1,287 +0,0 @@
-"""
-Google interface for handling Google authentication and API operations.
-"""
-
-import logging
-import requests
-from typing import Dict, Any, Optional, Tuple
-from datetime import datetime
-import secrets
-from google.oauth2.credentials import Credentials
-from google_auth_oauthlib.flow import Flow
-from google.auth.transport.requests import Request
-import os
-
-from modules.shared.configuration import APP_CONFIG
-from modules.interfaces.googleModel import GoogleToken, GoogleUserInfo, GoogleConfig
-from modules.connectors.connectorDbJson import DatabaseConnector
-from modules.interfaces.googleAccess import GoogleAccess
-from modules.interfaces.gatewayInterface import getRootUser
-
-logger = logging.getLogger(__name__)
-
-# Singleton factory for GoogleInterface instances per context
-_googleInterfaces = {}
-
-# Root interface instance
-_rootGoogleInterface = None
-
-class GoogleInterface:
- """Interface for Google authentication and API operations"""
-
- def __init__(self, currentUser: Dict[str, Any] = None):
- """Initialize the Google interface"""
- # Initialize variables
- self.currentUser = currentUser
- self.mandateId = currentUser.get("mandateId") if currentUser else None
- self.userId = currentUser.get("id") if currentUser else None
- self.access = None # Will be set when user context is provided
-
- # Initialize configuration
- self.clientId = APP_CONFIG.get("Service_GOOGLE_CLIENT_ID")
- self.clientSecret = APP_CONFIG.get("Service_GOOGLE_CLIENT_SECRET")
- self.redirectUri = APP_CONFIG.get("Service_GOOGLE_REDIRECT_URI")
- self.authorityUrl = "https://accounts.google.com"
- self.tokenUrl = "https://oauth2.googleapis.com/token"
- self.userInfoUrl = "https://www.googleapis.com/oauth2/v3/userinfo"
- self.scopes = ["openid", "profile", "email"]
-
- # Initialize database
- self._initializeDatabase()
-
- # Initialize OAuth2 flow
- self.flow = Flow.from_client_config(
- {
- "web": {
- "client_id": self.clientId,
- "client_secret": self.clientSecret,
- "auth_uri": f"{self.authorityUrl}/o/oauth2/auth",
- "token_uri": self.tokenUrl,
- "redirect_uris": [self.redirectUri]
- }
- },
- scopes=self.scopes
- )
-
- # Set user context if provided
- if currentUser:
- self.setUserContext(currentUser)
-
- def _initializeDatabase(self):
- """Initializes the database connection."""
- try:
- # Get configuration values with defaults
- dbHost = APP_CONFIG.get("DB_GOOGLE_HOST", "data")
- dbDatabase = APP_CONFIG.get("DB_GOOGLE_DATABASE", "google")
- dbUser = APP_CONFIG.get("DB_GOOGLE_USER")
- dbPassword = APP_CONFIG.get("DB_GOOGLE_PASSWORD_SECRET")
-
- # Ensure the database directory exists
- os.makedirs(dbHost, exist_ok=True)
-
- self.db = DatabaseConnector(
- dbHost=dbHost,
- dbDatabase=dbDatabase,
- dbUser=dbUser,
- dbPassword=dbPassword,
- mandateId=self.mandateId,
- userId=self.userId
- )
-
- # Set context
- self.db.updateContext(self.mandateId, self.userId)
-
- logger.info("Database initialized successfully")
- except Exception as e:
- logger.error(f"Failed to initialize database: {str(e)}")
- raise
-
- def initiateLogin(self) -> str:
- """Initiate Google login flow"""
- try:
- # Generate auth URL
- auth_url, _ = self.flow.authorization_url(
- access_type="offline",
- include_granted_scopes="true",
- state=self._generateState()
- )
- return auth_url
- except Exception as e:
- logger.error(f"Error initiating Google login: {str(e)}")
- return None
-
- def handleAuthCallback(self, code: str) -> Optional[GoogleToken]:
- """Handle Google OAuth callback"""
- try:
- # Exchange code for token
- self.flow.fetch_token(code=code)
- credentials = self.flow.credentials
-
- # Get user info
- user_info = self.getUserInfoFromToken(credentials.token)
- if not user_info:
- return None
-
- # Create token model
- token = GoogleToken(
- access_token=credentials.token,
- refresh_token=credentials.refresh_token,
- expires_in=credentials.expiry.timestamp() - datetime.now().timestamp(),
- token_type=credentials.token_type,
- expires_at=credentials.expiry.timestamp(),
- user_info=user_info.model_dump(),
- mandateId=self.mandateId,
- userId=self.userId
- )
-
- return token
-
- except Exception as e:
- logger.error(f"Error handling auth callback: {str(e)}")
- return None
-
- def verifyToken(self, token: str) -> bool:
- """Verify Google token"""
- try:
- # Get user info from token
- user_info = self.getUserInfoFromToken(token)
- if not user_info:
- return False
-
- # Get current user's Google connection
- user = self.db.getRecordset("users", recordFilter={"id": self.userId})[0]
- google_connection = next((conn for conn in user.get("connections", [])
- if conn.get("authority") == "google"), None)
-
- if not google_connection:
- return False
-
- # Verify the token belongs to this user
- return user_info.id == google_connection.get("externalId")
-
- except Exception as e:
- logger.error(f"Error verifying Google token: {str(e)}")
- return False
-
- def getUserInfoFromToken(self, token: str) -> Optional[GoogleUserInfo]:
- """Get user info from Google API"""
- try:
- # Call Google API
- response = requests.get(
- self.userInfoUrl,
- headers={"Authorization": f"Bearer {token}"}
- )
-
- if response.status_code != 200:
- logger.error(f"Failed to get user info: {response.text}")
- return None
-
- data = response.json()
-
- # Create user info model
- return GoogleUserInfo(
- id=data["sub"], # Google uses 'sub' as the unique identifier
- email=data["email"],
- name=data.get("name", ""),
- picture=data.get("picture") # Google provides profile picture URL
- )
-
- except Exception as e:
- logger.error(f"Error getting user info: {str(e)}")
- return None
-
- def refreshToken(self, refresh_token: str) -> Optional[GoogleToken]:
- """Refresh Google token"""
- try:
- # Create credentials object
- credentials = Credentials(
- None, # No access token
- refresh_token=refresh_token,
- token_uri=self.tokenUrl,
- client_id=self.clientId,
- client_secret=self.clientSecret
- )
-
- # Refresh token
- credentials.refresh(Request())
-
- # Get user info
- user_info = self.getUserInfoFromToken(credentials.token)
- if not user_info:
- return None
-
- # Create token model
- token = GoogleToken(
- access_token=credentials.token,
- refresh_token=credentials.refresh_token or refresh_token,
- expires_in=credentials.expiry.timestamp() - datetime.now().timestamp(),
- token_type=credentials.token_type,
- expires_at=credentials.expiry.timestamp(),
- user_info=user_info.model_dump(),
- mandateId=self.mandateId,
- userId=self.userId
- )
-
- return token
-
- except Exception as e:
- logger.error(f"Error refreshing token: {str(e)}")
- return None
-
- def _generateState(self) -> str:
- """Generate secure state token"""
- return secrets.token_urlsafe(32)
-
- def setUserContext(self, currentUser: Dict[str, Any]):
- """Set user context for the interface"""
- if not currentUser:
- logger.info("Initializing interface without user context")
- return
-
- self.currentUser = currentUser
- self.mandateId = currentUser.get("mandateId")
- self.userId = currentUser.get("id")
-
- if not self.mandateId or not self.userId:
- raise ValueError("Invalid user context: mandateId and id are required")
-
- # Initialize access control with user context
- self.access = GoogleAccess(self.currentUser, self.db)
-
- # Update database context
- self.db.updateContext(self.mandateId, self.userId)
-
- logger.debug(f"User context set: userId={self.userId}")
-
-def getRootInterface() -> GoogleInterface:
- """
- Returns a GoogleInterface instance with root privileges.
- This is used for initial setup and user creation.
- """
- global _rootGoogleInterface
-
- if _rootGoogleInterface is None:
- # Get root user from gateway
- rootUser = getRootUser()
- _rootGoogleInterface = GoogleInterface(rootUser)
-
- return _rootGoogleInterface
-
-def getInterface(currentUser: Dict[str, Any] = None) -> GoogleInterface:
- """
- Returns a GoogleInterface instance.
- If currentUser is provided, initializes with user context.
- Otherwise, returns an instance with only database access.
- """
- # Create new instance if not exists
- if "default" not in _googleInterfaces:
- _googleInterfaces["default"] = GoogleInterface(currentUser or {})
-
- interface = _googleInterfaces["default"]
-
- if currentUser:
- interface.setUserContext(currentUser)
- else:
- logger.info("Returning interface without user context")
-
- return interface
\ No newline at end of file
diff --git a/modules/interfaces/googleModel.py b/modules/interfaces/googleModel.py
deleted file mode 100644
index a85ca644..00000000
--- a/modules/interfaces/googleModel.py
+++ /dev/null
@@ -1,35 +0,0 @@
-"""
-Models for Google authentication and API operations.
-"""
-
-from pydantic import BaseModel, Field
-from typing import Optional, Dict, Any
-from datetime import datetime
-
-class GoogleToken(BaseModel):
- """Model for Google OAuth tokens"""
- access_token: str
- refresh_token: Optional[str] = None
- expires_in: int
- token_type: str = "bearer"
- expires_at: float
- user_info: Dict[str, Any]
- mandateId: str
- userId: str
-
-class GoogleUserInfo(BaseModel):
- """Model for Google user information"""
- id: str # Google uses 'sub' as the unique identifier
- email: str
- name: str
- picture: Optional[str] = None # Google provides profile picture URL
-
-class GoogleConfig(BaseModel):
- """Configuration for Google authentication service"""
- client_id: str
- client_secret: str
- redirect_uri: str
- scopes: list[str]
- authority_url: str = "https://accounts.google.com"
- token_url: str = "https://oauth2.googleapis.com/token"
- user_info_url: str = "https://www.googleapis.com/oauth2/v3/userinfo"
\ No newline at end of file
diff --git a/modules/interfaces/lucydomModel.py b/modules/interfaces/lucydomModel.py
deleted file mode 100644
index d88500e4..00000000
--- a/modules/interfaces/lucydomModel.py
+++ /dev/null
@@ -1,246 +0,0 @@
-"""
-LucyDOM model classes for the workflow and document system.
-"""
-
-from pydantic import BaseModel, Field
-from typing import List, Dict, Any, Optional
-from datetime import datetime
-import uuid
-
-# Get all attributes of the model
-def getModelAttributes(modelClass):
- return [attr for attr in dir(modelClass)
- if not callable(getattr(modelClass, attr))
- and not attr.startswith('_')
- and attr not in ('metadata', 'query', 'query_class', 'label', 'field_labels')]
-
-# CORE MODELS
-
-class Label(BaseModel):
- """Label for an attribute or a class with support for multiple languages"""
- default: str = Field(..., description="Default label text")
- translations: Dict[str, str] = Field(default_factory=dict, description="Translations for different languages")
-
- class Config:
- title = "Label"
- description = "A label with support for multiple languages"
- schema_extra = {
- "example": {
- "default": "Document",
- "translations": {
- "en": "Document",
- "fr": "Document"
- }
- }
- }
-
- def getLabel(self, language: str = None):
- """Returns the label in the specified language, or the default value if not available"""
- if language and language in self.translations:
- return self.translations[language]
- return self.default
-
-
-class Prompt(BaseModel):
- """Data model for a prompt"""
- id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Unique ID of the prompt")
- content: str = Field(description="Content of the prompt")
- name: str = Field(description="Display name of the prompt")
- mandateId: str = Field(description="ID of the mandate this prompt belongs to")
-
- label: Label = Field(
- default=Label(default="Prompt", translations={"en": "Prompt", "fr": "Invite"}),
- description="Label for the class"
- )
-
- # Labels for attributes
- fieldLabels: Dict[str, Label] = {
- "id": Label(default="ID", translations={}),
- "content": Label(default="Content", translations={"en": "Content", "fr": "Contenu"}),
- "name": Label(default="Name", translations={"en": "Label", "fr": "Nom"}),
- "mandateId": Label(default="Mandate ID", translations={"en": "Mandate ID", "fr": "ID de mandat"})
- }
-
-
-class FileItem(BaseModel):
- """Data model for a file"""
- id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Unique ID of the data object")
- mimeType: str = Field(description="Type of the file MIME type")
- fileName: str = Field(description="Name of the file")
- fileSize: int = Field(description="Size of the file in bytes")
- fileHash: str = Field(description="Hash code for deduplication")
- workflowId: Optional[str] = Field(None, description="ID of the associated workflow, if any")
- mandateId: str = Field(description="ID of the mandate this file belongs to")
-
- label: Label = Field(
- default=Label(default="Data Object", translations={"en": "Data Object", "fr": "Objet de données"}),
- description="Label for the class"
- )
-
- # Labels for attributes
- fieldLabels: Dict[str, Label] = {
- "id": Label(default="ID", translations={}),
- "mimeType": Label(default="Type", translations={"en": "Type", "fr": "Type"}),
- "fileName": Label(default="Filename", translations={"en": "fileName", "fr": "Nom de fichier"}),
- "fileSize": Label(default="Size", translations={"en": "Size", "fr": "Taille"}),
- "fileHash": Label(default="File Hash", translations={"en": "Hash", "fr": "Hash"}),
- "workflowId": Label(default="Workflow ID", translations={"en": "Workflow ID", "fr": "ID du workflow"}),
- "mandateId": Label(default="Mandate ID", translations={"en": "Mandate ID", "fr": "ID de mandat"})
- }
-
-
-class FileData(BaseModel):
- """Data model for file content"""
- id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Unique ID of the data object")
- data: str = Field(description="content of the file, text or base64 encoded based on base64Encoded flag")
- base64Encoded: bool = Field(description="Flag indicating whether the data is base64 encoded")
- workflowId: Optional[str] = Field(None, description="ID of the associated workflow, if any")
-
-# WORKFLOW MODELS
-
-class ChatContent(BaseModel):
- """Content of a document in the chat"""
- sequenceNr: int = Field(1, description="Sequence number of the content in the source document")
- name: str = Field(description="Designation")
- mimeType: str = Field(description="MIME type")
- data: str = Field(description="Actual content")
- metadata: Dict[str, Any] = Field(default_factory=dict, description="Metadata about the content")
-
-
-class ChatDocument(BaseModel):
- """Document in the chat workflow"""
- id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Unique ID of the document")
- fileId: str = Field(description="ID of the referenced file in the database")
- fileName: str = Field(description="Name of the file")
- fileSize: int = Field(description="Size of the file in bytes")
- mimeType: str = Field(description="MIME type")
- contents: List[ChatContent] = Field(default=[], description="Document contents")
-
-
-class ChatStat(BaseModel):
- """Statistics for performance and data usage"""
- id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Unique ID of the stats")
- workflowId: str = Field(description="ID of the associated workflow")
- processingTime: Optional[float] = Field(None, description="Processing time in seconds")
- tokenCount: Optional[int] = Field(None, description="Token count (for AI models)")
- bytesSent: Optional[int] = Field(None, description="Bytes sent")
- bytesReceived: Optional[int] = Field(None, description="Bytes received")
-
- label: Label = Field(
- default=Label(default="Chat Statistics", translations={"en": "Chat Statistics", "fr": "Statistiques de chat"}),
- description="Label for the class"
- )
-
- # Labels for attributes
- fieldLabels: Dict[str, Label] = {
- "id": Label(default="ID", translations={}),
- "workflowId": Label(default="Workflow ID", translations={"en": "Workflow ID", "fr": "ID du workflow"}),
- "processingTime": Label(default="Processing Time", translations={"en": "Processing Time", "fr": "Temps de traitement"}),
- "tokenCount": Label(default="Token Count", translations={"en": "Token Count", "fr": "Nombre de tokens"}),
- "bytesSent": Label(default="Bytes Sent", translations={"en": "Bytes Sent", "fr": "Octets envoyés"}),
- "bytesReceived": Label(default="Bytes Received", translations={"en": "Bytes Received", "fr": "Octets reçus"})
- }
-
-
-class ChatMessage(BaseModel):
- """Message object in the chat workflow"""
- id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Unique ID of the message")
- workflowId: str = Field(description="Reference to the parent workflow")
- parentMessageId: Optional[str] = Field(None, description="Reference to the replied message")
- agentName: Optional[str] = Field(None, description="Name of the agent used")
- documents: Optional[List[ChatDocument]] = Field(None, description="Documents in this message")
- message: Optional[str] = Field(None, description="Text content of the message")
- role: str = Field(description="Role of the sender ('system', 'user', 'assistant')")
- status: str = Field(description="Status of the message ('first', 'step', 'last')")
-
- sequenceNr: int = Field(description="Sequence number for sorting")
- startedAt: datetime = Field(description="Timestamp for message creation")
- finishedAt: Optional[datetime] = Field(None, description="Timestamp for message completion")
- stats: Optional[ChatStat] = Field(None, description="Statistics")
-
-
-class ChatLog(BaseModel):
- """Log entry for a chat workflow"""
- id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Unique ID of the log entry")
- workflowId: str = Field(description="ID of the associated workflow")
- message: str = Field(description="Log message content")
- type: str = Field(description="Type of log ('info', 'warning', 'error')")
- timestamp: str = Field(description="Timestamp of the log entry")
- agentName: str = Field(description="Name of the agent that created the log")
- status: str = Field(description="Status of the workflow at log time")
- progress: Optional[int] = Field(None, description="Progress value (0-100)")
-
-
-class ChatWorkflow(BaseModel):
- """Chat workflow object for multi-agent system"""
- id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Unique ID of the chat workflow")
- status: str = Field(description="Status of the chat workflow")
- name: Optional[str] = Field(None, description="Name of the chat workflow")
- currentRound: int = Field(default=1, description="Current round/iteration")
- lastActivity: str = Field(description="Timestamp of the last activity")
- startedAt: str = Field(description="Start timestamp")
- logs: List[ChatLog] = Field(default=[], description="Log entries")
- messages: List[ChatMessage] = Field(default=[], description="Message history")
- stats: Optional[ChatStat] = Field(None, description="Statistics")
- mandateId: str = Field(description="ID of the mandate this workflow belongs to")
-
- label: Label = Field(
- default=Label(default="Chat Workflow", translations={"en": "Chat Workflow", "fr": "Workflow de chat"}),
- description="Label for the class"
- )
-
- # Labels for attributes
- fieldLabels: Dict[str, Label] = {
- "id": Label(default="ID", translations={}),
- "status": Label(default="Status", translations={"en": "Status", "fr": "Statut"}),
- "name": Label(default="Name", translations={"en": "Name", "fr": "Nom"}),
- "currentRound": Label(default="Current Round", translations={"en": "Current Round", "fr": "Tour actuel"}),
- "lastActivity": Label(default="Last Activity", translations={"en": "Last Activity", "fr": "Dernière activité"}),
- "startedAt": Label(default="Started At", translations={"en": "Started At", "fr": "Démarré à"}),
- "logs": Label(default="Logs", translations={"en": "Logs", "fr": "Journaux"}),
- "messages": Label(default="Messages", translations={"en": "Messages", "fr": "Messages"}),
- "stats": Label(default="Statistics", translations={"en": "Statistics", "fr": "Statistiques"}),
- "mandateId": Label(default="Mandate ID", translations={"en": "Mandate ID", "fr": "ID de mandat"})
- }
-
-
-# AGENT AND TASK MODELS
-
-class Agent(BaseModel):
- """Data model for an agent"""
- id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Unique ID of the agent")
- name: str = Field(description="Name of the agent")
- description: str = Field(description="Description of the agent's functionality")
- capabilities: List[str] = Field(default=[], description="List of agent capabilities")
-
-
-class AgentResponse(BaseModel):
- """Response structure returned by agent processing"""
- response: str = Field(description="Text response from the agent")
- documents: List[ChatDocument] = Field(default=[], description="List of document objects created by the agent")
-
-
-class TaskItem(BaseModel):
- """Individual task in the workplan"""
- sequenceNr: int = Field(description="Sequence number of the task")
- agentName: str = Field(description="Name of an available agent")
- prompt: str = Field(description="Specific instructions to the agent")
- userLanguage: str = Field(description="Language code of the user's request")
- filesInput: List[str] = Field(default=[], description="List of input files in format 'fileName[;documentId]'")
- filesOutput: List[str] = Field(default=[], description="List of output files in format 'fileName'")
-
-
-class TaskPlan(BaseModel):
- """Work plan created by project manager"""
- fileList: List[str] = Field(default=[], description="List of required result documents in format 'fileName'")
- taskItems: List[TaskItem] = Field(default=[], description="Plan for executing agents")
- userResponse: str = Field(description="Response to the user explaining the plan")
- userLanguage: str = Field(default="en", description="Language code of the user's request")
-
-
-class UserInputRequest(BaseModel):
- """Request for user input to a running workflow"""
- prompt: str = Field(description="Message from the user")
- listFileId: List[str] = Field(default=[], description="List of FileItem IDs")
- metadata: Dict[str, Any] = Field(default_factory=dict, description="Additional metadata for the request")
-
diff --git a/modules/interfaces/msftAccess.py b/modules/interfaces/msftAccess.py
deleted file mode 100644
index bdbce32c..00000000
--- a/modules/interfaces/msftAccess.py
+++ /dev/null
@@ -1,113 +0,0 @@
-"""
-Access control module for Microsoft interface.
-Handles user access management and permission checks for Microsoft tokens.
-"""
-
-from typing import Dict, Any, List, Optional
-
-class MsftAccess:
- """
- Access control class for Microsoft interface.
- Handles user access management and permission checks for Microsoft tokens.
- """
-
- def __init__(self, currentUser: Dict[str, Any], db):
- """Initialize with user context."""
- self.currentUser = currentUser
- self._mandateId = currentUser.get("_mandateId")
- self._userId = currentUser.get("id")
-
- if not self._mandateId or not self._userId:
- raise ValueError("Invalid user context: _mandateId and id are required")
-
- self.db = db
-
- def uam(self, table: str, recordset: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
- """
- Unified user access management function that filters data based on user privileges
- and adds access control attributes.
-
- Args:
- table: Name of the table
- recordset: Recordset to filter based on access rules
-
- Returns:
- Filtered recordset with access control attributes
- """
- userPrivilege = self.currentUser.get("privilege", "user")
- filtered_records = []
-
- # Apply filtering based on privilege
- if userPrivilege == "sysadmin":
- filtered_records = recordset # System admins see all records
- elif userPrivilege == "admin":
- # Admins see records in their mandate
- filtered_records = [r for r in recordset if r.get("_mandateId") == self._mandateId]
- else: # Regular users
- # Users only see their own Microsoft tokens
- filtered_records = [r for r in recordset
- if r.get("_mandateId") == self._mandateId and r.get("_userId") == self._userId]
-
- # Add access control attributes to each record
- for record in filtered_records:
- record_id = record.get("id")
-
- # Set access control flags based on user permissions
- if table == "msftTokens":
- record["_hideView"] = False # Everyone can view their own tokens
- record["_hideEdit"] = not self.canModify("msftTokens", record_id)
- record["_hideDelete"] = not self.canModify("msftTokens", record_id)
- else:
- # Default access control for other tables
- record["_hideView"] = False
- record["_hideEdit"] = not self.canModify(table, record_id)
- record["_hideDelete"] = not self.canModify(table, record_id)
-
- return filtered_records
-
- def canModify(self, table: str, recordId: Optional[str] = None) -> bool:
- """
- Checks if the current user can modify (create/update/delete) records in a table.
-
- Args:
- table: Name of the table
- recordId: Optional record ID for specific record check
-
- Returns:
- Boolean indicating permission
- """
- userPrivilege = self.currentUser.get("privilege", "user")
-
- # System admins can modify anything
- if userPrivilege == "sysadmin":
- return True
-
- # Check specific record permissions
- if recordId is not None:
- # Get the record to check ownership
- records = self.db.getRecordset(table, recordFilter={"id": recordId})
- if not records:
- return False
-
- record = records[0]
-
- # Admins can modify anything in their mandate
- if userPrivilege == "admin" and record.get("_mandateId") == self._mandateId:
- return True
-
- # Users can only modify their own Microsoft tokens
- if (record.get("_mandateId") == self._mandateId and
- record.get("_userId") == self._userId):
- return True
-
- return False
- else:
- # For general table modify permission (e.g., create)
- # Admins can create anything in their mandate
- if userPrivilege == "admin":
- return True
-
- # Regular users can create their own Microsoft tokens
- if table == "msftTokens":
- return True
- return False
\ No newline at end of file
diff --git a/modules/interfaces/msftInterface.py b/modules/interfaces/msftInterface.py
deleted file mode 100644
index d6720aae..00000000
--- a/modules/interfaces/msftInterface.py
+++ /dev/null
@@ -1,520 +0,0 @@
-"""
-Microsoft interface for handling Microsoft authentication and Graph API operations.
-"""
-
-import logging
-import json
-import requests
-import base64
-import msal
-from typing import Dict, Any, Optional, List, Tuple
-from datetime import datetime, timedelta
-import secrets
-import os
-
-from modules.shared.configuration import APP_CONFIG
-from .msftModel import MsftToken, MsftUserInfo, MsftConfig
-from modules.connectors.connectorDbJson import DatabaseConnector
-from .msftAccess import MsftAccess
-from modules.interfaces.gatewayInterface import getRootUser
-
-logger = logging.getLogger(__name__)
-
-# Singleton factory for MsftInterface instances per context
-_msftInterfaces = {}
-
-# Root interface instance
-_rootMsftInterface = None
-
-class MsftInterface:
- """Interface for Microsoft authentication and Graph API operations"""
-
- def __init__(self, currentUser: Dict[str, Any] = None):
- """Initialize the Microsoft interface"""
- # Initialize variables
- self.currentUser = currentUser
- self.mandateId = currentUser.get("mandateId") if currentUser else None
- self.userId = currentUser.get("id") if currentUser else None
- self.access = None # Will be set when user context is provided
-
- # Initialize configuration
- self.clientId = APP_CONFIG.get("Service_MSFT_CLIENT_ID")
- self.clientSecret = APP_CONFIG.get("Service_MSFT_CLIENT_SECRET")
- self.tenantId = APP_CONFIG.get("Service_MSFT_TENANT_ID", "common")
- self.redirectUri = APP_CONFIG.get("Service_MSFT_REDIRECT_URI")
- self.authority = f"https://login.microsoftonline.com/{self.tenantId}"
- self.scopes = ["Mail.ReadWrite", "User.Read"]
-
- # Initialize database
- self._initializeDatabase()
-
- # Initialize MSAL application
- self.msal_app = msal.ConfidentialClientApplication(
- self.clientId,
- authority=self.authority,
- client_credential=self.clientSecret
- )
-
- # Set user context if provided
- if currentUser:
- self.setUserContext(currentUser)
-
- def _initializeDatabase(self):
- """Initializes the database connection."""
- try:
- # Get configuration values with defaults
- dbHost = APP_CONFIG.get("DB_MSFT_HOST", "data")
- dbDatabase = APP_CONFIG.get("DB_MSFT_DATABASE", "msft")
- dbUser = APP_CONFIG.get("DB_MSFT_USER")
- dbPassword = APP_CONFIG.get("DB_MSFT_PASSWORD_SECRET")
-
- # Ensure the database directory exists
- os.makedirs(dbHost, exist_ok=True)
-
- self.db = DatabaseConnector(
- dbHost=dbHost,
- dbDatabase=dbDatabase,
- dbUser=dbUser,
- dbPassword=dbPassword,
- mandateId=self.mandateId,
- userId=self.userId
- )
-
- # Set context
- self.db.updateContext(self.mandateId, self.userId)
-
- logger.info("Database initialized successfully")
- except Exception as e:
- logger.error(f"Failed to initialize database: {str(e)}")
- raise
-
- def _uam(self, table: str, recordset: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
- """
- Unified user access management function that filters data based on user privileges
- and adds access control attributes.
-
- Args:
- table: Name of the table
- recordset: Recordset to filter based on access rules
-
- Returns:
- Filtered recordset with access control attributes
- """
- return self.access.uam(table, recordset)
-
- def _canModify(self, table: str, recordId: Optional[str] = None) -> bool:
- """
- Checks if the current user can modify (create/update/delete) records in a table.
-
- Args:
- table: Name of the table
- recordId: Optional record ID for specific record check
-
- Returns:
- Boolean indicating permission
- """
- return self.access.canModify(table, recordId)
-
- def initiateLogin(self) -> str:
- """Initiate Microsoft login flow"""
- try:
- # Generate auth URL
- auth_url = self.msal_app.get_authorization_request_url(
- scopes=self.scopes,
- redirect_uri=self.redirectUri,
- state=self._generateState()
- )
- return auth_url
- except Exception as e:
- logger.error(f"Error initiating Microsoft login: {str(e)}")
- return None
-
- def handleAuthCallback(self, code: str) -> Optional[MsftToken]:
- """Handle Microsoft OAuth callback"""
- try:
- # Get token from code
- token_response = self.msal_app.acquire_token_by_authorization_code(
- code,
- scopes=self.scopes,
- redirect_uri=self.redirectUri
- )
-
- if "error" in token_response:
- logger.error(f"Token acquisition failed: {token_response['error']}")
- return None
-
- # Get user info
- user_info = self.getUserInfoFromToken(token_response["access_token"])
- if not user_info:
- return None
-
- # Create token model
- token = MsftToken(
- access_token=token_response["access_token"],
- refresh_token=token_response.get("refresh_token", ""),
- expires_in=token_response.get("expires_in", 0),
- token_type=token_response.get("token_type", "bearer"),
- expires_at=datetime.now().timestamp() + token_response.get("expires_in", 0),
- user_info=user_info.model_dump(),
- mandateId=self.mandateId,
- userId=self.userId
- )
-
- return token
-
- except Exception as e:
- logger.error(f"Error handling auth callback: {str(e)}")
- return None
-
- def verifyToken(self, token: str) -> bool:
- """Verify Microsoft token"""
- try:
- # Get user info from token
- user_info = self.getUserInfoFromToken(token)
- if not user_info:
- return False
-
- # Get current user's Microsoft connection
- user = self.db.getRecordset("users", recordFilter={"id": self.userId})[0]
- msft_connection = next((conn for conn in user.get("connections", [])
- if conn.get("authority") == "microsoft"), None)
-
- if not msft_connection:
- return False
-
- # Verify the token belongs to this user
- return user_info.id == msft_connection.get("externalId")
-
- except Exception as e:
- logger.error(f"Error verifying Microsoft token: {str(e)}")
- return False
-
- def getUserInfoFromToken(self, token: str) -> Optional[MsftUserInfo]:
- """Get user info from Microsoft Graph"""
- try:
- # Call Microsoft Graph API
- response = requests.get(
- "https://graph.microsoft.com/v1.0/me",
- headers={"Authorization": f"Bearer {token}"}
- )
-
- if response.status_code != 200:
- logger.error(f"Failed to get user info: {response.text}")
- return None
-
- data = response.json()
-
- # Create user info model
- return MsftUserInfo(
- id=data["id"],
- email=data.get("mail") or data.get("userPrincipalName"),
- name=data.get("displayName", ""),
- picture=None # Microsoft Graph doesn't provide profile picture by default
- )
-
- except Exception as e:
- logger.error(f"Error getting user info: {str(e)}")
- return None
-
- def refreshToken(self, refresh_token: str) -> Optional[MsftToken]:
- """Refresh Microsoft token"""
- try:
- # Refresh token
- token_response = self.msal_app.acquire_token_by_refresh_token(
- refresh_token,
- scopes=self.scopes
- )
-
- if "error" in token_response:
- logger.error(f"Token refresh failed: {token_response['error']}")
- return None
-
- # Get user info
- user_info = self.getUserInfoFromToken(token_response["access_token"])
- if not user_info:
- return None
-
- # Create token model
- token = MsftToken(
- access_token=token_response["access_token"],
- refresh_token=token_response.get("refresh_token", refresh_token),
- expires_in=token_response.get("expires_in", 0),
- token_type=token_response.get("token_type", "bearer"),
- expires_at=datetime.now().timestamp() + token_response.get("expires_in", 0),
- user_info=user_info.model_dump(),
- mandateId=self.mandateId,
- userId=self.userId
- )
-
- return token
-
- except Exception as e:
- logger.error(f"Error refreshing token: {str(e)}")
- return None
-
- def _generateState(self) -> str:
- """Generate secure state token"""
- return secrets.token_urlsafe(32)
-
- def createDraftEmail(self, recipient: str, subject: str, body: str, attachments: List[Dict[str, Any]] = None) -> bool:
- """Create a draft email using Microsoft Graph API"""
- try:
- user_info, access_token = self.getCurrentUserToken()
- if not user_info or not access_token:
- return False
-
- headers = {
- 'Authorization': f'Bearer {access_token}',
- 'Content-Type': 'application/json'
- }
-
- email_data = {
- 'subject': subject,
- 'body': {
- 'contentType': 'HTML',
- 'content': body
- },
- 'toRecipients': [
- {
- 'emailAddress': {
- 'address': recipient
- }
- }
- ]
- }
-
- if attachments:
- email_data['attachments'] = []
- for attachment in attachments:
- doc = attachment.get('document', {})
- file_name = attachment.get('name', 'attachment.file')
-
- file_content = doc.get('data')
- if not file_content:
- continue
-
- mime_type = doc.get('mimeType', 'application/octet-stream')
- is_base64 = doc.get('base64Encoded', False)
-
- try:
- if is_base64:
- content_bytes = file_content
- else:
- if isinstance(file_content, str):
- content_bytes = base64.b64encode(file_content.encode('utf-8')).decode('utf-8')
- elif isinstance(file_content, bytes):
- content_bytes = base64.b64encode(file_content).decode('utf-8')
- else:
- continue
-
- decoded_size = len(base64.b64decode(content_bytes))
-
- attachment_data = {
- '@odata.type': '#microsoft.graph.fileAttachment',
- 'name': file_name,
- 'contentType': mime_type,
- 'contentBytes': content_bytes,
- 'isInline': False,
- 'size': decoded_size
- }
- email_data['attachments'].append(attachment_data)
-
- except Exception as e:
- logger.error(f"Error processing attachment {file_name}: {str(e)}")
- continue
-
- response = requests.post(
- 'https://graph.microsoft.com/v1.0/me/messages',
- headers=headers,
- json=email_data
- )
-
- return response.status_code >= 200 and response.status_code < 300
-
- except Exception as e:
- logger.error(f"Error creating draft email: {str(e)}")
- return False
-
- def saveMsftToken(self, token_data: Dict[str, Any]) -> bool:
- """
- Save Microsoft token data to the database.
-
- Args:
- token_data: Token data to save
-
- Returns:
- bool: True if successful, False otherwise
- """
- try:
- # Get existing token if any
- existing_tokens = self.db.getRecordset(
- "msftTokens",
- recordFilter={
- "mandateId": self.mandateId,
- "userId": self.userId
- }
- )
-
- if existing_tokens:
- # Update existing token
- token_id = existing_tokens[0]["id"]
- success = self.db.updateRecord(
- "msftTokens",
- token_id,
- token_data
- )
- else:
- # Create new token record
- success = self.db.createRecord(
- "msftTokens",
- token_data
- )
-
- return success
-
- except Exception as e:
- logger.error(f"Error saving Microsoft token: {str(e)}")
- return False
-
- def getMsftToken(self) -> Optional[Dict[str, Any]]:
- """
- Get Microsoft token data for current user.
-
- Returns:
- Optional[Dict[str, Any]]: Token data if found, None otherwise
- """
- try:
- tokens = self.db.getRecordset(
- "msftTokens",
- recordFilter={
- "mandateId": self.mandateId,
- "userId": self.userId
- }
- )
-
- if not tokens:
- return None
-
- return tokens[0]
-
- except Exception as e:
- logger.error(f"Error getting Microsoft token: {str(e)}")
- return None
-
- def getCurrentUserToken(self) -> Tuple[Optional[Dict[str, Any]], Optional[str]]:
- """
- Get current user's Microsoft token and user info.
-
- Returns:
- Tuple[Optional[Dict[str, Any]], Optional[str]]: User info and access token
- """
- try:
- token_data = self.getMsftToken()
- if not token_data:
- return None, None
-
- # Check if token needs refresh
- if datetime.now().timestamp() >= token_data["expires_at"]:
- if not token_data.get("refresh_token"):
- return None, None
-
- # Refresh token
- new_token = self.refreshToken(token_data["refresh_token"])
- if not new_token:
- return None, None
-
- # Save new token
- self.saveMsftToken(new_token.model_dump())
- token_data = new_token.model_dump()
-
- return token_data["user_info"], token_data["access_token"]
-
- except Exception as e:
- logger.error(f"Error getting current user token: {str(e)}")
- return None, None
-
- def deleteMsftToken(self) -> bool:
- """
- Delete Microsoft token for current user.
-
- Returns:
- bool: True if successful, False otherwise
- """
- try:
- # Get existing token
- existing_tokens = self.db.getRecordset(
- "msftTokens",
- recordFilter={
- "mandateId": self.mandateId,
- "userId": self.userId
- }
- )
-
- if not existing_tokens:
- return True # No token to delete
-
- # Delete token
- success = self.db.deleteRecord(
- "msftTokens",
- existing_tokens[0]["id"]
- )
-
- return success
-
- except Exception as e:
- logger.error(f"Error deleting Microsoft token: {str(e)}")
- return False
-
- def setUserContext(self, currentUser: Dict[str, Any]):
- """Set user context for the interface"""
- if not currentUser:
- logger.info("Initializing interface without user context")
- return
-
- self.currentUser = currentUser
- self.mandateId = currentUser.get("mandateId")
- self.userId = currentUser.get("id")
-
- if not self.mandateId or not self.userId:
- raise ValueError("Invalid user context: mandateId and id are required")
-
- # Initialize access control with user context
- self.access = MsftAccess(self.currentUser, self.db)
-
- # Update database context
- self.db.updateContext(self.mandateId, self.userId)
-
- logger.debug(f"User context set: userId={self.userId}")
-
-def getRootInterface() -> MsftInterface:
- """
- Returns a MsftInterface instance with root privileges.
- This is used for initial setup and user creation.
- """
- global _rootMsftInterface
-
- if _rootMsftInterface is None:
- # Get root user from gateway
- rootUser = getRootUser()
- _rootMsftInterface = MsftInterface(rootUser)
-
- return _rootMsftInterface
-
-def getInterface(currentUser: Dict[str, Any] = None) -> MsftInterface:
- """
- Returns a MsftInterface instance.
- If currentUser is provided, initializes with user context.
- Otherwise, returns an instance with only database access.
- """
- # Create new instance if not exists
- if "default" not in _msftInterfaces:
- _msftInterfaces["default"] = MsftInterface(currentUser or {})
-
- interface = _msftInterfaces["default"]
-
- if currentUser:
- interface.setUserContext(currentUser)
- else:
- logger.info("Returning interface without user context")
-
- return interface
\ No newline at end of file
diff --git a/modules/interfaces/msftModel.py b/modules/interfaces/msftModel.py
deleted file mode 100644
index 9ea1927d..00000000
--- a/modules/interfaces/msftModel.py
+++ /dev/null
@@ -1,70 +0,0 @@
-"""
-Models for Microsoft authentication and Graph API operations.
-"""
-
-from pydantic import BaseModel, Field
-from typing import Optional, Dict, Any
-from datetime import datetime
-
-class MsftToken(BaseModel):
- """Model for Microsoft OAuth tokens"""
- access_token: str
- refresh_token: Optional[str] = None
- expires_in: int
- token_type: str = "bearer"
- expires_at: float
- user_info: Dict[str, Any]
- mandateId: str
- userId: str
-
-class MsftUserInfo(BaseModel):
- """Model for Microsoft user information"""
- id: str
- email: str
- name: str
- picture: Optional[str] = None # Microsoft Graph doesn't provide profile picture by default
-
-class MsftConfig(BaseModel):
- """Configuration for Microsoft authentication service"""
- client_id: str
- client_secret: str
- redirect_uri: str
- scopes: list[str]
- authority_url: str = "https://login.microsoftonline.com/common"
- token_url: str = "https://login.microsoftonline.com/common/oauth2/v2.0/token"
- user_info_url: str = "https://graph.microsoft.com/v1.0/me"
-
-# Get all attributes of the model
-def getModelAttributes(modelClass):
- return [attr for attr in dir(modelClass)
- if not callable(getattr(modelClass, attr))
- and not attr.startswith('_')
- and attr not in ('metadata', 'query', 'query_class', 'label', 'field_labels')]
-
-class Label(BaseModel):
- """Label for an attribute or a class with support for multiple languages"""
- default: str
- translations: Dict[str, str] = {}
-
- def getLabel(self, language: str = None):
- """Returns the label in the specified language, or the default value if not available"""
- if language and language in self.translations:
- return self.translations[language]
- return self.default
-
-# Response models for Microsoft routes
-class MsftAuthStatus(BaseModel):
- """Response model for Microsoft authentication status"""
- authenticated: bool
- message: Optional[str] = None
- user: Optional[MsftUserInfo] = None
-
-class MsftTokenResponse(BaseModel):
- """Response model for Microsoft token"""
- token: MsftToken
-
-class MsftSaveTokenResponse(BaseModel):
- """Response model for saving Microsoft token"""
- success: bool
- message: str
- token: Optional[MsftToken] = None
\ No newline at end of file
diff --git a/modules/interfaces/gatewayAccess.py b/modules/interfaces/serviceAppAccess.py
similarity index 54%
rename from modules/interfaces/gatewayAccess.py
rename to modules/interfaces/serviceAppAccess.py
index 3b031407..efcbcdee 100644
--- a/modules/interfaces/gatewayAccess.py
+++ b/modules/interfaces/serviceAppAccess.py
@@ -1,13 +1,14 @@
"""
-Access control module for Gateway interface.
-Handles user access management and permission checks.
+Access control for the Application.
"""
from typing import Dict, Any, List, Optional
+from datetime import datetime
+from modules.interfaces.serviceAppModel import UserPrivilege, Session
-class GatewayAccess:
+class AppAccess:
"""
- Access control class for Gateway interface.
+ Access control class for Application interface.
Handles user access management and permission checks.
"""
@@ -16,6 +17,7 @@ class GatewayAccess:
self.currentUser = currentUser
self.mandateId = currentUser.get("mandateId")
self.userId = currentUser.get("id")
+ self.privilege = currentUser.get("privilege", UserPrivilege.USER)
if not self.mandateId or not self.userId:
raise ValueError("Invalid user context: mandateId and userId are required")
@@ -34,19 +36,18 @@ class GatewayAccess:
Returns:
Filtered recordset with access control attributes
"""
- userPrivilege = self.currentUser.get("privilege", "user")
filtered_records = []
# Apply filtering based on privilege
- if userPrivilege == "sysadmin":
+ if self.privilege == UserPrivilege.SYSADMIN:
filtered_records = recordset # System admins see all records
- elif userPrivilege == "admin":
+ elif self.privilege == UserPrivilege.ADMIN:
# Admins see records in their mandate
filtered_records = [r for r in recordset if r.get("mandateId","-") == self.mandateId]
else: # Regular users
# Users only see records they own within their mandate
filtered_records = [r for r in recordset
- if r.get("mandateId","-") == self.mandateId and r.get("_createdBy") == self.userId]
+ if r.get("mandateId","-") == self.mandateId and r.get("createdBy") == self.userId]
# Add access control attributes to each record
for record in filtered_records:
@@ -61,6 +62,22 @@ class GatewayAccess:
record["_hideView"] = False # Everyone can view
record["_hideEdit"] = not self.canModify("users", record_id)
record["_hideDelete"] = not self.canModify("users", record_id)
+ elif table == "sessions":
+ # Only show sessions for the current user or if admin
+ if self.privilege in [UserPrivilege.SYSADMIN, UserPrivilege.ADMIN]:
+ record["_hideView"] = False
+ else:
+ record["_hideView"] = record.get("userId") != self.userId
+ record["_hideEdit"] = True # Sessions can't be edited
+ record["_hideDelete"] = not self.canModify("sessions", record_id)
+ elif table == "auth_events":
+ # Only show auth events for the current user or if admin
+ if self.privilege in [UserPrivilege.SYSADMIN, UserPrivilege.ADMIN]:
+ record["_hideView"] = False
+ else:
+ record["_hideView"] = record.get("userId") != self.userId
+ record["_hideEdit"] = True # Auth events can't be edited
+ record["_hideDelete"] = not self.canModify("auth_events", record_id)
else:
# Default access control for other tables
record["_hideView"] = False
@@ -80,10 +97,8 @@ class GatewayAccess:
Returns:
Boolean indicating permission
"""
- userPrivilege = self.currentUser.get("privilege", "user")
-
# System admins can modify anything
- if userPrivilege == "sysadmin":
+ if self.privilege == UserPrivilege.SYSADMIN:
return True
# Check specific record permissions
@@ -96,25 +111,79 @@ class GatewayAccess:
record = records[0]
# Admins can modify anything in their mandate
- if userPrivilege == "admin" and record.get("mandateId","-") == self.mandateId:
+ if self.privilege == UserPrivilege.ADMIN and record.get("mandateId","-") == self.mandateId:
# Exception: Can't modify Root mandate unless you are a sysadmin
- if table == "mandates" and record.get("initialid") and userPrivilege != "sysadmin":
+ if table == "mandates" and record.get("initialid") and self.privilege != UserPrivilege.SYSADMIN:
return False
return True
# Users can only modify their own records
if (record.get("mandateId","-") == self.mandateId and
- record.get("_createdBy") == self.userId):
+ record.get("createdBy") == self.userId):
return True
return False
else:
# For general table modify permission (e.g., create)
# Admins can create anything in their mandate
- if userPrivilege == "admin":
+ if self.privilege == UserPrivilege.ADMIN:
return True
# Regular users can create most entities
if table == "mandates":
return False # Regular users can't create mandates
- return True
\ No newline at end of file
+ return True
+
+ def validateSession(self, sessionId: str) -> bool:
+ """
+ Validates a user session.
+
+ Args:
+ sessionId: ID of the session to validate
+
+ Returns:
+ Boolean indicating if session is valid
+ """
+ try:
+ # Get session
+ sessions = self.db.getRecordset("sessions", recordFilter={"id": sessionId})
+ if not sessions:
+ return False
+
+ session = sessions[0]
+
+ # Check if session is expired
+ if datetime.now() > session["expiresAt"]:
+ return False
+
+ # Check if user has permission to access this session
+ if session["userId"] != self.userId and self.privilege not in [UserPrivilege.SYSADMIN, UserPrivilege.ADMIN]:
+ return False
+
+ # Update last activity
+ self.db.recordModify("sessions", sessionId, {
+ "lastActivity": datetime.now()
+ })
+
+ return True
+
+ except Exception as e:
+ logger.error(f"Error validating session: {str(e)}")
+ return False
+
+ def canAccessAuthEvents(self, userId: str) -> bool:
+ """
+ Checks if the current user can access auth events for a specific user.
+
+ Args:
+ userId: ID of the user whose auth events to check
+
+ Returns:
+ Boolean indicating permission
+ """
+ # System admins and admins can access all auth events
+ if self.privilege in [UserPrivilege.SYSADMIN, UserPrivilege.ADMIN]:
+ return True
+
+ # Regular users can only access their own auth events
+ return userId == self.userId
\ No newline at end of file
diff --git a/modules/interfaces/gatewayInterface.py b/modules/interfaces/serviceAppClass.py
similarity index 54%
rename from modules/interfaces/gatewayInterface.py
rename to modules/interfaces/serviceAppClass.py
index 1d4268eb..ae298165 100644
--- a/modules/interfaces/gatewayInterface.py
+++ b/modules/interfaces/serviceAppClass.py
@@ -3,7 +3,7 @@ Interface to the Gateway system.
Manages users and mandates for authentication.
"""
-from datetime import datetime
+from datetime import datetime, timedelta
import os
import logging
from typing import Dict, Any, List, Optional, Union
@@ -13,12 +13,15 @@ from passlib.context import CryptContext
from modules.connectors.connectorDbJson import DatabaseConnector
from modules.shared.configuration import APP_CONFIG
-from modules.interfaces.gatewayAccess import GatewayAccess
-from modules.interfaces.gatewayModel import User, Mandate, UserInDB, UserConnection
+from modules.interfaces.serviceAppAccess import AppAccess
+from modules.interfaces.serviceAppModel import (
+ User, Mandate, UserInDB, UserConnection,
+ Session, AuthEvent, AuthAuthority, UserPrivilege,
+ ConnectionStatus
+)
logger = logging.getLogger(__name__)
-
# Singleton factory for GatewayInterface instances per context
_gatewayInterfaces = {}
@@ -28,7 +31,6 @@ _rootGatewayInterface = None
# Password-Hashing
pwdContext = CryptContext(schemes=["argon2"], deprecated="auto")
-
class GatewayInterface:
"""
Interface to the Gateway system.
@@ -40,6 +42,7 @@ class GatewayInterface:
# Initialize variables
self.currentUser = currentUser
self.userId = currentUser.get("id") if currentUser else None
+ self.mandateId = currentUser.get("mandateId") if currentUser else None
self.access = None # Will be set when user context is provided
# Initialize database
@@ -60,26 +63,27 @@ class GatewayInterface:
self.currentUser = currentUser
self.userId = currentUser.get("id")
+ self.mandateId = currentUser.get("mandateId")
- if not self.userId:
- raise ValueError("Invalid user context: id is required")
+ if not self.userId or not self.mandateId:
+ raise ValueError("Invalid user context: id and mandateId are required")
# Add language settings
self.userLanguage = currentUser.get("language", "en") # Default user language
# Initialize access control with user context
- self.access = GatewayAccess(self.currentUser, self.db)
+ self.access = AppAccess(self.currentUser, self.db)
- logger.debug(f"User context set: userId={self.userId}")
+ logger.debug(f"User context set: userId={self.userId}, mandateId={self.mandateId}")
def _initializeDatabase(self):
"""Initializes the database connection."""
try:
# Get configuration values with defaults
- dbHost = APP_CONFIG.get("DB_GATEWAY_HOST", "data")
- dbDatabase = APP_CONFIG.get("DB_GATEWAY_DATABASE", "gateway")
- dbUser = APP_CONFIG.get("DB_GATEWAY_USER")
- dbPassword = APP_CONFIG.get("DB_GATEWAY_PASSWORD_SECRET")
+ dbHost = APP_CONFIG.get("DB_APP_HOST", "_no_config_default_data")
+ dbDatabase = APP_CONFIG.get("DB_APP_DATABASE", "app")
+ dbUser = APP_CONFIG.get("DB_APP_USER")
+ dbPassword = APP_CONFIG.get("DB_APP_PASSWORD_SECRET")
# Ensure the database directory exists
os.makedirs(dbHost, exist_ok=True)
@@ -97,6 +101,7 @@ class GatewayInterface:
raise
def _initRecords(self):
+ """Initialize standard records if they don't exist."""
self._initRootMandate()
self._initAdminUser()
@@ -106,18 +111,18 @@ class GatewayInterface:
mandates = self.db.getRecordset("mandates")
if existingMandateId is None or not mandates:
logger.info("Creating Root mandate")
- rootMandate = {
- "name": "Root",
- "language": "en"
- }
- createdMandate = self.db.recordCreate("mandates", rootMandate)
+ rootMandate = Mandate(
+ name="Root",
+ language="en"
+ )
+ createdMandate = self.db.recordCreate("mandates", rootMandate.model_dump())
logger.info(f"Root mandate created with ID {createdMandate['id']}")
# Register the initial ID
self.db._registerInitialId("mandates", createdMandate['id'])
# Update mandate context
- self.currentUser["mandateId"] = createdMandate['id']
+ self.mandateId = createdMandate['id']
def _initAdminUser(self):
"""Creates the Admin user if it doesn't exist."""
@@ -125,18 +130,19 @@ class GatewayInterface:
users = self.db.getRecordset("users")
if existingUserId is None or not users:
logger.info("Creating Admin user")
- adminUser = {
- "mandateId": self.getInitialId("mandates"),
- "username": "admin",
- "email": "admin@example.com",
- "fullName": "Administrator",
- "disabled": False,
- "language": "en",
- "privilege": "sysadmin",
- "authenticationAuthority": "local",
- "hashedPassword": self._getPasswordHash("The 1st Poweron Admin") # Use a secure password in production!
- }
- createdUser = self.db.recordCreate("users", adminUser)
+ adminUser = UserInDB(
+ mandateId=self.getInitialId("mandates"),
+ username="admin",
+ email="admin@example.com",
+ fullName="Administrator",
+ disabled=False,
+ language="en",
+ privilege=UserPrivilege.SYSADMIN,
+ authenticationAuthority=AuthAuthority.LOCAL,
+ hashedPassword=self._getPasswordHash("The 1st Poweron Admin"), # Use a secure password in production!
+ connections=[]
+ )
+ createdUser = self.db.recordCreate("users", adminUser.model_dump())
logger.info(f"Admin user created with ID {createdUser['id']}")
# Register the initial ID
@@ -185,97 +191,6 @@ class GatewayInterface:
"""Checks if the password matches the hash."""
return pwdContext.verify(plainPassword, hashedPassword)
- # Mandate methods
-
- def getAllMandates(self) -> List[Mandate]:
- """Returns mandates based on user access level."""
- allMandates = self.db.getRecordset("mandates")
- filteredMandates = self._uam("mandates", allMandates)
- return [Mandate(**mandate) for mandate in filteredMandates]
-
- def getMandate(self, mandateId: str) -> Optional[Mandate]:
- """Returns a mandate by ID if user has access."""
- mandates = self.db.getRecordset("mandates", recordFilter={"id": mandateId})
- if not mandates:
- return None
-
- filteredMandates = self._uam("mandates", mandates)
- if not filteredMandates:
- return None
-
- return Mandate(**filteredMandates[0])
-
- def createMandate(self, name: str, language: str = "en") -> Mandate:
- """Creates a new mandate if user has permission."""
- if not self._canModify("mandates"):
- raise PermissionError("No permission to create mandates")
-
- # Create and validate mandate data using Pydantic model
- mandateData = Mandate(
- name=name,
- language=language
- )
-
- # Convert to dict for database storage
- created = self.db.recordCreate("mandates", mandateData.model_dump())
- return Mandate(**created)
-
- def updateMandate(self, mandateId: str, mandateData: Dict[str, Any]) -> Mandate:
- """Updates a mandate if user has access."""
- # Check if the mandate exists and user has access
- mandate = self.getMandate(mandateId)
- if not mandate:
- raise ValueError(f"Mandate with ID {mandateId} not found")
-
- if not self._canModify("mandates", mandateId):
- raise PermissionError(f"No permission to update mandate {mandateId}")
-
- # Validate update data using Pydantic model
- try:
- # Create a new Mandate instance with existing data plus updates
- updatedMandate = Mandate(**{**mandate.model_dump(), **mandateData})
- except Exception as e:
- raise ValueError(f"Invalid mandate data: {str(e)}")
-
- # Update the mandate
- updated = self.db.recordModify("mandates", mandateId, updatedMandate.model_dump())
- return Mandate(**updated)
-
- def deleteMandate(self, mandateId: str) -> bool:
- """
- Deletes a mandate and all associated users and data if user has permission.
- """
- # Check if the mandate exists and user has access
- mandate = self.getMandate(mandateId)
- if not mandate:
- return False
-
- if not self._canModify("mandates", mandateId):
- raise PermissionError(f"No permission to delete mandate {mandateId}")
-
- # Check if it's the initial mandate
- initialMandateId = self.getInitialId("mandates")
- if initialMandateId is not None and mandateId == initialMandateId:
- logger.warning(f"Attempt to delete the Root mandate was prevented")
- return False
-
- # Find all users of the mandate
- users = self.getUsersByMandate(mandateId)
-
- # Delete all users of the mandate and their associated data
- for user in users:
- self.deleteUser(user["id"])
-
- # Delete the mandate
- success = self.db.recordDelete("mandates", mandateId)
-
- if success:
- logger.info(f"Mandate with ID {mandateId} was successfully deleted")
- else:
- logger.error(f"Error deleting mandate with ID {mandateId}")
-
- return success
-
# User methods
def getAllUsers(self) -> List[User]:
@@ -283,12 +198,8 @@ class GatewayInterface:
allUsers = self.db.getRecordset("users")
filteredUsers = self._uam("users", allUsers)
- # Remove password hashes
- for user in filteredUsers:
- if "hashedPassword" in user:
- del user["hashedPassword"]
-
- return [User(**user) for user in filteredUsers]
+ # Convert to User models
+ return [User.from_dict(user) for user in filteredUsers]
def getUsersByMandate(self, mandateId: str) -> List[User]:
"""Returns users for a specific mandate if user has access."""
@@ -296,12 +207,8 @@ class GatewayInterface:
users = self.db.getRecordset("users", recordFilter={"mandateId": mandateId})
filteredUsers = self._uam("users", users)
- # Remove password hashes
- for user in filteredUsers:
- if "hashedPassword" in user:
- del user["hashedPassword"]
-
- return [User(**user) for user in filteredUsers]
+ # Convert to User models
+ return [User.from_dict(user) for user in filteredUsers]
def getUserByUsername(self, username: str) -> Optional[User]:
"""Returns a user by username."""
@@ -315,8 +222,7 @@ class GatewayInterface:
for user in users:
if user.get("username") == username:
logger.info(f"Found user with username {username}")
- logger.debug(f"User fields: {list(user.keys())}")
- return User(**user)
+ return User.from_dict(user)
logger.info(f"No user found with username {username}")
return None
@@ -335,17 +241,10 @@ class GatewayInterface:
if not filteredUsers:
return None
- user = filteredUsers[0]
-
- # Remove password hash
- if "hashedPassword" in user:
- userCopy = user.copy()
- del userCopy["hashedPassword"]
- return User(**userCopy)
-
- return User(**user)
+ return User.from_dict(filteredUsers[0])
- def addUserConnection(self, userId: str, authority: str, externalId: str, externalUsername: str, externalEmail: Optional[str] = None) -> UserConnection:
+ def addUserConnection(self, userId: str, authority: AuthAuthority, externalId: str,
+ externalUsername: str, externalEmail: Optional[str] = None) -> UserConnection:
"""Add a new connection to an external service for a user"""
try:
# Get user
@@ -363,7 +262,8 @@ class GatewayInterface:
authority=authority,
externalId=externalId,
externalUsername=externalUsername,
- externalEmail=externalEmail
+ externalEmail=externalEmail,
+ status=ConnectionStatus.ACTIVE
)
# Add connection to user
@@ -396,8 +296,8 @@ class GatewayInterface:
logger.error(f"Error removing user connection: {str(e)}")
raise ValueError(f"Failed to remove user connection: {str(e)}")
- def authenticateUser(self, username: str, password: str = None, authority: str = "local", external_token: str = None) -> Optional[User]:
- """Authenticates a user by username and password or external authority."""
+ def authenticateLocalUser(self, username: str, password: str) -> Optional[User]:
+ """Authenticates a user by username and password using local authentication."""
# Clear the users table from cache and reload it
if "users" in self.db._tablesCache:
del self.db._tablesCache["users"]
@@ -412,83 +312,41 @@ class GatewayInterface:
if user.disabled:
raise ValueError("User is disabled")
- # Handle authentication based on authority
- if authority == "local":
- if not password:
- raise ValueError("Password is required for local authentication")
- # Get the full user record with password hash for verification
- userWithPassword = UserInDB(**self.db.getRecordset("users", recordFilter={"id": user.id})[0])
- if not self._verifyPassword(password, userWithPassword.hashedPassword):
- raise ValueError("Invalid password")
- elif authority in ["microsoft", "google"]: # Support for multiple external auth providers
- # Verify that the user has the correct authentication authority
- if user.authenticationAuthority != authority:
- raise ValueError(f"User does not have {authority} authentication enabled")
+ # Verify that the user has local authentication enabled
+ if user.authenticationAuthority != AuthAuthority.LOCAL:
+ raise ValueError("User does not have local authentication enabled")
- # Verify that the user has a valid connection for this authority
- if not any(conn.authority == authority for conn in user.connections):
- raise ValueError(f"User does not have a valid {authority} connection")
-
- # Verify the external token
- if not external_token:
- raise ValueError(f"External token is required for {authority} authentication")
-
- # Get the appropriate auth service
- if authority == "microsoft":
- from .msftInterface import getInterface as getMsftInterface
- auth_service = getMsftInterface({"_mandateId": user._mandateId, "id": user.id})
- elif authority == "google":
- from .googleInterface import getInterface as getGoogleInterface
- auth_service = getGoogleInterface({"_mandateId": user._mandateId, "id": user.id})
- else:
- raise ValueError(f"Unsupported authentication authority: {authority}")
-
- # Verify the token
- if not auth_service.verifyToken(external_token):
- raise ValueError(f"Invalid or expired {authority} token")
- else:
- raise ValueError(f"Unknown authentication authority: {authority}")
+ # Get the full user record with password hash for verification
+ userWithPassword = UserInDB(**self.db.getRecordset("users", recordFilter={"id": user.id})[0])
+ if not self._verifyPassword(password, userWithPassword.hashedPassword):
+ raise ValueError("Invalid password")
return user
- def createUser(self, username: str, password: str = None, email: str = None, fullName: str = None,
- language: str = "en", disabled: bool = False,
- privilege: str = "user", authenticationAuthority: str = "local",
- externalId: str = None, externalUsername: str = None, externalEmail: str = None) -> User:
+ def createUser(self, username: str, password: str = None, email: str = None,
+ fullName: str = None, language: str = "en", disabled: bool = False,
+ privilege: UserPrivilege = UserPrivilege.USER,
+ authenticationAuthority: AuthAuthority = AuthAuthority.LOCAL,
+ externalId: str = None, externalUsername: str = None,
+ externalEmail: str = None) -> User:
"""Create a new user with optional external connection"""
try:
- # Validate username
- if not username:
- raise ValueError("Username is required")
-
- # Check if user already exists with the same authentication authority
- existingUser = self.getUserByUsername(username)
- if existingUser and existingUser.authenticationAuthority == authenticationAuthority:
- raise ValueError(f"Username '{username}' already exists with {authenticationAuthority} authentication")
-
- # Validate password for local authentication
- if authenticationAuthority == "local":
- if not password:
- raise ValueError("Password is required for local authentication")
- if len(password) < 8:
- raise ValueError("Password must be at least 8 characters long")
-
# Create user data using UserInDB model
userData = UserInDB(
username=username,
email=email,
fullName=fullName,
language=language,
- mandateId=self.currentUser.get("mandateId"),
+ mandateId=self.mandateId,
disabled=disabled,
privilege=privilege,
authenticationAuthority=authenticationAuthority,
- hashedPassword=self._getPasswordHash(password) if authenticationAuthority == "local" else None,
+ hashedPassword=self._getPasswordHash(password) if password else None,
connections=[]
)
# Create user record
- createdRecord = self.db.recordCreate("users", userData.model_dump(exclude_none=True))
+ createdRecord = self.db.recordCreate("users", userData.to_dict())
if not createdRecord or not createdRecord.get("id"):
raise ValueError("Failed to create user record")
@@ -505,16 +363,13 @@ class GatewayInterface:
# Get created user using the returned ID
createdUser = self.db.getRecordset("users", recordFilter={"id": createdRecord["id"]})
if not createdUser or len(createdUser) == 0:
- # Try to get user by username as fallback
- createdUser = self.db.getRecordset("users", recordFilter={"username": userData.username})
- if not createdUser or len(createdUser) == 0:
- raise ValueError("Failed to retrieve created user")
+ raise ValueError("Failed to retrieve created user")
# Clear users table from cache
if hasattr(self.db, '_tablesCache') and "users" in self.db._tablesCache:
del self.db._tablesCache["users"]
- return User(**createdUser[0])
+ return User.from_dict(createdUser[0])
except ValueError as e:
logger.error(f"Error creating user: {str(e)}")
@@ -522,49 +377,34 @@ class GatewayInterface:
except Exception as e:
logger.error(f"Unexpected error creating user: {str(e)}")
raise ValueError(f"Failed to create user: {str(e)}")
-
- def updateUser(self, userId: str, userData: Dict[str, Any]) -> User:
- """Updates a user if current user has permission."""
- # Check if the user exists and current user has access
- user = self.getUser(userId)
- if not user:
- # Try to get the raw user record for admin access check
- users = self.db.getRecordset("users", recordFilter={"id": userId})
- if not users:
- raise ValueError(f"User with ID {userId} not found")
-
- # Check if current user is admin/sysadmin
- if not self._canModify("users", userId):
- raise PermissionError(f"No permission to update user {userId}")
-
- user = users[0]
-
- # Check privilege escalation
- if "privilege" in userData:
- currentPrivilege = self.currentUser.get("privilege")
- targetPrivilege = userData["privilege"]
-
- if (targetPrivilege == "sysadmin" and currentPrivilege != "sysadmin") or (
- targetPrivilege == "admin" and currentPrivilege == "user"):
- raise PermissionError(f"Cannot escalate privilege to {targetPrivilege}")
-
- # If the password is being changed, hash it
- if "password" in userData:
- userData["hashedPassword"] = self._getPasswordHash(userData["password"])
- del userData["password"]
-
+
+ def updateUser(self, userId: str, updateData: Dict[str, Any]) -> User:
+ """Update a user's information"""
try:
- # Create a new UserInDB instance with existing data plus updates
- updatedUser = UserInDB(**{**user.model_dump(), **userData})
+ # Get user
+ user = self.getUser(userId)
+ if not user:
+ raise ValueError(f"User {userId} not found")
+
+ # Update user data using model
+ updatedData = user.model_dump()
+ updatedData.update(updateData)
+ updatedUser = User.from_dict(updatedData)
+
+ # Update user record
+ self.db.recordModify("users", userId, updatedUser.to_dict())
+
+ # Get updated user
+ updatedUser = self.getUser(userId)
+ if not updatedUser:
+ raise ValueError("Failed to retrieve updated user")
+
+ return updatedUser
+
except Exception as e:
- raise ValueError(f"Invalid user data: {str(e)}")
-
- # Update the user
- updated = self.db.recordModify("users", userId, updatedUser.model_dump(exclude_none=True))
-
- # Return User model without password hash
- return User(**updated)
-
+ logger.error(f"Error updating user: {str(e)}")
+ raise ValueError(f"Failed to update user: {str(e)}")
+
def disableUser(self, userId: str) -> User:
"""Disables a user if current user has permission."""
return self.updateUser(userId, {"disabled": True})
@@ -575,72 +415,121 @@ class GatewayInterface:
def _deleteUserReferencedData(self, userId: str) -> None:
"""Deletes all data associated with a user."""
- # Delete user attributes
try:
- attributes = self.db.getRecordset("attributes", recordFilter={"createdBy": userId})
- for attribute in attributes:
- self.db.recordDelete("attributes", attribute["id"])
- except Exception as e:
- logger.error(f"Error deleting attributes for user {userId}: {e}")
-
- logger.info(f"All referenced data for user {userId} has been deleted")
-
- def deleteUser(self, userId: str) -> bool:
- """Deletes a user and all associated data if current user has permission."""
- # Check if the user exists
- users = self.db.getRecordset("users", recordFilter={"id": userId})
- if not users:
- return False
+ # Delete user sessions
+ sessions = self.db.getRecordset("sessions", recordFilter={"userId": userId})
+ for session in sessions:
+ self.db.recordDelete("sessions", session["id"])
+ logger.debug(f"Deleted session {session['id']} for user {userId}")
- # Check if current user has permission
- if not self._canModify("users", userId):
- raise PermissionError(f"No permission to delete user {userId}")
-
- # Check if it's the initial user
- initialUserId = self.getInitialId("users")
- if initialUserId is not None and userId == initialUserId:
- logger.warning("Attempt to delete the Root Admin was prevented")
- return False
-
- # Delete all data associated with the user
- self._deleteUserReferencedData(userId)
-
- # Delete the user
- success = self.db.recordDelete("users", userId)
-
- if success:
- logger.info(f"User with ID {userId} was successfully deleted")
- else:
- logger.error(f"Error deleting user with ID {userId}")
-
- return success
-
- def setupLocalAuth(self, userId: str, password: str) -> User:
- """Set up local authentication for a user who registered with Microsoft"""
- try:
- # Get user
+ # Delete user auth events
+ events = self.db.getRecordset("auth_events", recordFilter={"userId": userId})
+ for event in events:
+ self.db.recordDelete("auth_events", event["id"])
+ logger.debug(f"Deleted auth event {event['id']} for user {userId}")
+
+ # Delete user connections
user = self.getUser(userId)
- if not user:
- raise ValueError(f"User {userId} not found")
-
- # Validate password
- if not password:
- raise ValueError("Password is required")
- if len(password) < 8:
- raise ValueError("Password must be at least 8 characters long")
-
- # Update user with local password
- userData = {
- "hashedPassword": self._getPasswordHash(password),
- "authenticationAuthority": "local" # Change to local auth
- }
+ if user and user.connections:
+ for conn in user.connections:
+ self.removeUserConnection(userId, conn.id)
+ logger.debug(f"Deleted connection {conn.id} for user {userId}")
- return self.updateUser(userId, userData)
+ logger.info(f"All referenced data for user {userId} has been deleted")
except Exception as e:
- logger.error(f"Error setting up local authentication: {str(e)}")
- raise ValueError(f"Failed to set up local authentication: {str(e)}")
+ logger.error(f"Error deleting referenced data for user {userId}: {str(e)}")
+ raise
+ # Mandate methods
+
+ def getAllMandates(self) -> List[Mandate]:
+ """Returns all mandates based on user access level."""
+ allMandates = self.db.getRecordset("mandates")
+ filteredMandates = self._uam("mandates", allMandates)
+ return [Mandate.from_dict(mandate) for mandate in filteredMandates]
+
+ def getMandate(self, mandateId: str) -> Optional[Mandate]:
+ """Returns a mandate by ID if user has access."""
+ mandates = self.db.getRecordset("mandates", recordFilter={"id": mandateId})
+ if not mandates:
+ return None
+
+ filteredMandates = self._uam("mandates", mandates)
+ if not filteredMandates:
+ return None
+
+ return Mandate.from_dict(filteredMandates[0])
+
+ def createMandate(self, name: str, language: str = "en") -> Mandate:
+ """Creates a new mandate if user has permission."""
+ if not self._canModify("mandates"):
+ raise PermissionError("No permission to create mandates")
+
+ # Create mandate data using model
+ mandateData = Mandate(
+ name=name,
+ language=language
+ )
+
+ # Create mandate record
+ createdRecord = self.db.recordCreate("mandates", mandateData.to_dict())
+ if not createdRecord or not createdRecord.get("id"):
+ raise ValueError("Failed to create mandate record")
+
+ return Mandate.from_dict(createdRecord)
+
+ def updateMandate(self, mandateId: str, updateData: Dict[str, Any]) -> Mandate:
+ """Updates a mandate if user has access."""
+ try:
+ # Get mandate
+ mandate = self.getMandate(mandateId)
+ if not mandate:
+ raise ValueError(f"Mandate {mandateId} not found")
+
+ # Update mandate data using model
+ updatedData = mandate.model_dump()
+ updatedData.update(updateData)
+ updatedMandate = Mandate.from_dict(updatedData)
+
+ # Update mandate record
+ self.db.recordModify("mandates", mandateId, updatedMandate.to_dict())
+
+ # Get updated mandate
+ updatedMandate = self.getMandate(mandateId)
+ if not updatedMandate:
+ raise ValueError("Failed to retrieve updated mandate")
+
+ return updatedMandate
+
+ except Exception as e:
+ logger.error(f"Error updating mandate: {str(e)}")
+ raise ValueError(f"Failed to update mandate: {str(e)}")
+
+ def deleteMandate(self, mandateId: str) -> bool:
+ """Deletes a mandate if user has access."""
+ try:
+ # Check if mandate exists and user has access
+ mandate = self.getMandate(mandateId)
+ if not mandate:
+ return False
+
+ if not self._canModify("mandates", mandateId):
+ raise PermissionError(f"No permission to delete mandate {mandateId}")
+
+ # Check if mandate has users
+ users = self.getUsersByMandate(mandateId)
+ if users:
+ raise ValueError(f"Cannot delete mandate {mandateId} with existing users")
+
+ # Delete mandate
+ return self.db.recordDelete("mandates", mandateId)
+
+ except Exception as e:
+ logger.error(f"Error deleting mandate: {str(e)}")
+ raise ValueError(f"Failed to delete mandate: {str(e)}")
+
+# Public Methods
def getInterface(currentUser: Dict[str, Any]) -> GatewayInterface:
"""
diff --git a/modules/interfaces/serviceAppModel.py b/modules/interfaces/serviceAppModel.py
new file mode 100644
index 00000000..2ed97791
--- /dev/null
+++ b/modules/interfaces/serviceAppModel.py
@@ -0,0 +1,211 @@
+"""
+Models for User Service
+"""
+
+import uuid
+from pydantic import BaseModel, Field, EmailStr
+from typing import List, Dict, Any, Optional
+from datetime import datetime
+from enum import Enum
+
+from modules.shared.attributeUtils import Label, BaseModelWithUI
+
+class AuthAuthority(str, Enum):
+ """Authentication authorities"""
+ LOCAL = "local"
+ MICROSOFT = "microsoft"
+ GOOGLE = "google"
+ EXTERNAL = "external"
+
+class UserPrivilege(str, Enum):
+ """User privilege levels"""
+ SYSADMIN = "sysadmin"
+ ADMIN = "admin"
+ USER = "user"
+
+class ConnectionStatus(str, Enum):
+ """Connection status"""
+ ACTIVE = "active"
+ EXPIRED = "expired"
+ REVOKED = "revoked"
+ PENDING = "pending"
+
+class Mandate(BaseModelWithUI):
+ """Data model for a mandate"""
+ id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Unique ID of the mandate")
+ name: str = Field(description="Name of the mandate")
+ language: str = Field(default="en", description="Default language of the mandate")
+
+ label: Label = Field(
+ default=Label(default="Mandate", translations={"en": "Mandate", "fr": "Mandat"}),
+ description="Label for the class"
+ )
+
+ fieldLabels: Dict[str, Label] = {
+ "id": Label(default="ID", translations={}),
+ "name": Label(default="Name of the mandate", translations={"en": "Mandate name", "fr": "Nom du mandat"}),
+ "language": Label(default="Language", translations={"en": "Language", "fr": "Langue"})
+ }
+
+ @classmethod
+ def get_validations(cls) -> Dict[str, Any]:
+ """Get validation rules for frontend"""
+ return {
+ "name": {
+ "required": True,
+ "minLength": 2,
+ "maxLength": 100
+ },
+ "language": {
+ "required": True,
+ "pattern": "^[a-z]{2}$"
+ }
+ }
+
+class UserConnection(BaseModelWithUI):
+ """Data model for a user's connection to an external service"""
+ id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Unique ID of the connection")
+ authority: AuthAuthority = Field(description="Authentication authority")
+ externalId: str = Field(description="User ID in the external system")
+ externalUsername: str = Field(description="Username in the external system")
+ externalEmail: Optional[EmailStr] = Field(None, description="Email in the external system")
+ status: ConnectionStatus = Field(default=ConnectionStatus.ACTIVE, description="Connection status")
+ connectedAt: datetime = Field(default_factory=datetime.now, description="When the connection was established")
+ lastChecked: datetime = Field(default_factory=datetime.now, description="When the connection was last verified")
+ expiresAt: Optional[datetime] = Field(None, description="When the connection expires")
+
+ label: Label = Field(
+ default=Label(default="User Connection", translations={"en": "User Connection", "fr": "Connexion utilisateur"}),
+ description="Label for the class"
+ )
+
+ fieldLabels: Dict[str, Label] = {
+ "id": Label(default="ID", translations={}),
+ "authority": Label(default="Authority", translations={"en": "Authority", "fr": "Autorité"}),
+ "externalId": Label(default="External ID", translations={"en": "External ID", "fr": "ID externe"}),
+ "externalUsername": Label(default="External Username", translations={"en": "External Username", "fr": "Nom d'utilisateur externe"}),
+ "externalEmail": Label(default="External Email", translations={"en": "External Email", "fr": "Email externe"}),
+ "status": Label(default="Status", translations={"en": "Status", "fr": "Statut"}),
+ "connectedAt": Label(default="Connected At", translations={"en": "Connected At", "fr": "Connecté le"}),
+ "lastChecked": Label(default="Last Checked", translations={"en": "Last Checked", "fr": "Dernière vérification"}),
+ "expiresAt": Label(default="Expires At", translations={"en": "Expires At", "fr": "Expire le"})
+ }
+
+class Session(BaseModelWithUI):
+ """Data model for user sessions"""
+ id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Unique session ID")
+ userId: str = Field(description="ID of the user")
+ tokenId: str = Field(description="ID of the associated token")
+ lastActivity: datetime = Field(default_factory=datetime.now, description="Last activity timestamp")
+ expiresAt: datetime = Field(description="When the session expires")
+ ipAddress: Optional[str] = Field(None, description="IP address of the session")
+ userAgent: Optional[str] = Field(None, description="User agent of the session")
+
+ label: Label = Field(
+ default=Label(default="Session", translations={"en": "Session", "fr": "Session"}),
+ description="Label for the class"
+ )
+
+ fieldLabels: Dict[str, Label] = {
+ "id": Label(default="ID", translations={}),
+ "userId": Label(default="User ID", translations={"en": "User ID", "fr": "ID utilisateur"}),
+ "tokenId": Label(default="Token ID", translations={"en": "Token ID", "fr": "ID du token"}),
+ "lastActivity": Label(default="Last Activity", translations={"en": "Last Activity", "fr": "Dernière activité"}),
+ "expiresAt": Label(default="Expires At", translations={"en": "Expires At", "fr": "Expire le"}),
+ "ipAddress": Label(default="IP Address", translations={"en": "IP Address", "fr": "Adresse IP"}),
+ "userAgent": Label(default="User Agent", translations={"en": "User Agent", "fr": "User Agent"})
+ }
+
+class AuthEvent(BaseModelWithUI):
+ """Data model for authentication events"""
+ id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Unique event ID")
+ userId: str = Field(description="ID of the user")
+ eventType: str = Field(description="Type of event (login, logout, etc.)")
+ details: Dict[str, Any] = Field(description="Event details")
+ timestamp: datetime = Field(default_factory=datetime.now, description="When the event occurred")
+ ipAddress: Optional[str] = Field(None, description="IP address of the event")
+ userAgent: Optional[str] = Field(None, description="User agent of the event")
+
+ label: Label = Field(
+ default=Label(default="Auth Event", translations={"en": "Auth Event", "fr": "Événement d'authentification"}),
+ description="Label for the class"
+ )
+
+ fieldLabels: Dict[str, Label] = {
+ "id": Label(default="ID", translations={}),
+ "userId": Label(default="User ID", translations={"en": "User ID", "fr": "ID utilisateur"}),
+ "eventType": Label(default="Event Type", translations={"en": "Event Type", "fr": "Type d'événement"}),
+ "details": Label(default="Details", translations={"en": "Details", "fr": "Détails"}),
+ "timestamp": Label(default="Timestamp", translations={"en": "Timestamp", "fr": "Horodatage"}),
+ "ipAddress": Label(default="IP Address", translations={"en": "IP Address", "fr": "Adresse IP"}),
+ "userAgent": Label(default="User Agent", translations={"en": "User Agent", "fr": "User Agent"})
+ }
+
+class User(BaseModelWithUI):
+ """Data model for a user"""
+ id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Unique ID of the user")
+ username: str = Field(description="Username for login")
+ email: Optional[EmailStr] = Field(None, description="Email address of the user")
+ fullName: Optional[str] = Field(None, description="Full name of the user")
+ language: str = Field(default="en", description="Preferred language of the user")
+ disabled: bool = Field(default=False, description="Indicates whether the user is disabled")
+ privilege: UserPrivilege = Field(default=UserPrivilege.USER, description="Permission level")
+ authenticationAuthority: AuthAuthority = Field(default=AuthAuthority.LOCAL, description="Primary authentication authority")
+ mandateId: str = Field(description="ID of the mandate this user belongs to")
+ connections: List[UserConnection] = Field(default_factory=list, description="List of external service connections")
+
+ label: Label = Field(
+ default=Label(default="User", translations={"en": "User", "fr": "Utilisateur"}),
+ description="Label for the class"
+ )
+
+ fieldLabels: Dict[str, Label] = {
+ "id": Label(default="ID", translations={}),
+ "username": Label(default="Username", translations={"en": "Username", "fr": "Nom d'utilisateur"}),
+ "email": Label(default="Email", translations={"en": "Email", "fr": "Email"}),
+ "fullName": Label(default="Full Name", translations={"en": "Full Name", "fr": "Nom complet"}),
+ "language": Label(default="Language", translations={"en": "Language", "fr": "Langue"}),
+ "disabled": Label(default="Disabled", translations={"en": "Disabled", "fr": "Désactivé"}),
+ "privilege": Label(default="Privilege", translations={"en": "Privilege", "fr": "Privilège"}),
+ "authenticationAuthority": Label(default="Auth Authority", translations={"en": "Auth Authority", "fr": "Autorité d'authentification"}),
+ "mandateId": Label(default="Mandate ID", translations={"en": "Mandate ID", "fr": "ID de mandat"}),
+ "connections": Label(default="Connections", translations={"en": "Connections", "fr": "Connexions"})
+ }
+
+ @classmethod
+ def get_validations(cls) -> Dict[str, Any]:
+ """Get validation rules for frontend"""
+ return {
+ "username": {
+ "required": True,
+ "minLength": 3,
+ "maxLength": 50,
+ "pattern": "^[a-zA-Z0-9_-]+$"
+ },
+ "email": {
+ "required": False,
+ "pattern": "^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\\.[a-zA-Z]{2,}$"
+ },
+ "fullName": {
+ "required": False,
+ "maxLength": 100
+ },
+ "language": {
+ "required": True,
+ "pattern": "^[a-z]{2}$"
+ }
+ }
+
+class UserInDB(User):
+ """Extended user class with password hash"""
+ hashedPassword: Optional[str] = Field(None, description="Hash of the user password")
+
+ label: Label = Field(
+ default=Label(default="User Access", translations={"en": "User Access", "fr": "Accès de l'utilisateur"}),
+ description="Label for the class"
+ )
+
+ fieldLabels: Dict[str, Label] = {
+ "hashedPassword": Label(default="Password hash", translations={"en": "Password hash", "fr": "Hachage de mot de passe"})
+ }
+
\ No newline at end of file
diff --git a/modules/interfaces/serviceAppTokens.py b/modules/interfaces/serviceAppTokens.py
new file mode 100644
index 00000000..7b2a8ea4
--- /dev/null
+++ b/modules/interfaces/serviceAppTokens.py
@@ -0,0 +1,55 @@
+"""
+Token models and management for external authentication services.
+"""
+
+import logging
+from pydantic import BaseModel
+from typing import Optional
+from datetime import datetime
+
+logger = logging.getLogger(__name__)
+
+class GoogleToken(BaseModel):
+ """Google OAuth token model"""
+ access_token: str
+ token_type: str = "bearer"
+ expires_at: float
+ refresh_token: Optional[str] = None
+
+class MsftToken(BaseModel):
+ """Microsoft OAuth token model"""
+ access_token: str
+ token_type: str = "bearer"
+ expires_at: float
+ refresh_token: Optional[str] = None
+
+class LocalToken(BaseModel):
+ """Local authentication token model"""
+ access_token: str
+ token_type: str = "bearer"
+ expires_at: float
+
+# Token management functions
+def saveToken(interface, tokenType: str, tokenData: dict) -> bool:
+ """Save token data for a specific service"""
+ try:
+ return interface.saveToken(f"tokens{tokenType}", tokenData)
+ except Exception as e:
+ logger.error(f"Error saving {tokenType} token: {str(e)}")
+ return False
+
+def getToken(interface, tokenType: str) -> Optional[dict]:
+ """Get token data for a specific service"""
+ try:
+ return interface.getToken(f"tokens{tokenType}")
+ except Exception as e:
+ logger.error(f"Error getting {tokenType} token: {str(e)}")
+ return None
+
+def deleteToken(interface, tokenType: str) -> bool:
+ """Delete token data for a specific service"""
+ try:
+ return interface.deleteToken(f"tokens{tokenType}")
+ except Exception as e:
+ logger.error(f"Error deleting {tokenType} token: {str(e)}")
+ return False
\ No newline at end of file
diff --git a/modules/interfaces/serviceChatAccess.py b/modules/interfaces/serviceChatAccess.py
new file mode 100644
index 00000000..119da202
--- /dev/null
+++ b/modules/interfaces/serviceChatAccess.py
@@ -0,0 +1,133 @@
+"""
+Access control module for Chat interface.
+Handles user access management and permission checks.
+"""
+
+from typing import Dict, Any, List, Optional
+from modules.interfaces.serviceAppModel import User, UserPrivilege
+
+class ChatAccess:
+ """
+ Access control class for Chat interface.
+ Handles user access management and permission checks.
+ """
+
+ def __init__(self, currentUser: User, db):
+ """Initialize with user context."""
+ self.currentUser = currentUser
+ self.mandateId = currentUser.mandateId
+ self.userId = currentUser.id
+
+ if not self.mandateId or not self.userId:
+ raise ValueError("Invalid user context: mandateId and userId are required")
+
+ self.db = db
+
+ def uam(self, table: str, recordset: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
+ """
+ Unified user access management function that filters data based on user privileges
+ and adds access control attributes.
+
+ Args:
+ table: Name of the table
+ recordset: Recordset to filter based on access rules
+
+ Returns:
+ Filtered recordset with access control attributes
+ """
+ userPrivilege = self.currentUser.privilege
+ filtered_records = []
+
+ # Apply filtering based on privilege
+ if userPrivilege == UserPrivilege.SYSADMIN:
+ filtered_records = recordset # System admins see all records
+ elif userPrivilege == UserPrivilege.ADMIN:
+ # Admins see records in their mandate
+ filtered_records = [r for r in recordset if r.get("mandateId","-") == self.mandateId]
+ else: # Regular users
+ # For prompts, users can see all prompts from their mandate
+ if table == "prompts":
+ filtered_records = [r for r in recordset if r.get("mandateId") == self.mandateId]
+ else:
+ # Users see only their records for other tables
+ filtered_records = [r for r in recordset
+ if r.get("mandateId","-") == self.mandateId and r.get("_createdBy") == self.userId]
+
+ # Add access control attributes to each record
+ for record in filtered_records:
+ record_id = record.get("id")
+
+ # Set access control flags based on user permissions
+ if table == "prompts":
+ record["_hideView"] = False # Everyone can view
+ record["_hideEdit"] = not self.canModify("prompts", record_id)
+ record["_hideDelete"] = not self.canModify("prompts", record_id)
+ elif table == "files":
+ record["_hideView"] = False # Everyone can view
+ record["_hideEdit"] = not self.canModify("files", record_id)
+ record["_hideDelete"] = not self.canModify("files", record_id)
+ record["_hideDownload"] = not self.canModify("files", record_id)
+ elif table == "workflows":
+ record["_hideView"] = False # Everyone can view
+ record["_hideEdit"] = not self.canModify("workflows", record_id)
+ record["_hideDelete"] = not self.canModify("workflows", record_id)
+ elif table == "workflowMessages":
+ record["_hideView"] = False # Everyone can view
+ record["_hideEdit"] = not self.canModify("workflows", record.get("workflowId"))
+ record["_hideDelete"] = not self.canModify("workflows", record.get("workflowId"))
+ elif table == "workflowLogs":
+ record["_hideView"] = False # Everyone can view
+ record["_hideEdit"] = not self.canModify("workflows", record.get("workflowId"))
+ record["_hideDelete"] = not self.canModify("workflows", record.get("workflowId"))
+ else:
+ # Default access control for other tables
+ record["_hideView"] = False
+ record["_hideEdit"] = not self.canModify(table, record_id)
+ record["_hideDelete"] = not self.canModify(table, record_id)
+
+ return filtered_records
+
+ def canModify(self, table: str, recordId: Optional[str] = None) -> bool:
+ """
+ Checks if the current user can modify (create/update/delete) records in a table.
+
+ Args:
+ table: Name of the table
+ recordId: Optional record ID for specific record check
+
+ Returns:
+ Boolean indicating permission
+ """
+ userPrivilege = self.currentUser.privilege
+
+ # System admins can modify anything
+ if userPrivilege == UserPrivilege.SYSADMIN:
+ return True
+
+ # For regular users and admins, check specific cases
+ if recordId is not None:
+ # Get the record to check ownership
+ records = self.db.getRecordset(table, recordFilter={"id": recordId})
+ if not records:
+ return False
+
+ record = records[0]
+
+ # Admins can modify anything in their mandate, if mandate is specified for a record
+ if userPrivilege == UserPrivilege.ADMIN and record.get("mandateId","-") == self.mandateId:
+ return True
+
+ # Regular users can only modify their own records
+ if (record.get("mandateId","-") == self.mandateId and
+ record.get("_createdBy") == self.userId):
+ return True
+
+ return False
+ else:
+ # For general modification permission (e.g., create)
+ # Admins can create anything in their mandate
+ if userPrivilege == UserPrivilege.ADMIN:
+ return True
+
+            # Regular users may create records in any table; no per-table
+            # restriction is enforced at this level.
+            return True
\ No newline at end of file
diff --git a/modules/interfaces/lucydomInterface.py b/modules/interfaces/serviceChatClass.py
similarity index 66%
rename from modules/interfaces/lucydomInterface.py
rename to modules/interfaces/serviceChatClass.py
index 90177b70..16cd0fc0 100644
--- a/modules/interfaces/lucydomInterface.py
+++ b/modules/interfaces/serviceChatClass.py
@@ -12,10 +12,11 @@ from typing import Dict, Any, List, Optional, Union
import hashlib
from modules.shared.mimeUtils import isTextMimeType
-from modules.interfaces.lucydomAccess import LucydomAccess
-from modules.interfaces.lucydomModel import (
- ChatWorkflow, ChatMessage, ChatLog, ChatStat,
- ChatDocument, UserInputRequest
+from modules.interfaces.serviceChatAccess import ChatAccess
+from modules.interfaces.serviceChatModel import (
+ ChatContent, ChatDocument, ChatStat, ChatMessage,
+ ChatLog, ChatWorkflow, Agent, AgentResponse,
+ TaskItem, TaskPlan, UserInputRequest
)
# DYNAMIC PART: Connectors to the Interface
@@ -26,8 +27,8 @@ from modules.connectors.connectorAiOpenai import ChatService
from modules.shared.configuration import APP_CONFIG
logger = logging.getLogger(__name__)
-# Singleton factory for Lucydom instances with AI service per context
-_lucydomInterfaces = {}
+# Singleton factory for Chat instances with AI service per context
+_chatInterfaces = {}
# Custom exceptions for file handling
class FileError(Exception):
@@ -50,14 +51,14 @@ class FileDeletionError(FileError):
"""Exception raised when there's an error deleting a file."""
pass
-class LucydomInterface:
+class ChatInterface:
"""
- Interface to LucyDOM database and AI Connectors.
+ Interface to Chat database and AI Connectors.
Uses the JSON connector for data access with added language support.
"""
def __init__(self):
- """Initializes the Lucydom Interface."""
+ """Initializes the Chat Interface."""
# Initialize database
self._initializeDatabase()
@@ -67,6 +68,7 @@ class LucydomInterface:
# Initialize variables
self.currentUser = None
self.userId = None
+ self.mandateId = None
self.access = None # Will be set when user context is provided
self.aiService = None # Will be set when user context is provided
@@ -78,7 +80,7 @@ class LucydomInterface:
self.currentUser = currentUser
self.userId = currentUser.get("id")
-
+ self.mandateId = currentUser.get("mandateId")
if not self.userId:
raise ValueError("Invalid user context: id is required")
@@ -86,7 +88,7 @@ class LucydomInterface:
self.userLanguage = currentUser.get("language", "en") # Default user language
# Initialize access control with user context
- self.access = LucydomAccess(self.currentUser, self.db)
+ self.access = ChatAccess(self.currentUser, self.db)
# Initialize AI service
self.aiService = ChatService()
@@ -97,10 +99,10 @@ class LucydomInterface:
"""Initializes the database connection."""
try:
# Get configuration values with defaults
- dbHost = APP_CONFIG.get("DB_LUCYDOM_HOST", "data")
- dbDatabase = APP_CONFIG.get("DB_LUCYDOM_DATABASE", "lucydom")
- dbUser = APP_CONFIG.get("DB_LUCYDOM_USER")
- dbPassword = APP_CONFIG.get("DB_LUCYDOM_PASSWORD_SECRET")
+ dbHost = APP_CONFIG.get("DB_CHAT_HOST", "_no_config_default_data")
+ dbDatabase = APP_CONFIG.get("DB_CHAT_DATABASE", "chat")
+ dbUser = APP_CONFIG.get("DB_CHAT_USER")
+ dbPassword = APP_CONFIG.get("DB_CHAT_PASSWORD_SECRET")
# Ensure the database directory exists
os.makedirs(dbHost, exist_ok=True)
@@ -232,447 +234,6 @@ class LucydomInterface:
"""Returns the current timestamp in ISO format"""
return datetime.now().isoformat()
- # Prompt methods
-
- def getAllPrompts(self) -> List[Dict[str, Any]]:
- """Returns prompts based on user access level."""
- allPrompts = self.db.getRecordset("prompts")
- return self._uam("prompts", allPrompts)
-
- def getPrompt(self, promptId: str) -> Optional[Dict[str, Any]]:
- """Returns a prompt by ID if user has access."""
- prompts = self.db.getRecordset("prompts", recordFilter={"id": promptId})
- if not prompts:
- return None
-
- filteredPrompts = self._uam("prompts", prompts)
- return filteredPrompts[0] if filteredPrompts else None
-
- def createPrompt(self, content: str, name: str) -> Dict[str, Any]:
- """Creates a new prompt if user has permission."""
- if not self._canModify("prompts"):
- raise PermissionError("No permission to create prompts")
-
- promptData = {
- "content": content,
- "name": name,
- "createdAt": self._getCurrentTimestamp()
- }
-
- return self.db.recordCreate("prompts", promptData)
-
- def updatePrompt(self, promptId: str, content: str = None, name: str = None) -> Dict[str, Any]:
- """Updates a prompt if user has access."""
- # Check if the prompt exists and user has access
- prompt = self.getPrompt(promptId)
- if not prompt:
- return None
-
- if not self._canModify("prompts", promptId):
- raise PermissionError(f"No permission to update prompt {promptId}")
-
- # Prepare data for update
- promptData = {}
-
- if content is not None:
- promptData["content"] = content
- if name is not None:
- promptData["name"] = name
-
- # Update prompt
- return self.db.recordModify("prompts", promptId, promptData)
-
- def deletePrompt(self, promptId: str) -> bool:
- """Deletes a prompt if user has access."""
- # Check if the prompt exists and user has access
- prompt = self.getPrompt(promptId)
- if not prompt:
- return False
-
- if not self._canModify("prompts", promptId):
- raise PermissionError(f"No permission to delete prompt {promptId}")
-
- return self.db.recordDelete("prompts", promptId)
-
- # File Utilities
-
- def calculateFileHash(self, fileContent: bytes) -> str:
- """Calculates a SHA-256 hash for the file content"""
- return hashlib.sha256(fileContent).hexdigest()
-
- def checkForDuplicateFile(self, fileHash: str) -> Optional[Dict[str, Any]]:
- """Checks if a file with the same hash already exists for the current user and mandate."""
- files = self.db.getRecordset("files", recordFilter={
- "fileHash": fileHash,
- "mandateId": self.currentUser.get("mandateId"),
- "_createdBy": self.currentUser.get("id")
- })
- if files:
- return files[0]
- return None
-
- def getMimeType(self, filename: str) -> str:
- """Determines the MIME type based on the file extension."""
- import os
- ext = os.path.splitext(filename)[1].lower()[1:]
- extensionToMime = {
- "pdf": "application/pdf",
- "docx": "application/vnd.openxmlformats-officedocument.wordprocessingml.document",
- "doc": "application/msword",
- "xlsx": "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
- "xls": "application/vnd.ms-excel",
- "pptx": "application/vnd.openxmlformats-officedocument.presentationml.presentation",
- "ppt": "application/vnd.ms-powerpoint",
- "csv": "text/csv",
- "txt": "text/plain",
- "json": "application/json",
- "xml": "application/xml",
- "html": "text/html",
- "htm": "text/html",
- "jpg": "image/jpeg",
- "jpeg": "image/jpeg",
- "png": "image/png",
- "gif": "image/gif",
- "webp": "image/webp",
- "svg": "image/svg+xml",
- "py": "text/x-python",
- "js": "application/javascript",
- "css": "text/css"
- }
- return extensionToMime.get(ext.lower(), "application/octet-stream")
-
- # File methods - metadata-based operations
-
- def getAllFiles(self) -> List[Dict[str, Any]]:
- """Returns files based on user access level."""
- allFiles = self.db.getRecordset("files")
- return self._uam("files", allFiles)
-
- def getFile(self, fileId: str) -> Optional[Dict[str, Any]]:
- """Returns a file by ID if user has access."""
- files = self.db.getRecordset("files", recordFilter={"id": fileId})
- if not files:
- return None
-
- filteredFiles = self._uam("files", files)
- return filteredFiles[0] if filteredFiles else None
-
- def createFile(self, name: str, mimeType: str, size: int = None, fileHash: str = None) -> Dict[str, Any]:
- """Creates a new file entry if user has permission."""
- if not self._canModify("files"):
- raise PermissionError("No permission to create files")
-
- fileData = {
- "mandateId": self.currentUser.get("mandateId"),
- "name": name,
- "mimeType": mimeType,
- "size": size,
- "fileHash": fileHash,
- "creationDate": self._getCurrentTimestamp()
- }
- return self.db.recordCreate("files", fileData)
-
- def updateFile(self, fileId: str, updateData: Dict[str, Any]) -> Dict[str, Any]:
- """Updates file metadata if user has access."""
- # Check if the file exists and user has access
- file = self.getFile(fileId)
- if not file:
- raise FileNotFoundError(f"File with ID {fileId} not found")
-
- if not self._canModify("files", fileId):
- raise PermissionError(f"No permission to update file {fileId}")
-
- # Update file
- return self.db.recordModify("files", fileId, updateData)
-
- def deleteFile(self, fileId: str) -> bool:
- """Deletes a file if user has access."""
- try:
- # Check if the file exists and user has access
- file = self.getFile(fileId)
-
- if not file:
- raise FileNotFoundError(f"File with ID {fileId} not found")
-
- if not self._canModify("files", fileId):
- raise PermissionError(f"No permission to delete file {fileId}")
-
- # Check for other references to this file (by hash)
- fileHash = file.get("fileHash")
- if fileHash:
- otherReferences = [f for f in self.db.getRecordset("files", recordFilter={"fileHash": fileHash})
- if f.get("id") != fileId]
-
- # Only delete associated fileData if no other references exist
- if not otherReferences:
- try:
- fileDataEntries = self.db.getRecordset("fileData", recordFilter={"id": fileId})
- if fileDataEntries:
- self.db.recordDelete("fileData", fileId)
- logger.debug(f"FileData for file {fileId} deleted")
- except Exception as e:
- logger.warning(f"Error deleting FileData for file {fileId}: {str(e)}")
-
- # Delete the FileItem entry
- return self.db.recordDelete("files", fileId)
-
- except FileNotFoundError as e:
- raise
- except FilePermissionError as e:
- raise
- except Exception as e:
- logger.error(f"Error deleting file {fileId}: {str(e)}")
- raise FileDeletionError(f"Error deleting file: {str(e)}")
-
- # FileData methods - data operations
-
- def createFileData(self, fileId: str, data: bytes) -> bool:
- """Stores the binary data of a file in the database."""
- try:
- import base64
-
- # Check file access
- file = self.getFile(fileId)
- if not file:
- logger.error(f"File with ID {fileId} not found when storing data")
- return False
-
- # Determine if this is a text-based format
- mimeType = file.get("mimeType", "application/octet-stream")
- isTextFormat = isTextMimeType(mimeType)
-
- base64Encoded = False
- fileData = None
-
- if isTextFormat:
- # Try to decode as text
- try:
- textContent = data.decode('utf-8')
- fileData = textContent
- base64Encoded = False
- logger.debug(f"Stored file {fileId} as text")
- except UnicodeDecodeError:
- # Fallback to base64 if text decoding fails
- encodedData = base64.b64encode(data).decode('utf-8')
- fileData = encodedData
- base64Encoded = True
- logger.warning(f"Failed to decode text file {fileId}, falling back to base64")
- else:
- # Binary format - always use base64
- encodedData = base64.b64encode(data).decode('utf-8')
- fileData = encodedData
- base64Encoded = True
- logger.debug(f"Stored file {fileId} as base64")
-
- # Create the fileData record with data and encoding flag
- fileDataObj = {
- "id": fileId,
- "data": fileData,
- "base64Encoded": base64Encoded
- }
-
- self.db.recordCreate("fileData", fileDataObj)
- logger.debug(f"Successfully stored data for file {fileId} (base64Encoded: {base64Encoded})")
- return True
- except Exception as e:
- logger.error(f"Error storing data for file {fileId}: {str(e)}")
- return False
-
- def getFileData(self, fileId: str) -> Optional[bytes]:
- """Returns the binary data of a file if user has access."""
- # Check file access
- file = self.getFile(fileId)
- if not file:
- logger.warning(f"No access to file ID {fileId}")
- return None
-
- import base64
-
- fileDataEntries = self.db.getRecordset("fileData", recordFilter={"id": fileId})
- if not fileDataEntries:
- logger.warning(f"No data found for file ID {fileId}")
- return None
-
- fileDataEntry = fileDataEntries[0]
- if "data" not in fileDataEntry:
- logger.warning(f"No data field in file data for ID {fileId}")
- return None
-
- data = fileDataEntry["data"]
- base64Encoded = fileDataEntry.get("base64Encoded", False)
-
- try:
- if base64Encoded:
- # Decode base64 to bytes
- return base64.b64decode(data)
- else:
- # Convert text to bytes
- return data.encode('utf-8')
- except Exception as e:
- logger.error(f"Error processing file data for {fileId}: {str(e)}")
- return None
-
- def updateFileData(self, fileId: str, data: Union[bytes, str]) -> bool:
- """Updates file data if user has access."""
- # Check file access
- file = self.getFile(fileId)
- if not file:
- logger.error(f"File with ID {fileId} not found when updating data")
- return False
-
- if not self._canModify("files", fileId):
- logger.error(f"No permission to update file data for {fileId}")
- return False
-
- try:
- import base64
-
- # Determine if this is a text-based format
- mimeType = file.get("mimeType", "application/octet-stream")
- isTextFormat = isTextMimeType(mimeType)
-
- base64Encoded = False
- fileData = None
-
- # Convert input data to the right format
- if isinstance(data, bytes):
- if isTextFormat:
- try:
- # Try to convert bytes to text
- fileData = data.decode('utf-8')
- base64Encoded = False
- except UnicodeDecodeError:
- # Fallback to base64 if text decoding fails
- fileData = base64.b64encode(data).decode('utf-8')
- base64Encoded = True
- else:
- # Binary format - use base64
- fileData = base64.b64encode(data).decode('utf-8')
- base64Encoded = True
- elif isinstance(data, str):
- if isTextFormat:
- # Text format - store as text
- fileData = data
- base64Encoded = False
- else:
- # Check if it's already base64 encoded
- try:
- # Try to decode as base64 to validate
- base64.b64decode(data)
- fileData = data
- base64Encoded = True
- except:
- # Not valid base64, encode the string
- fileData = base64.b64encode(data.encode('utf-8')).decode('utf-8')
- base64Encoded = True
- else:
- # Convert to string first
- stringData = str(data)
- if isTextFormat:
- fileData = stringData
- base64Encoded = False
- else:
- fileData = base64.b64encode(stringData.encode('utf-8')).decode('utf-8')
- base64Encoded = True
-
- # Check if a record already exists
- fileDataEntries = self.db.getRecordset("fileData", recordFilter={"id": fileId})
-
- dataUpdate = {
- "data": fileData,
- "base64Encoded": base64Encoded
- }
-
- if fileDataEntries:
- # Update the existing record
- self.db.recordModify("fileData", fileId, dataUpdate)
- logger.debug(f"Updated file data for file ID {fileId} (base64Encoded: {base64Encoded})")
- else:
- # Create a new record
- dataUpdate["id"] = fileId
- self.db.recordCreate("fileData", dataUpdate)
- logger.debug(f"Created new file data for file ID {fileId} (base64Encoded: {base64Encoded})")
-
- return True
- except Exception as e:
- logger.error(f"Error updating data for file {fileId}: {str(e)}")
- return False
-
- def saveUploadedFile(self, fileContent: bytes, fileName: str) -> Dict[str, Any]:
- """Saves an uploaded file if user has permission."""
- try:
- # Check file creation permission
- if not self._canModify("files"):
- raise PermissionError("No permission to upload files")
-
- logger.debug(f"Starting upload process for file: {fileName}")
-
- if not isinstance(fileContent, bytes):
- logger.error(f"Invalid fileContent type: {type(fileContent)}")
- raise ValueError(f"fileContent must be bytes, got {type(fileContent)}")
-
- # Calculate file hash for deduplication
- fileHash = self.calculateFileHash(fileContent)
- logger.debug(f"Calculated file hash: {fileHash}")
-
- # Check for duplicate within same user/mandate
- existingFile = self.checkForDuplicateFile(fileHash)
- if existingFile:
- logger.debug(f"Duplicate found for {fileName}: {existingFile['id']}")
- return existingFile
-
- # Determine MIME type and size
- mimeType = self.getMimeType(fileName)
- fileSize = len(fileContent)
-
- # Save metadata
- logger.debug(f"Saving file metadata to database for file: {fileName}")
- dbFile = self.createFile(
- name=fileName,
- mimeType=mimeType,
- size=fileSize,
- fileHash=fileHash
- )
-
- # Save binary data
- logger.debug(f"Saving file content to database for file: {fileName}")
- self.createFileData(dbFile["id"], fileContent)
-
- logger.debug(f"File upload process completed for: {fileName}")
- return dbFile
-
- except Exception as e:
- logger.error(f"Error in saveUploadedFile for {fileName}: {str(e)}", exc_info=True)
- raise FileStorageError(f"Error saving file: {str(e)}")
-
- def downloadFile(self, fileId: str) -> Optional[Dict[str, Any]]:
- """Returns a file for download if user has access."""
- try:
- # Check file access
- file = self.getFile(fileId)
-
- if not file:
- raise FileNotFoundError(f"File with ID {fileId} not found")
-
- # Get binary data
- fileContent = self.getFileData(fileId)
-
- if fileContent is None:
- raise FileNotFoundError(f"Binary data for file with ID {fileId} not found")
-
- return {
- "id": fileId,
- "name": file.get("name", f"file_{fileId}"),
- "contentType": file.get("mimeType", "application/octet-stream"),
- "size": file.get("size", len(fileContent)),
- "content": fileContent
- }
- except FileNotFoundError as e:
- raise
- except Exception as e:
- logger.error(f"Error downloading file {fileId}: {str(e)}")
- raise FileError(f"Error downloading file: {str(e)}")
-
# Workflow methods
def getAllWorkflows(self) -> List[Dict[str, Any]]:
@@ -1291,17 +852,17 @@ class LucydomInterface:
return None
-def getInterface(currentUser: Dict[str, Any] = None) -> 'LucydomInterface':
+def getInterface(currentUser: Dict[str, Any] = None) -> 'ChatInterface':
"""
- Returns a LucydomInterface instance.
+ Returns a ChatInterface instance.
If currentUser is provided, initializes with user context.
Otherwise, returns an instance with only database access.
"""
# Create new instance if not exists
- if "default" not in _lucydomInterfaces:
- _lucydomInterfaces["default"] = LucydomInterface()
+ if "default" not in _chatInterfaces:
+ _chatInterfaces["default"] = ChatInterface()
- interface = _lucydomInterfaces["default"]
+ interface = _chatInterfaces["default"]
if currentUser:
interface.setUserContext(currentUser)
diff --git a/modules/interfaces/serviceChatModel.py b/modules/interfaces/serviceChatModel.py
new file mode 100644
index 00000000..b220b6c7
--- /dev/null
+++ b/modules/interfaces/serviceChatModel.py
@@ -0,0 +1,130 @@
+"""
+Chat model classes for the chat system.
+"""
+
+from pydantic import BaseModel, Field
+from typing import List, Dict, Any, Optional
+from datetime import datetime
+import uuid
+
+from modules.shared.attributeUtils import Label, BaseModelWithUI
+
+
+# WORKFLOW MODELS
+
+class ChatContent(BaseModelWithUI):
+ """Data model for chat content"""
+ sequenceNr: int = Field(description="Sequence number of the content")
+ name: str = Field(description="Name of the content")
+ data: str = Field(description="The actual content data")
+ mimeType: str = Field(description="MIME type of the content")
+ metadata: Dict[str, Any] = Field(default_factory=dict, description="Additional metadata")
+
+class ChatDocument(BaseModelWithUI):
+ """Data model for a chat document"""
+ id: str = Field(description="Primary key")
+ fileId: int = Field(description="Foreign key to file")
+ filename: str = Field(description="Name of the file")
+ fileSize: int = Field(description="Size of the file")
+ mimeType: str = Field(description="MIME type of the file")
+ contents: List[ChatContent] = Field(default_factory=list, description="List of chat contents")
+
+class ChatStat(BaseModelWithUI):
+ """Data model for chat statistics"""
+ id: str = Field(description="Primary key")
+ processingTime: Optional[float] = Field(None, description="Processing time in seconds")
+ tokenCount: Optional[int] = Field(None, description="Number of tokens processed")
+ bytesSent: Optional[int] = Field(None, description="Number of bytes sent")
+ bytesReceived: Optional[int] = Field(None, description="Number of bytes received")
+
+class ChatLog(BaseModelWithUI):
+ """Data model for a chat log"""
+ id: str = Field(description="Primary key")
+ workflowId: str = Field(description="Foreign key to workflow")
+ message: str = Field(description="Log message")
+ type: str = Field(description="Type of log entry")
+ timestamp: str = Field(description="Timestamp of the log entry")
+ agentName: str = Field(description="Name of the agent")
+ status: str = Field(description="Status of the log entry")
+ progress: Optional[int] = Field(None, description="Progress percentage")
+
+class ChatMessage(BaseModelWithUI):
+ """Data model for a chat message"""
+ id: str = Field(description="Primary key")
+ workflowId: str = Field(description="Foreign key to workflow")
+ parentMessageId: Optional[str] = Field(None, description="Parent message ID for threading")
+ agentName: Optional[str] = Field(None, description="Name of the agent")
+ documents: List[ChatDocument] = Field(default_factory=list, description="Associated documents")
+ message: Optional[str] = Field(None, description="Message content")
+ role: str = Field(description="Role of the message sender")
+ status: str = Field(description="Status of the message")
+ sequenceNr: int = Field(description="Sequence number of the message")
+ startedAt: str = Field(description="When the message processing started")
+ finishedAt: Optional[str] = Field(None, description="When the message processing finished")
+ stats: Optional[ChatStat] = Field(None, description="Statistics for this message")
+
+class ChatWorkflow(BaseModelWithUI):
+ """Data model for a chat workflow"""
+ id: str = Field(description="Primary key")
+ mandateId: str = Field(description="ID of the mandate this workflow belongs to")
+ status: str = Field(description="Current status of the workflow")
+ name: Optional[str] = Field(None, description="Name of the workflow")
+ currentRound: int = Field(description="Current round number")
+ lastActivity: str = Field(description="Timestamp of last activity")
+ startedAt: str = Field(description="When the workflow started")
+ logs: List[ChatLog] = Field(default_factory=list, description="Workflow logs")
+ messages: List[ChatMessage] = Field(default_factory=list, description="Messages in the workflow")
+ stats: Optional[ChatStat] = Field(None, description="Workflow statistics")
+
+ label: Label = Field(
+ default=Label(default="Chat Workflow", translations={"en": "Chat Workflow", "fr": "Flux de travail de chat"}),
+ description="Label for the class"
+ )
+
+ fieldLabels: Dict[str, Label] = {
+ "id": Label(default="ID", translations={}),
+ "mandateId": Label(default="Mandate ID", translations={"en": "Mandate ID", "fr": "ID du mandat"}),
+ "status": Label(default="Status", translations={"en": "Status", "fr": "Statut"}),
+ "name": Label(default="Name", translations={"en": "Name", "fr": "Nom"}),
+ "currentRound": Label(default="Current Round", translations={"en": "Current Round", "fr": "Tour actuel"}),
+ "lastActivity": Label(default="Last Activity", translations={"en": "Last Activity", "fr": "Dernière activité"}),
+ "startedAt": Label(default="Started At", translations={"en": "Started At", "fr": "Démarré le"}),
+ "logs": Label(default="Logs", translations={"en": "Logs", "fr": "Journaux"}),
+ "messages": Label(default="Messages", translations={"en": "Messages", "fr": "Messages"}),
+ "stats": Label(default="Statistics", translations={"en": "Statistics", "fr": "Statistiques"})
+ }
+
+# AGENT AND TASK MODELS
+
+class Agent(BaseModelWithUI):
+ """Data model for an agent"""
+ id: str = Field(description="Primary key")
+ name: str = Field(description="Name of the agent")
+ description: str = Field(description="Description of the agent")
+ capabilities: List[str] = Field(default_factory=list, description="List of agent capabilities")
+
+class AgentResponse(BaseModelWithUI):
+ """Data model for an agent response"""
+ response: str = Field(description="Response content from the agent")
+ documents: List[ChatDocument] = Field(default_factory=list, description="Documents associated with the response")
+
+class TaskItem(BaseModelWithUI):
+ """Data model for a task item"""
+ sequenceNr: int = Field(description="Sequence number of the task")
+ agentName: str = Field(description="Name of the agent assigned to this task")
+ prompt: str = Field(description="Prompt for the task")
+ userLanguage: str = Field(description="User's preferred language")
+ filesInput: List[str] = Field(default_factory=list, description="Input files (format: filename;[documentId])")
+ filesOutput: List[str] = Field(default_factory=list, description="Output files (format: filename)")
+
+class TaskPlan(BaseModelWithUI):
+ """Data model for a task plan"""
+ fileList: List[str] = Field(default_factory=list, description="List of files (format: filename)")
+ taskItems: List[TaskItem] = Field(default_factory=list, description="List of task items in the plan")
+ userLanguage: str = Field(description="User's preferred language")
+ userResponse: str = Field(description="User's response or feedback")
+
+class UserInputRequest(BaseModelWithUI):
+ """Data model for a user input request"""
+ prompt: str = Field(description="Prompt for the user")
+ listFileId: List[int] = Field(default_factory=list, description="List of file IDs")
\ No newline at end of file
diff --git a/modules/interfaces/lucydomAccess.py b/modules/interfaces/serviceManagementAccess.py
similarity index 97%
rename from modules/interfaces/lucydomAccess.py
rename to modules/interfaces/serviceManagementAccess.py
index f333115b..b8db3239 100644
--- a/modules/interfaces/lucydomAccess.py
+++ b/modules/interfaces/serviceManagementAccess.py
@@ -1,13 +1,13 @@
"""
-Access control module for LucyDOM interface.
+Access control module for Management interface.
Handles user access management and permission checks.
"""
from typing import Dict, Any, List, Optional
-class LucydomAccess:
+class ManagementAccess:
"""
- Access control class for LucyDOM interface.
+ Access control class for Management interface.
Handles user access management and permission checks.
"""
diff --git a/modules/interfaces/serviceManagementClass.py b/modules/interfaces/serviceManagementClass.py
new file mode 100644
index 00000000..7e525607
--- /dev/null
+++ b/modules/interfaces/serviceManagementClass.py
@@ -0,0 +1,704 @@
+"""
+Interface to Management database and AI Connectors.
+Uses the JSON connector for data access with added language support.
+"""
+
+import os
+import logging
+import uuid
+from datetime import datetime
+from typing import Dict, Any, List, Optional, Union
+
+import hashlib
+
+from modules.shared.mimeUtils import isTextMimeType
+from modules.interfaces.serviceManagementAccess import ManagementAccess
+from modules.interfaces.serviceManagementModel import (
+ Prompt, FileItem, FileData
+)
+from modules.interfaces.serviceAppModel import User, Mandate, UserPrivilege
+
+# DYNAMIC PART: Connectors to the Interface
+from modules.connectors.connectorDbJson import DatabaseConnector
+from modules.connectors.connectorAiOpenai import ChatService
+
+# Basic Configurations
+from modules.shared.configuration import APP_CONFIG
+logger = logging.getLogger(__name__)
+
+# Singleton factory for Management instances with AI service per context
+_instancesManagement = {}
+
+# Custom exceptions for file handling
class FileError(Exception):
    """Base class for file handling exceptions."""
    pass

# NOTE(review): this shadows Python's builtin FileNotFoundError (an OSError
# subclass) within this module. Catch-clauses here resolve to this class, so
# a builtin FileNotFoundError raised by I/O code would NOT be caught by
# `except FileNotFoundError` in this module — consider renaming.
class FileNotFoundError(FileError):
    """Exception raised when a file is not found."""
    pass

class FileStorageError(FileError):
    """Exception raised when there's an error storing a file."""
    pass

class FilePermissionError(FileError):
    """Exception raised when there's a permission issue with a file."""
    pass

class FileDeletionError(FileError):
    """Exception raised when there's an error deleting a file."""
    pass
+
+class ServiceManagement:
+ """
+ Interface to Management database and AI Connectors.
+ Uses the JSON connector for data access with added language support.
+ """
+
    def __init__(self):
        """Initializes the Management Interface.

        Opens the database connection and seeds standard records; the
        user-bound members (access control, AI service) stay None until
        setUserContext() is called.
        """
        # Initialize database
        self._initializeDatabase()

        # Initialize standard records if needed
        self._initRecords()

        # Initialize variables
        self.currentUser: Optional[User] = None
        self.userId: Optional[str] = None
        self.access: Optional[ManagementAccess] = None # Will be set when user context is provided
        self.aiService: Optional[ChatService] = None # Will be set when user context is provided
+
+ def setUserContext(self, currentUser: User):
+ """Sets the user context for the interface."""
+ if not currentUser:
+ logger.info("Initializing interface without user context")
+ return
+
+ self.currentUser = currentUser
+ self.userId = currentUser.id
+
+ if not self.userId:
+ raise ValueError("Invalid user context: id is required")
+
+ # Add language settings
+ self.userLanguage = currentUser.language # Default user language
+
+ # Initialize access control with user context
+ self.access = ManagementAccess(self.currentUser, self.db)
+
+ # Initialize AI service
+ self.aiService = ChatService()
+
+ logger.debug(f"User context set: userId={self.userId}")
+
    def _initializeDatabase(self):
        """Initializes the database connection.

        Reads connection settings from APP_CONFIG and constructs the JSON
        DatabaseConnector on ``self.db``. Re-raises any failure after logging.
        """
        try:
            # Get configuration values with defaults
            dbHost = APP_CONFIG.get("DB_MANAGEMENT_HOST", "_no_config_default_data")
            dbDatabase = APP_CONFIG.get("DB_MANAGEMENT_DATABASE", "management")
            dbUser = APP_CONFIG.get("DB_MANAGEMENT_USER")
            dbPassword = APP_CONFIG.get("DB_MANAGEMENT_PASSWORD_SECRET")

            # Ensure the database directory exists
            # (dbHost is treated as a filesystem path here — presumably the
            # JSON connector stores data on disk; confirm against connector.)
            os.makedirs(dbHost, exist_ok=True)

            self.db = DatabaseConnector(
                dbHost=dbHost,
                dbDatabase=dbDatabase,
                dbUser=dbUser,
                dbPassword=dbPassword
            )

            logger.info("Database initialized successfully")
        except Exception as e:
            logger.error(f"Failed to initialize database: {str(e)}")
            raise
+
    def _initRecords(self):
        """Initializes standard records in the database if they don't exist.

        Currently only seeds the standard prompts; further seeders can be
        added below. Re-raises any failure after logging.
        """
        try:
            # Initialize standard prompts
            self._initializeStandardPrompts()

            # Add other record initializations here

            logger.info("Standard records initialized successfully")
        except Exception as e:
            logger.error(f"Failed to initialize standard records: {str(e)}")
            raise
+
    def _initializeStandardPrompts(self):
        """Creates standard prompts if they don't exist.

        Seeding only runs when the ``prompts`` table is completely empty;
        otherwise the existing content is left untouched. The prompt texts
        are user-facing data (some deliberately in German) and must not be
        altered here.
        """
        prompts = self.db.getRecordset("prompts")
        logger.debug(f"Found {len(prompts)} existing prompts")

        if not prompts:
            logger.debug("Creating standard prompts")

            # Define standard prompts
            standardPrompts = [
                {
                    "content": "Research the current market trends and developments in [TOPIC]. Collect information about leading companies, innovative products or services, and current challenges. Present the results in a structured overview with relevant data and sources.",
                    "name": "Web Research: Market Research"
                },
                {
                    "content": "Analyze the attached dataset on [TOPIC] and identify the most important trends, patterns, and anomalies. Perform statistical calculations to support your findings. Present the results in a clearly structured analysis and draw relevant conclusions.",
                    "name": "Analysis: Data Analysis"
                },
                {
                    "content": "Create a detailed protocol of our meeting on [TOPIC]. Capture all discussed points, decisions made, and agreed measures. Structure the protocol clearly with agenda items, participant list, and clear responsibilities for follow-up actions.",
                    "name": "Protocol: Meeting Minutes"
                },
                {
                    "content": "Develop a UI/UX design concept for [APPLICATION/WEBSITE]. Consider the target audience, main functions, and brand identity. Describe the visual design, navigation, interaction patterns, and information architecture. Explain how the design optimizes user-friendliness and user experience.",
                    "name": "Design: UI/UX Design"
                },
                {
                    "content": "Gib mir die ersten 1000 Primzahlen",
                    "name": "Code: Primzahlen"
                },
                {
                    "content": "Bereite mir eine formelle E-Mail an peter.muster@domain.com vor, um meinen Termin von 10 Uhr auf Freitag zu scheiben.",
                    "name": "Mail: Vorbereitung"
                },
            ]

            # Create prompts
            for promptData in standardPrompts:
                createdPrompt = self.db.recordCreate("prompts", promptData)
                logger.debug(f"Prompt '{promptData.get('name', 'Standard')}' was created with ID {createdPrompt['id']} and context mandate={createdPrompt.get('mandateId')}, user={createdPrompt.get('_createdBy')}")
        else:
            logger.debug("Prompts already exist, skipping creation")
+
    def _uam(self, table: str, recordset: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
        """Delegate to access control module.

        Filters ``recordset`` through user access management. Requires
        setUserContext() to have been called (self.access is None until then).
        """
        return self.access.uam(table, recordset)

    def _canModify(self, table: str, recordId: Optional[str] = None) -> bool:
        """Delegate to access control module.

        Checks modify permission on a table (or a specific record). Requires
        setUserContext() to have been called (self.access is None until then).
        """
        return self.access.canModify(table, recordId)
+
+ # Language support method
+
    def setUserLanguage(self, languageCode: str):
        """Set the user's preferred language.

        Overrides the language taken from the user record; used by callAi()
        to steer user-facing responses.
        """
        self.userLanguage = languageCode
        logger.debug(f"User language set to: {languageCode}")
+
+ # AI Call Root Function
+
+ async def callAi(self, messages: List[Dict[str, str]], produceUserAnswer: bool = False, temperature: float = None) -> str:
+ """Enhanced AI service call with language support."""
+ if not self.aiService:
+ logger.error("AI service not set in ServiceManagement")
+ return "Error: AI service not available"
+
+ # Add language instruction for user-facing responses
+ if produceUserAnswer and self.userLanguage:
+ ltext= f"Please respond in '{self.userLanguage}' language."
+ if messages and messages[0]["role"] == "system":
+ if "language" not in messages[0]["content"].lower():
+ messages[0]["content"] = f"{ltext} {messages[0]['content']}"
+ else:
+ # Insert a system message with language instruction
+ messages.insert(0, {
+ "role": "system",
+ "content": ltext
+ })
+
+ # Call the AI service
+ if temperature is not None:
+ return await self.aiService.callApi(messages, temperature=temperature)
+ else:
+ return await self.aiService.callApi(messages)
+
    async def callAi4Image(self, imageData: Union[str, bytes], mimeType: str = None, prompt: str = "Describe this image") -> str:
        """Delegates an image-analysis request to the AI service.

        Returns an error string when no AI service has been configured
        (setUserContext not called). Note: unlike callAi(), no language
        instruction is added here.
        """
        if not self.aiService:
            logger.error("AI service not set in ServiceManagement")
            return "Error: AI service not available"
        return await self.aiService.analyzeImage(imageData, mimeType, prompt)
+
+ # Utilities
+
    def getInitialId(self, table: str) -> Optional[str]:
        """Returns the initial ID for a table (delegated to the connector)."""
        return self.db.getInitialId(table)

    def _getCurrentTimestamp(self) -> str:
        """Returns the current timestamp in ISO format.

        NOTE(review): naive local time (no timezone offset) — confirm whether
        stored timestamps should be UTC/aware.
        """
        return datetime.now().isoformat()
+
+ # Prompt methods
+
+ def getAllPrompts(self) -> List[Prompt]:
+ """Returns prompts based on user access level."""
+ allPrompts = self.db.getRecordset("prompts")
+ filteredPrompts = self._uam("prompts", allPrompts)
+ return [Prompt.from_dict(prompt) for prompt in filteredPrompts]
+
+ def getPrompt(self, promptId: str) -> Optional[Prompt]:
+ """Returns a prompt by ID if user has access."""
+ prompts = self.db.getRecordset("prompts", recordFilter={"id": promptId})
+ if not prompts:
+ return None
+
+ filteredPrompts = self._uam("prompts", prompts)
+ return Prompt.from_dict(filteredPrompts[0]) if filteredPrompts else None
+
    def createPrompt(self, content: str, name: str) -> Prompt:
        """Creates a new prompt if user has permission.

        Raises:
            PermissionError: If the current user may not create prompts.

        NOTE(review): the Prompt model (serviceManagementModel) declares
        required ``id`` and ``mandateId`` fields and no ``createdAt`` field —
        this construction looks like it would fail validation; confirm the
        model's defaults/extra-field policy.
        """
        if not self._canModify("prompts"):
            raise PermissionError("No permission to create prompts")

        promptData = Prompt(
            content=content,
            name=name,
            createdAt=self._getCurrentTimestamp()
        )

        createdRecord = self.db.recordCreate("prompts", promptData.to_dict())
        return Prompt.from_dict(createdRecord)
+
    def updatePrompt(self, promptId: str, content: str = None, name: str = None) -> Prompt:
        """Updates a prompt if user has access.

        Only the fields passed as non-None are changed. The record is
        re-fetched after the write so the returned Prompt reflects the
        stored state.

        Raises:
            ValueError: If the prompt does not exist (or vanishes mid-update).
            PermissionError: If the current user may not modify it.
        """
        # Check if the prompt exists and user has access
        prompt = self.getPrompt(promptId)
        if not prompt:
            raise ValueError(f"Prompt {promptId} not found")

        if not self._canModify("prompts", promptId):
            raise PermissionError(f"No permission to update prompt {promptId}")

        # Update prompt data using model
        updatedData = prompt.model_dump()
        if content is not None:
            updatedData["content"] = content
        if name is not None:
            updatedData["name"] = name

        updatedPrompt = Prompt.from_dict(updatedData)

        # Update prompt
        self.db.recordModify("prompts", promptId, updatedPrompt.to_dict())

        # Get updated prompt (read-back verifies the write succeeded)
        updatedPrompt = self.getPrompt(promptId)
        if not updatedPrompt:
            raise ValueError("Failed to retrieve updated prompt")

        return updatedPrompt
+
+ def deletePrompt(self, promptId: str) -> bool:
+ """Deletes a prompt if user has access."""
+ # Check if the prompt exists and user has access
+ prompt = self.getPrompt(promptId)
+ if not prompt:
+ return False
+
+ if not self._canModify("prompts", promptId):
+ raise PermissionError(f"No permission to delete prompt {promptId}")
+
+ return self.db.recordDelete("prompts", promptId)
+
+ # File Utilities
+
+ def calculateFileHash(self, fileContent: bytes) -> str:
+ """Calculates a SHA-256 hash for the file content"""
+ return hashlib.sha256(fileContent).hexdigest()
+
+ def checkForDuplicateFile(self, fileHash: str) -> Optional[Dict[str, Any]]:
+ """Checks if a file with the same hash already exists for the current user and mandate."""
+ files = self.db.getRecordset("files", recordFilter={
+ "fileHash": fileHash,
+ "mandateId": self.currentUser.get("mandateId"),
+ "_createdBy": self.currentUser.get("id")
+ })
+ if files:
+ return files[0]
+ return None
+
+ def getMimeType(self, filename: str) -> str:
+ """Determines the MIME type based on the file extension."""
+ import os
+ ext = os.path.splitext(filename)[1].lower()[1:]
+ extensionToMime = {
+ "pdf": "application/pdf",
+ "docx": "application/vnd.openxmlformats-officedocument.wordprocessingml.document",
+ "doc": "application/msword",
+ "xlsx": "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
+ "xls": "application/vnd.ms-excel",
+ "pptx": "application/vnd.openxmlformats-officedocument.presentationml.presentation",
+ "ppt": "application/vnd.ms-powerpoint",
+ "csv": "text/csv",
+ "txt": "text/plain",
+ "json": "application/json",
+ "xml": "application/xml",
+ "html": "text/html",
+ "htm": "text/html",
+ "jpg": "image/jpeg",
+ "jpeg": "image/jpeg",
+ "png": "image/png",
+ "gif": "image/gif",
+ "webp": "image/webp",
+ "svg": "image/svg+xml",
+ "py": "text/x-python",
+ "js": "application/javascript",
+ "css": "text/css"
+ }
+ return extensionToMime.get(ext.lower(), "application/octet-stream")
+
+ # File methods - metadata-based operations
+
+ def getAllFiles(self) -> List[Dict[str, Any]]:
+ """Returns files based on user access level."""
+ allFiles = self.db.getRecordset("files")
+ return self._uam("files", allFiles)
+
+ def getFile(self, fileId: str) -> Optional[Dict[str, Any]]:
+ """Returns a file by ID if user has access."""
+ files = self.db.getRecordset("files", recordFilter={"id": fileId})
+ if not files:
+ return None
+
+ filteredFiles = self._uam("files", files)
+ return filteredFiles[0] if filteredFiles else None
+
+ def createFile(self, name: str, mimeType: str, size: int = None, fileHash: str = None) -> Dict[str, Any]:
+ """Creates a new file entry if user has permission."""
+ if not self._canModify("files"):
+ raise PermissionError("No permission to create files")
+
+ fileData = {
+ "mandateId": self.currentUser.get("mandateId"),
+ "name": name,
+ "mimeType": mimeType,
+ "size": size,
+ "fileHash": fileHash,
+ "creationDate": self._getCurrentTimestamp()
+ }
+ return self.db.recordCreate("files", fileData)
+
    def updateFile(self, fileId: str, updateData: Dict[str, Any]) -> Dict[str, Any]:
        """Updates file metadata if user has access.

        Raises:
            FileNotFoundError: (module-local class) if the file is missing
                or invisible to the user.
            PermissionError: If the current user may not modify it.
        """
        # Check if the file exists and user has access
        file = self.getFile(fileId)
        if not file:
            raise FileNotFoundError(f"File with ID {fileId} not found")

        if not self._canModify("files", fileId):
            raise PermissionError(f"No permission to update file {fileId}")

        # Update file
        return self.db.recordModify("files", fileId, updateData)
+
+ def deleteFile(self, fileId: str) -> bool:
+ """Deletes a file if user has access."""
+ try:
+ # Check if the file exists and user has access
+ file = self.getFile(fileId)
+
+ if not file:
+ raise FileNotFoundError(f"File with ID {fileId} not found")
+
+ if not self._canModify("files", fileId):
+ raise PermissionError(f"No permission to delete file {fileId}")
+
+ # Check for other references to this file (by hash)
+ fileHash = file.get("fileHash")
+ if fileHash:
+ otherReferences = [f for f in self.db.getRecordset("files", recordFilter={"fileHash": fileHash})
+ if f.get("id") != fileId]
+
+ # Only delete associated fileData if no other references exist
+ if not otherReferences:
+ try:
+ fileDataEntries = self.db.getRecordset("fileData", recordFilter={"id": fileId})
+ if fileDataEntries:
+ self.db.recordDelete("fileData", fileId)
+ logger.debug(f"FileData for file {fileId} deleted")
+ except Exception as e:
+ logger.warning(f"Error deleting FileData for file {fileId}: {str(e)}")
+
+ # Delete the FileItem entry
+ return self.db.recordDelete("files", fileId)
+
+ except FileNotFoundError as e:
+ raise
+ except FilePermissionError as e:
+ raise
+ except Exception as e:
+ logger.error(f"Error deleting file {fileId}: {str(e)}")
+ raise FileDeletionError(f"Error deleting file: {str(e)}")
+
+ # FileData methods - data operations
+
    def createFileData(self, fileId: str, data: bytes) -> bool:
        """Stores the binary data of a file in the database.

        Text MIME types are stored as UTF-8 text when possible; anything
        else (or undecodable text) is stored base64-encoded, with the
        encoding recorded in the ``base64Encoded`` flag.

        Returns:
            True on success, False on any failure (never raises).
        """
        try:
            import base64

            # Check file access
            file = self.getFile(fileId)
            if not file:
                logger.error(f"File with ID {fileId} not found when storing data")
                return False

            # Determine if this is a text-based format
            mimeType = file.get("mimeType", "application/octet-stream")
            isTextFormat = isTextMimeType(mimeType)

            base64Encoded = False
            fileData = None

            if isTextFormat:
                # Try to decode as text
                try:
                    textContent = data.decode('utf-8')
                    fileData = textContent
                    base64Encoded = False
                    logger.debug(f"Stored file {fileId} as text")
                except UnicodeDecodeError:
                    # Fallback to base64 if text decoding fails
                    encodedData = base64.b64encode(data).decode('utf-8')
                    fileData = encodedData
                    base64Encoded = True
                    logger.warning(f"Failed to decode text file {fileId}, falling back to base64")
            else:
                # Binary format - always use base64
                encodedData = base64.b64encode(data).decode('utf-8')
                fileData = encodedData
                base64Encoded = True
                logger.debug(f"Stored file {fileId} as base64")

            # Create the fileData record with data and encoding flag
            # (shares its id with the files record — 1:1 relation)
            fileDataObj = {
                "id": fileId,
                "data": fileData,
                "base64Encoded": base64Encoded
            }

            self.db.recordCreate("fileData", fileDataObj)
            logger.debug(f"Successfully stored data for file {fileId} (base64Encoded: {base64Encoded})")
            return True
        except Exception as e:
            logger.error(f"Error storing data for file {fileId}: {str(e)}")
            return False
+
    def getFileData(self, fileId: str) -> Optional[bytes]:
        """Returns the binary data of a file if user has access.

        Reverses the encoding applied by createFileData/updateFileData:
        base64 entries are decoded, text entries are UTF-8 encoded back to
        bytes. Returns None on any failure (never raises).
        """
        # Check file access
        file = self.getFile(fileId)
        if not file:
            logger.warning(f"No access to file ID {fileId}")
            return None

        import base64

        fileDataEntries = self.db.getRecordset("fileData", recordFilter={"id": fileId})
        if not fileDataEntries:
            logger.warning(f"No data found for file ID {fileId}")
            return None

        fileDataEntry = fileDataEntries[0]
        if "data" not in fileDataEntry:
            logger.warning(f"No data field in file data for ID {fileId}")
            return None

        data = fileDataEntry["data"]
        base64Encoded = fileDataEntry.get("base64Encoded", False)

        try:
            if base64Encoded:
                # Decode base64 to bytes
                return base64.b64decode(data)
            else:
                # Convert text to bytes
                return data.encode('utf-8')
        except Exception as e:
            logger.error(f"Error processing file data for {fileId}: {str(e)}")
            return None
+
+ def updateFileData(self, fileId: str, data: Union[bytes, str]) -> bool:
+ """Updates file data if user has access."""
+ # Check file access
+ file = self.getFile(fileId)
+ if not file:
+ logger.error(f"File with ID {fileId} not found when updating data")
+ return False
+
+ if not self._canModify("files", fileId):
+ logger.error(f"No permission to update file data for {fileId}")
+ return False
+
+ try:
+ import base64
+
+ # Determine if this is a text-based format
+ mimeType = file.get("mimeType", "application/octet-stream")
+ isTextFormat = isTextMimeType(mimeType)
+
+ base64Encoded = False
+ fileData = None
+
+ # Convert input data to the right format
+ if isinstance(data, bytes):
+ if isTextFormat:
+ try:
+ # Try to convert bytes to text
+ fileData = data.decode('utf-8')
+ base64Encoded = False
+ except UnicodeDecodeError:
+ # Fallback to base64 if text decoding fails
+ fileData = base64.b64encode(data).decode('utf-8')
+ base64Encoded = True
+ else:
+ # Binary format - use base64
+ fileData = base64.b64encode(data).decode('utf-8')
+ base64Encoded = True
+ elif isinstance(data, str):
+ if isTextFormat:
+ # Text format - store as text
+ fileData = data
+ base64Encoded = False
+ else:
+ # Check if it's already base64 encoded
+ try:
+ # Try to decode as base64 to validate
+ base64.b64decode(data)
+ fileData = data
+ base64Encoded = True
+ except:
+ # Not valid base64, encode the string
+ fileData = base64.b64encode(data.encode('utf-8')).decode('utf-8')
+ base64Encoded = True
+ else:
+ # Convert to string first
+ stringData = str(data)
+ if isTextFormat:
+ fileData = stringData
+ base64Encoded = False
+ else:
+ fileData = base64.b64encode(stringData.encode('utf-8')).decode('utf-8')
+ base64Encoded = True
+
+ # Check if a record already exists
+ fileDataEntries = self.db.getRecordset("fileData", recordFilter={"id": fileId})
+
+ dataUpdate = {
+ "data": fileData,
+ "base64Encoded": base64Encoded
+ }
+
+ if fileDataEntries:
+ # Update the existing record
+ self.db.recordModify("fileData", fileId, dataUpdate)
+ logger.debug(f"Updated file data for file ID {fileId} (base64Encoded: {base64Encoded})")
+ else:
+ # Create a new record
+ dataUpdate["id"] = fileId
+ self.db.recordCreate("fileData", dataUpdate)
+ logger.debug(f"Created new file data for file ID {fileId} (base64Encoded: {base64Encoded})")
+
+ return True
+ except Exception as e:
+ logger.error(f"Error updating data for file {fileId}: {str(e)}")
+ return False
+
    def saveUploadedFile(self, fileContent: bytes, fileName: str) -> Dict[str, Any]:
        """Saves an uploaded file if user has permission.

        Deduplicates by SHA-256 content hash within the current user/mandate:
        when a duplicate exists its existing record is returned and no new
        data is written.

        Raises:
            FileStorageError: Wrapping any failure (including permission and
                validation errors — note these lose their original type).
        """
        try:
            # Check file creation permission
            if not self._canModify("files"):
                raise PermissionError("No permission to upload files")

            logger.debug(f"Starting upload process for file: {fileName}")

            if not isinstance(fileContent, bytes):
                logger.error(f"Invalid fileContent type: {type(fileContent)}")
                raise ValueError(f"fileContent must be bytes, got {type(fileContent)}")

            # Calculate file hash for deduplication
            fileHash = self.calculateFileHash(fileContent)
            logger.debug(f"Calculated file hash: {fileHash}")

            # Check for duplicate within same user/mandate
            existingFile = self.checkForDuplicateFile(fileHash)
            if existingFile:
                logger.debug(f"Duplicate found for {fileName}: {existingFile['id']}")
                return existingFile

            # Determine MIME type and size
            mimeType = self.getMimeType(fileName)
            fileSize = len(fileContent)

            # Save metadata
            logger.debug(f"Saving file metadata to database for file: {fileName}")
            dbFile = self.createFile(
                name=fileName,
                mimeType=mimeType,
                size=fileSize,
                fileHash=fileHash
            )

            # Save binary data
            logger.debug(f"Saving file content to database for file: {fileName}")
            self.createFileData(dbFile["id"], fileContent)

            logger.debug(f"File upload process completed for: {fileName}")
            return dbFile

        except Exception as e:
            logger.error(f"Error in saveUploadedFile for {fileName}: {str(e)}", exc_info=True)
            raise FileStorageError(f"Error saving file: {str(e)}")
+
    def downloadFile(self, fileId: str) -> Optional[Dict[str, Any]]:
        """Returns a file for download if user has access.

        Returns:
            A dict with id, name, contentType, size, and raw ``content``
            bytes.

        Raises:
            FileNotFoundError: (module-local class) if metadata or data is
                missing/inaccessible.
            FileError: Wrapping any other failure.
        """
        try:
            # Check file access
            file = self.getFile(fileId)

            if not file:
                raise FileNotFoundError(f"File with ID {fileId} not found")

            # Get binary data
            fileContent = self.getFileData(fileId)

            if fileContent is None:
                raise FileNotFoundError(f"Binary data for file with ID {fileId} not found")

            return {
                "id": fileId,
                "name": file.get("name", f"file_{fileId}"),
                "contentType": file.get("mimeType", "application/octet-stream"),
                "size": file.get("size", len(fileContent)),
                "content": fileContent
            }
        except FileNotFoundError as e:
            raise
        except Exception as e:
            logger.error(f"Error downloading file {fileId}: {str(e)}")
            raise FileError(f"Error downloading file: {str(e)}")
+
+
def getInterface(currentUser: Optional[User] = None) -> 'ServiceManagement':
    """
    Returns the shared ServiceManagement instance.

    If currentUser is provided, the instance is (re)bound to that user's
    context; otherwise it is returned with only database access.

    NOTE(review): despite the "per context" comment on _instancesManagement,
    a single instance keyed "default" is shared — concurrent callers with
    different users overwrite each other's context.
    """
    # Lazily create the shared instance on first use.
    interface = _instancesManagement.get("default")
    if interface is None:
        interface = ServiceManagement()
        _instancesManagement["default"] = interface

    if currentUser:
        interface.setUserContext(currentUser)
    else:
        logger.info("Returning interface without user context")

    return interface
\ No newline at end of file
diff --git a/modules/interfaces/serviceManagementModel.py b/modules/interfaces/serviceManagementModel.py
new file mode 100644
index 00000000..2599e700
--- /dev/null
+++ b/modules/interfaces/serviceManagementModel.py
@@ -0,0 +1,75 @@
+"""
+Service Management model classes for the service management system.
+Updated to match the Entity Relation Diagram structure.
+"""
+
+from pydantic import BaseModel, Field
+from typing import List, Dict, Any, Optional
+from datetime import datetime
+import uuid
+
+from modules.shared.attributeUtils import Label, BaseModelWithUI
+
+# CORE MODELS
+
class FileItem(BaseModelWithUI):
    """Data model for a file item (metadata only; content lives in FileData).

    NOTE(review): field names here (``filename``/``fileSize``) differ from the
    record keys written by ServiceManagement.createFile (``name``/``size``) —
    confirm which schema the ``files`` table actually uses.
    """
    id: int = Field(description="Primary key")
    mandateId: str = Field(description="ID of the mandate this file belongs to")
    filename: str = Field(description="Name of the file")
    mimeType: str = Field(description="MIME type of the file")
    workflowId: Optional[str] = Field(None, description="Foreign key to workflow")
    fileHash: str = Field(description="Hash of the file")
    fileSize: int = Field(description="Size of the file in bytes")

    # UI label for the class as a whole
    label: Label = Field(
        default=Label(default="File Item", translations={"en": "File Item", "fr": "Élément de fichier"}),
        description="Label for the class"
    )

    # Per-field UI labels keyed by field name
    fieldLabels: Dict[str, Label] = {
        "id": Label(default="ID", translations={}),
        "mandateId": Label(default="Mandate ID", translations={"en": "Mandate ID", "fr": "ID du mandat"}),
        "filename": Label(default="Filename", translations={"en": "Filename", "fr": "Nom de fichier"}),
        "mimeType": Label(default="MIME Type", translations={"en": "MIME Type", "fr": "Type MIME"}),
        "workflowId": Label(default="Workflow ID", translations={"en": "Workflow ID", "fr": "ID du flux de travail"}),
        "fileHash": Label(default="File Hash", translations={"en": "File Hash", "fr": "Hash du fichier"}),
        "fileSize": Label(default="File Size", translations={"en": "File Size", "fr": "Taille du fichier"})
    }
+
class FileData(BaseModelWithUI):
    """Data model for file data (the stored content of a FileItem).

    ``data`` holds either UTF-8 text or a base64 string, as indicated by
    ``base64Encoded`` (see ServiceManagement.createFileData).
    """
    id: int = Field(description="Primary key")
    data: str = Field(description="File data content")
    base64Encoded: bool = Field(description="Whether the data is base64 encoded")

    # UI label for the class as a whole
    label: Label = Field(
        default=Label(default="File Data", translations={"en": "File Data", "fr": "Données de fichier"}),
        description="Label for the class"
    )

    # Per-field UI labels keyed by field name
    fieldLabels: Dict[str, Label] = {
        "id": Label(default="ID", translations={}),
        "data": Label(default="Data", translations={"en": "Data", "fr": "Données"}),
        "base64Encoded": Label(default="Base64 Encoded", translations={"en": "Base64 Encoded", "fr": "Encodé en Base64"})
    }
+
class Prompt(BaseModelWithUI):
    """Data model for a prompt.

    NOTE(review): ServiceManagement.createPrompt constructs this model with
    only ``content``/``name``/``createdAt`` — ``id`` and ``mandateId`` are
    required here and ``createdAt`` is not declared; confirm the intended
    schema.
    """
    id: int = Field(description="Primary key")
    mandateId: str = Field(description="ID of the mandate this prompt belongs to")
    content: str = Field(description="Content of the prompt")
    name: str = Field(description="Name of the prompt")

    # UI label for the class as a whole
    label: Label = Field(
        default=Label(default="Prompt", translations={"en": "Prompt", "fr": "Invite"}),
        description="Label for the class"
    )

    # Per-field UI labels keyed by field name
    fieldLabels: Dict[str, Label] = {
        "id": Label(default="ID", translations={}),
        "mandateId": Label(default="Mandate ID", translations={"en": "Mandate ID", "fr": "ID du mandat"}),
        "content": Label(default="Content", translations={"en": "Content", "fr": "Contenu"}),
        "name": Label(default="Name", translations={"en": "Name", "fr": "Nom"})
    }
+
diff --git a/modules/routes/routeAdmin.py b/modules/routes/routeAdmin.py
new file mode 100644
index 00000000..c63b4ff5
--- /dev/null
+++ b/modules/routes/routeAdmin.py
@@ -0,0 +1,63 @@
from fastapi import APIRouter, Depends, Request, Response
+from fastapi.responses import FileResponse
+from fastapi.staticfiles import StaticFiles
+import os
+import logging
+from pathlib import Path as FilePath
+from typing import Dict, Any
+
+from modules.shared.configuration import APP_CONFIG
+from modules.security.auth import limiter, getCurrentUser
+
# Static folder setup - using absolute path from app root
baseDir = FilePath(__file__).parent.parent.parent  # Go up to gateway root
staticFolder = baseDir / "static"
os.makedirs(staticFolder, exist_ok=True)

logger = logging.getLogger(__name__)

# BUGFIX: the router was defined twice (first with tags=["General"], then
# with tags=["Administration"]); the first instance was dead code. Keep a
# single definition with the intended "Administration" tag.
router = APIRouter(
    prefix="",
    tags=["Administration"],
    responses={404: {"description": "Not found"}}
)

# Mount static files
router.mount("/static", StaticFiles(directory=str(staticFolder), html=True), name="static")
+
@router.get("/")
@limiter.limit("30/minute")
async def root(request: Request):
    """API status endpoint.

    BUGFIX: the rate limiter (slowapi-style ``limiter.limit``) requires the
    endpoint to accept a ``request: Request`` argument; without it the
    limited route fails at runtime.
    """
    return {
        "status": "online",
        "message": "Data Platform API is active",
        "allowedOrigins": f"Allowed origins are {APP_CONFIG.get('APP_ALLOWED_ORIGINS')}"
    }
+
@router.get("/api/environment")
@limiter.limit("30/minute")
async def get_environment(request: Request, currentUser: Dict[str, Any] = Depends(getCurrentUser)):
    """Get environment configuration for frontend.

    BUGFIX: added the ``request: Request`` parameter required by the
    slowapi-style rate limiter.
    """
    return {
        "apiBaseUrl": APP_CONFIG.get("APP_API_URL", ""),
        "environment": APP_CONFIG.get("APP_ENV", "development"),
        "instanceLabel": APP_CONFIG.get("APP_ENV_LABEL", "Development"),
        # Add other environment variables the frontend might need
    }
+
@router.options("/{fullPath:path}")
@limiter.limit("60/minute")
async def options_route(request: Request, fullPath: str):
    """Answers CORS preflight (OPTIONS) requests for any path.

    BUGFIX: added the ``request: Request`` parameter required by the
    slowapi-style rate limiter.
    """
    return Response(status_code=200)
+
@router.get("/favicon.ico")
@limiter.limit("30/minute")
async def favicon(request: Request):
    """Serves the site favicon from the static folder.

    BUGFIX: added the ``request: Request`` parameter required by the
    slowapi-style rate limiter.
    """
    return FileResponse(str(staticFolder / "favicon.ico"), media_type="image/x-icon")
diff --git a/modules/routes/routeAttributes.py b/modules/routes/routeAttributes.py
index 086d5a89..e3d82452 100644
--- a/modules/routes/routeAttributes.py
+++ b/modules/routes/routeAttributes.py
@@ -8,10 +8,11 @@ from pydantic import BaseModel
import logging
# Import auth module
-import modules.security.auth as auth
+from modules.security.auth import limiter, getCurrentUser
# Import the attribute definition and helper functions
-from modules.shared.defAttributes import AttributeDefinition, getModelAttributes
+from modules.interfaces.serviceAppModel import AttributeDefinition
+from modules.shared.attributeUtils import getModelClasses
# Configure logger
logger = logging.getLogger(__name__)
@@ -39,30 +40,6 @@ class AttributeResponse(BaseModel):
}
}
-def getModelClasses() -> Dict[str, Any]:
- """Dynamically get all model classes from all model modules"""
- modelClasses = {}
-
- # Get the interfaces directory path
- # Since we're in modules/routes/, we need to go up one level to modules/ then into interfaces/
- interfaces_dir = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'interfaces')
-
- # Find all model files
- for filename in os.listdir(interfaces_dir):
- if filename.endswith('Model.py'):
- # Convert filename to module name (e.g., gatewayModel.py -> gatewayModel)
- module_name = filename[:-3]
-
- # Import the module dynamically
- module = importlib.import_module(f'modules.interfaces.{module_name}')
-
- # Get all classes from the module
- for name, obj in inspect.getmembers(module):
- if inspect.isclass(obj) and issubclass(obj, BaseModel) and obj != BaseModel:
- modelClasses[name.lower()] = obj
-
- return modelClasses
-
# Create a router for the attribute endpoints
router = APIRouter(
prefix="/api/attributes",
@@ -71,9 +48,10 @@ router = APIRouter(
)
@router.get("/{entityType}", response_model=AttributeResponse)
+@limiter.limit("30/minute")
async def get_entity_attributes(
entityType: str = Path(..., description="Type of entity (e.g. prompt)"),
- currentUser: Dict[str, Any] = Depends(auth.getCurrentActiveUser)
+ currentUser: Dict[str, Any] = Depends(getCurrentUser)
):
"""
Retrieves the attribute definitions for a specific entity.
@@ -100,12 +78,13 @@ async def get_entity_attributes(
# Get model class and derive attributes from it
modelClass = modelClasses[entityType]
- attributes = getModelAttributes(modelClass, userLanguage)
+ attributes = modelClass.getModelAttributeDefinitions()
# Return only visible attributes
return AttributeResponse(attributes=[attr for attr in attributes if attr.visible])
@router.options("/{entityType}")
+@limiter.limit("60/minute")
async def options_entity_attributes(
entityType: str = Path(..., description="Type of entity (e.g. prompt)")
):
diff --git a/modules/routes/routeFiles.py b/modules/routes/routeDataFiles.py
similarity index 69%
rename from modules/routes/routeFiles.py
rename to modules/routes/routeDataFiles.py
index 5e9aae14..0ab1dafb 100644
--- a/modules/routes/routeFiles.py
+++ b/modules/routes/routeDataFiles.py
@@ -7,22 +7,23 @@ from dataclasses import dataclass
import io
# Import auth module
-import modules.security.auth as auth
+from modules.security.auth import limiter, getCurrentUser
# Import interfaces
-import modules.interfaces.lucydomInterface as lucydomInterface
-from modules.interfaces.lucydomModel import FileItem
+import modules.interfaces.serviceManagementClass as serviceManagementClass
+from modules.interfaces.serviceManagementModel import FileItem, getModelAttributeDefinitions
+from modules.interfaces.serviceAppModel import AttributeDefinition
# Configure logger
logger = logging.getLogger(__name__)
# Model attributes for FileItem
-fileAttributes = lucydomInterface.getModelAttributes(FileItem)
+fileAttributes = getModelAttributeDefinitions(FileItem)
# Create router for file endpoints
router = APIRouter(
prefix="/api/files",
- tags=["Files"],
+ tags=["Manage Files"],
responses={
404: {"description": "Not found"},
400: {"description": "Bad request"},
@@ -33,13 +34,14 @@ router = APIRouter(
)
@router.get("", response_model=List[FileItem])
-async def get_files(currentUser: Dict[str, Any] = Depends(auth.getCurrentActiveUser)):
+@limiter.limit("30/minute")
+async def get_files(currentUser: Dict[str, Any] = Depends(getCurrentUser)):
"""Get all available files"""
try:
- interfaceLucydom = lucydomInterface.getInterface(currentUser)
+ managementInterface = serviceManagementClass.getInterface(currentUser)
# Get all files generically - only metadata, no binary data
- files = interfaceLucydom.getAllFiles()
+ files = managementInterface.getAllFiles()
return [FileItem(**file) for file in files]
except Exception as e:
logger.error(f"Error retrieving files: {str(e)}")
@@ -49,39 +51,40 @@ async def get_files(currentUser: Dict[str, Any] = Depends(auth.getCurrentActiveU
)
@router.post("/upload", status_code=status.HTTP_201_CREATED)
+@limiter.limit("10/minute")
async def upload_file(
file: UploadFile = File(...),
workflowId: Optional[str] = Form(None),
- currentUser: Dict[str, Any] = Depends(auth.getCurrentActiveUser)
+ currentUser: Dict[str, Any] = Depends(getCurrentUser)
):
"""Upload a file"""
try:
- interfaceLucydom = lucydomInterface.getInterface(currentUser)
+ managementInterface = serviceManagementClass.getInterface(currentUser)
# Read file
fileContent = await file.read()
# Check size limits
- maxSize = int(lucydomInterface.APP_CONFIG.get("File_Management_MAX_UPLOAD_SIZE_MB")) * 1024 * 1024 # in bytes
+ maxSize = int(serviceManagementClass.APP_CONFIG.get("File_Management_MAX_UPLOAD_SIZE_MB")) * 1024 * 1024 # in bytes
if len(fileContent) > maxSize:
raise HTTPException(
status_code=status.HTTP_413_REQUEST_ENTITY_TOO_LARGE,
- detail=f"File too large. Maximum size: {lucydomInterface.APP_CONFIG.get('File_Management_MAX_UPLOAD_SIZE_MB')}MB"
+ detail=f"File too large. Maximum size: {serviceManagementClass.APP_CONFIG.get('File_Management_MAX_UPLOAD_SIZE_MB')}MB"
)
# Save file via LucyDOM interface in the database
- fileMeta = interfaceLucydom.saveUploadedFile(fileContent, file.filename)
+ fileMeta = managementInterface.saveUploadedFile(fileContent, file.filename)
# If workflowId is provided, update the file information
if workflowId:
updateData = {"workflowId": workflowId}
- interfaceLucydom.updateFile(fileMeta["id"], updateData)
+ managementInterface.updateFile(fileMeta["id"], updateData)
fileMeta["workflowId"] = workflowId
# Successful response
return fileMeta
- except lucydomInterface.FileStorageError as e:
+ except serviceManagementClass.FileStorageError as e:
logger.error(f"Error during file upload (storage): {str(e)}")
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
@@ -95,16 +98,17 @@ async def upload_file(
)
@router.get("/{fileId}")
+@limiter.limit("30/minute")
async def get_file(
fileId: str,
- currentUser: Dict[str, Any] = Depends(auth.getCurrentActiveUser)
+ currentUser: Dict[str, Any] = Depends(getCurrentUser)
):
"""Returns a file by its ID for download"""
try:
- interfaceLucydom = lucydomInterface.getInterface(currentUser)
+ managementInterface = serviceManagementClass.getInterface(currentUser)
# Get file via LucyDOM interface from the database
- fileData = interfaceLucydom.downloadFile(fileId)
+ fileData = managementInterface.downloadFile(fileId)
# Return file
headers = {
@@ -116,19 +120,19 @@ async def get_file(
headers=headers
)
- except lucydomInterface.FileNotFoundError as e:
+ except serviceManagementClass.FileNotFoundError as e:
logger.warning(f"File not found: {str(e)}")
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail=str(e)
)
- except lucydomInterface.FilePermissionError as e:
+ except serviceManagementClass.FilePermissionError as e:
logger.warning(f"No permission for file: {str(e)}")
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail=str(e)
)
- except lucydomInterface.FileError as e:
+ except serviceManagementClass.FileError as e:
logger.error(f"Error retrieving file: {str(e)}")
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
@@ -142,19 +146,20 @@ async def get_file(
)
@router.put("/{file_id}", response_model=FileItem)
+@limiter.limit("10/minute")
async def update_file(
file_id: str,
file_data: FileItem,
- currentUser: Dict[str, Any] = Depends(auth.getCurrentActiveUser)
+ currentUser: Dict[str, Any] = Depends(getCurrentUser)
):
"""
Update file metadata
"""
try:
- interfaceLucydom = lucydomInterface.getInterface(currentUser)
+ managementInterface = serviceManagementClass.getInterface(currentUser)
# Get the file from the database
- file = interfaceLucydom.getFile(file_id)
+ file = managementInterface.getFile(file_id)
if not file:
raise HTTPException(status_code=404, detail="File not found")
@@ -166,12 +171,12 @@ async def update_file(
update_data = file_data.model_dump()
# Update the file
- result = interfaceLucydom.updateFile(file_id, update_data)
+ result = managementInterface.updateFile(file_id, update_data)
if not result:
raise HTTPException(status_code=500, detail="Failed to update file")
# Get updated file and convert to FileItem
- updatedFile = interfaceLucydom.getFile(file_id)
+ updatedFile = managementInterface.getFile(file_id)
return FileItem(**updatedFile)
except HTTPException as he:
@@ -181,33 +186,34 @@ async def update_file(
raise HTTPException(status_code=500, detail=str(e))
@router.delete("/{fileId}", status_code=status.HTTP_204_NO_CONTENT)
+@limiter.limit("10/minute")
async def delete_file(
fileId: str,
- currentUser: Dict[str, Any] = Depends(auth.getCurrentActiveUser)
+ currentUser: Dict[str, Any] = Depends(getCurrentUser)
):
"""Deletes a file by its ID from the database"""
try:
- interfaceLucydom = lucydomInterface.getInterface(currentUser)
+ managementInterface = serviceManagementClass.getInterface(currentUser)
# Delete file via LucyDOM interface
- interfaceLucydom.deleteFile(fileId)
+ managementInterface.deleteFile(fileId)
# Return successful deletion without content (204 No Content)
return Response(status_code=status.HTTP_204_NO_CONTENT)
- except lucydomInterface.FileNotFoundError as e:
+ except serviceManagementClass.FileNotFoundError as e:
logger.warning(f"File not found: {str(e)}")
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail=str(e)
)
- except lucydomInterface.FilePermissionError as e:
+ except serviceManagementClass.FilePermissionError as e:
logger.warning(f"No permission to delete file: {str(e)}")
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail=str(e)
)
- except lucydomInterface.FileDeletionError as e:
+ except serviceManagementClass.FileDeletionError as e:
logger.error(f"Error deleting file: {str(e)}")
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
@@ -221,15 +227,16 @@ async def delete_file(
)
@router.get("/stats", response_model=Dict[str, Any])
+@limiter.limit("30/minute")
async def get_file_stats(
- currentUser: Dict[str, Any] = Depends(auth.getCurrentActiveUser)
+ currentUser: Dict[str, Any] = Depends(getCurrentUser)
):
"""Returns statistics about the stored files"""
try:
- interfaceLucydom = lucydomInterface.getInterface(currentUser)
+ managementInterface = serviceManagementClass.getInterface(currentUser)
# Get all files - metadata only
- allFiles = interfaceLucydom.getAllFiles()
+ allFiles = managementInterface.getAllFiles()
# Calculate statistics
totalFiles = len(allFiles)
@@ -254,4 +261,19 @@ async def get_file_stats(
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"Error retrieving file statistics: {str(e)}"
- )
\ No newline at end of file
+ )
+
+@router.get("/attributes", response_model=List[AttributeDefinition])
+@limiter.limit("30/minute")
+async def get_file_attributes(
+ currentUser: Dict[str, Any] = Depends(getCurrentUser)
+):
+ """
+ Retrieves the attribute definitions for files.
+ This can be used for dynamic form generation.
+
+ Returns:
+ - A list of attribute definitions that can be used to generate forms
+ """
+ # Get attributes from the FileItem model class
+ return FileItem.getModelAttributeDefinitions()
\ No newline at end of file
diff --git a/modules/routes/routeMandates.py b/modules/routes/routeDataMandates.py
similarity index 66%
rename from modules/routes/routeMandates.py
rename to modules/routes/routeDataMandates.py
index d8bdea81..d6309cdb 100644
--- a/modules/routes/routeMandates.py
+++ b/modules/routes/routeDataMandates.py
@@ -1,33 +1,38 @@
-from fastapi import APIRouter, HTTPException, Depends, Body, Path, Request
+from fastapi import APIRouter, HTTPException, Depends, Body, Path, Request, Response
from typing import List, Dict, Any, Optional
from fastapi import status
import logging
# Import auth module
-import modules.security.auth as auth
+from modules.security.auth import limiter, getCurrentUser
# Import interfaces
-import modules.interfaces.gatewayInterface as gatewayInterface
-from modules.interfaces.gatewayModel import Mandate, getModelAttributes
+import modules.interfaces.serviceManagementClass as serviceManagementClass
+from modules.interfaces.serviceManagementModel import Mandate, getModelAttributeDefinitions
+
+# Import the model classes
+from modules.interfaces.serviceAppModel import AttributeDefinition
# Configure logger
logger = logging.getLogger(__name__)
# Model attributes for Mandate
-mandateAttributes = getModelAttributes(Mandate)
+mandateAttributes = getModelAttributeDefinitions(Mandate)
+# Create a router for the mandate endpoints
router = APIRouter(
prefix="/api/mandates",
- tags=["Mandates"],
+ tags=["Manage Mandates"],
responses={404: {"description": "Not found"}}
)
@router.get("/", response_model=List[Dict[str, Any]], tags=["Mandates"])
-async def get_mandates(currentUser: Dict[str, Any] = Depends(auth.getCurrentActiveUser)):
+@limiter.limit("30/minute")
+async def get_mandates(request: Request, currentUser: Dict[str, Any] = Depends(getCurrentUser)):
"""Get all mandates"""
try:
- interfaceGateway = gatewayInterface.getInterface(currentUser)
- return interfaceGateway.getMandates()
+ appInterface = serviceManagementClass.getInterface(currentUser)
+ return appInterface.getMandates()
except Exception as e:
logger.error(f"Error getting mandates: {str(e)}")
raise HTTPException(
@@ -36,14 +41,15 @@ async def get_mandates(currentUser: Dict[str, Any] = Depends(auth.getCurrentActi
)
@router.get("/{mandateId}", response_model=Dict[str, Any], tags=["Mandates"])
+@limiter.limit("30/minute")
async def get_mandate(
mandateId: str,
- currentUser: Dict[str, Any] = Depends(auth.getCurrentActiveUser)
+    request: Request, currentUser: Dict[str, Any] = Depends(getCurrentUser)
):
"""Get a specific mandate by ID"""
try:
- interfaceGateway = gatewayInterface.getInterface(currentUser)
- mandate = interfaceGateway.getMandateById(mandateId)
+ appInterface = serviceManagementClass.getInterface(currentUser)
+ mandate = appInterface.getMandateById(mandateId)
if not mandate:
raise HTTPException(
@@ -62,16 +68,17 @@ async def get_mandate(
)
@router.post("/", response_model=Mandate, tags=["Mandates"])
+@limiter.limit("10/minute")
async def create_mandate(
mandateData: Mandate,
- currentUser: Dict[str, Any] = Depends(auth.getCurrentActiveUser)
+    request: Request, currentUser: Dict[str, Any] = Depends(getCurrentUser)
):
"""Create a new mandate"""
try:
- interfaceGateway = gatewayInterface.getInterface(currentUser)
+ appInterface = serviceManagementClass.getInterface(currentUser)
try:
- createdMandate = interfaceGateway.createMandate(mandateData)
+ createdMandate = appInterface.createMandate(mandateData)
except ValueError as e:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
@@ -95,17 +102,18 @@ async def create_mandate(
)
@router.put("/{mandateId}", response_model=Mandate, tags=["Mandates"])
+@limiter.limit("10/minute")
async def update_mandate(
mandateId: str,
mandateData: Mandate,
- currentUser: Dict[str, Any] = Depends(auth.getCurrentActiveUser)
+    request: Request, currentUser: Dict[str, Any] = Depends(getCurrentUser)
):
"""Update an existing mandate"""
try:
- interfaceGateway = gatewayInterface.getInterface(currentUser)
+ appInterface = serviceManagementClass.getInterface(currentUser)
# Check if mandate exists
- existingMandate = interfaceGateway.getMandateById(mandateId)
+ existingMandate = appInterface.getMandateById(mandateId)
if not existingMandate:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
@@ -114,7 +122,7 @@ async def update_mandate(
# Update mandate data
try:
- updatedMandate = interfaceGateway.updateMandate(mandateId, mandateData)
+ updatedMandate = appInterface.updateMandate(mandateId, mandateData)
except ValueError as e:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
@@ -138,16 +146,17 @@ async def update_mandate(
)
@router.delete("/{mandateId}", response_model=Dict[str, Any], tags=["Mandates"])
+@limiter.limit("10/minute")
async def delete_mandate(
mandateId: str,
- currentUser: Dict[str, Any] = Depends(auth.getCurrentActiveUser)
+    request: Request, currentUser: Dict[str, Any] = Depends(getCurrentUser)
):
"""Delete a mandate"""
try:
- interfaceGateway = gatewayInterface.getInterface(currentUser)
+ appInterface = serviceManagementClass.getInterface(currentUser)
# Check if mandate exists
- existingMandate = interfaceGateway.getMandateById(mandateId)
+ existingMandate = appInterface.getMandateById(mandateId)
if not existingMandate:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
@@ -156,7 +165,7 @@ async def delete_mandate(
# Delete mandate
try:
- interfaceGateway.deleteMandate(mandateId)
+ appInterface.deleteMandate(mandateId)
except ValueError as e:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
@@ -171,4 +180,19 @@ async def delete_mandate(
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"Failed to delete mandate: {str(e)}"
- )
\ No newline at end of file
+ )
+
+@router.get("/attributes", response_model=List[AttributeDefinition])
+@limiter.limit("30/minute")
+async def get_mandate_attributes(
+    request: Request, currentUser: Dict[str, Any] = Depends(getCurrentUser)
+):
+ """
+ Retrieves the attribute definitions for mandates.
+ This can be used for dynamic form generation.
+
+ Returns:
+ - A list of attribute definitions that can be used to generate forms
+ """
+ # Get attributes from the Mandate model class
+ return Mandate.getModelAttributeDefinitions()
\ No newline at end of file
diff --git a/modules/routes/routePrompts.py b/modules/routes/routeDataPrompts.py
similarity index 56%
rename from modules/routes/routePrompts.py
rename to modules/routes/routeDataPrompts.py
index 9239995a..c6ba1a04 100644
--- a/modules/routes/routePrompts.py
+++ b/modules/routes/routeDataPrompts.py
@@ -5,64 +5,65 @@ from datetime import datetime
import logging
# Import auth module
-import modules.security.auth as auth
+from modules.security.auth import limiter, getCurrentUser
# Import interfaces
-import modules.interfaces.lucydomInterface as lucydomInterface
-from modules.interfaces.lucydomModel import Prompt, getModelAttributes
+import modules.interfaces.serviceManagementClass as serviceManagementClass
+from modules.interfaces.serviceManagementModel import Prompt
+from modules.interfaces.serviceAppModel import AttributeDefinition
# Configure logger
logger = logging.getLogger(__name__)
-# Model attributes for Prompt
-promptAttributes = getModelAttributes(Prompt)
-
# Create router for prompt endpoints
router = APIRouter(
prefix="/api/prompts",
- tags=["Prompts"],
+ tags=["Manage Prompts"],
responses={404: {"description": "Not found"}}
)
@router.get("", response_model=List[Prompt])
+@limiter.limit("30/minute")
async def get_prompts(
- currentUser: Dict[str, Any] = Depends(auth.getCurrentActiveUser)
+ currentUser: Dict[str, Any] = Depends(getCurrentUser)
):
"""Get all prompts"""
- interfaceLucydom = lucydomInterface.getInterface(currentUser)
- prompts = interfaceLucydom.getAllPrompts()
+ managementInterface = serviceManagementClass.getInterface(currentUser)
+ prompts = managementInterface.getAllPrompts()
return [Prompt(**prompt) for prompt in prompts]
@router.post("", response_model=Prompt)
+@limiter.limit("10/minute")
async def create_prompt(
prompt: Prompt,
- currentUser: Dict[str, Any] = Depends(auth.getCurrentActiveUser)
+ currentUser: Dict[str, Any] = Depends(getCurrentUser)
):
"""Create a new prompt"""
- interfaceLucydom = lucydomInterface.getInterface(currentUser)
+ managementInterface = serviceManagementClass.getInterface(currentUser)
# Convert Prompt to dict for interface
prompt_data = prompt.model_dump()
# Create prompt
- newPrompt = interfaceLucydom.createPrompt(prompt_data)
+ newPrompt = managementInterface.createPrompt(prompt_data)
# Set current time for createdAt if it exists in the model
- if "createdAt" in promptAttributes and hasattr(newPrompt, "createdAt"):
+    if "createdAt" in Prompt.model_fields and "createdAt" not in newPrompt:
newPrompt["createdAt"] = datetime.now().isoformat()
return Prompt(**newPrompt)
@router.get("/{promptId}", response_model=Prompt)
+@limiter.limit("30/minute")
async def get_prompt(
promptId: str = Path(..., description="ID of the prompt"),
- currentUser: Dict[str, Any] = Depends(auth.getCurrentActiveUser)
+ currentUser: Dict[str, Any] = Depends(getCurrentUser)
):
"""Get a specific prompt"""
- interfaceLucydom = lucydomInterface.getInterface(currentUser)
+ managementInterface = serviceManagementClass.getInterface(currentUser)
# Get prompt
- prompt = interfaceLucydom.getPrompt(promptId)
+ prompt = managementInterface.getPrompt(promptId)
if not prompt:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
@@ -72,16 +73,17 @@ async def get_prompt(
return Prompt(**prompt)
@router.put("/{promptId}", response_model=Prompt)
+@limiter.limit("10/minute")
async def update_prompt(
promptId: str = Path(..., description="ID of the prompt to update"),
promptData: Prompt = Body(...),
- currentUser: Dict[str, Any] = Depends(auth.getCurrentActiveUser)
+ currentUser: Dict[str, Any] = Depends(getCurrentUser)
):
"""Update an existing prompt"""
- interfaceLucydom = lucydomInterface.getInterface(currentUser)
+ managementInterface = serviceManagementClass.getInterface(currentUser)
# Check if the prompt exists
- existingPrompt = interfaceLucydom.getPrompt(promptId)
+ existingPrompt = managementInterface.getPrompt(promptId)
if not existingPrompt:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
@@ -92,7 +94,7 @@ async def update_prompt(
update_data = promptData.model_dump()
# Update prompt
- updatedPrompt = interfaceLucydom.updatePrompt(promptId, update_data)
+ updatedPrompt = managementInterface.updatePrompt(promptId, update_data)
if not updatedPrompt:
raise HTTPException(
@@ -103,26 +105,42 @@ async def update_prompt(
return Prompt(**updatedPrompt)
@router.delete("/{promptId}", response_model=Dict[str, Any])
+@limiter.limit("10/minute")
async def delete_prompt(
promptId: str = Path(..., description="ID of the prompt to delete"),
- currentUser: Dict[str, Any] = Depends(auth.getCurrentActiveUser)
+ currentUser: Dict[str, Any] = Depends(getCurrentUser)
):
"""Delete a prompt"""
- interfaceLucydom = lucydomInterface.getInterface(currentUser)
+ managementInterface = serviceManagementClass.getInterface(currentUser)
# Check if the prompt exists
- existingPrompt = interfaceLucydom.getPrompt(promptId)
+ existingPrompt = managementInterface.getPrompt(promptId)
if not existingPrompt:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail=f"Prompt with ID {promptId} not found"
)
- success = interfaceLucydom.deletePrompt(promptId)
+ success = managementInterface.deletePrompt(promptId)
if not success:
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail="Error deleting the prompt"
)
- return {"message": f"Prompt with ID {promptId} successfully deleted"}
\ No newline at end of file
+ return {"message": f"Prompt with ID {promptId} successfully deleted"}
+
+@router.get("/attributes", response_model=List[AttributeDefinition])
+@limiter.limit("30/minute")
+async def get_prompt_attributes(
+ currentUser: Dict[str, Any] = Depends(getCurrentUser)
+):
+ """
+ Retrieves the attribute definitions for prompts.
+ This can be used for dynamic form generation.
+
+ Returns:
+ - A list of attribute definitions that can be used to generate forms
+ """
+ # Get attributes from the Prompt model class
+ return Prompt.getModelAttributeDefinitions()
\ No newline at end of file
diff --git a/modules/routes/routeUsers.py b/modules/routes/routeDataUsers.py
similarity index 64%
rename from modules/routes/routeUsers.py
rename to modules/routes/routeDataUsers.py
index cf65922c..0497fbeb 100644
--- a/modules/routes/routeUsers.py
+++ b/modules/routes/routeDataUsers.py
@@ -1,34 +1,36 @@
-from fastapi import APIRouter, HTTPException, Depends, Body, Path, Request
+from fastapi import APIRouter, HTTPException, Depends, Body, Path, Request, Response
from typing import List, Dict, Any, Optional
from fastapi import status
from datetime import datetime
import logging
+import inspect
+import importlib
+import os
+from pydantic import BaseModel
-# Import auth module
-import modules.security.auth as auth
+# Import interfaces and models
+import modules.interfaces.serviceManagementClass as serviceManagementClass
+from modules.security.auth import getCurrentUser, limiter
-# Import interfaces
-import modules.interfaces.gatewayInterface as gatewayInterface
-import modules.interfaces.gatewayModel as gatewayModel
+# Import the attribute definition and helper functions
+from modules.interfaces.serviceManagementModel import User, AttributeDefinition, getModelAttributeDefinitions
+from modules.interfaces.serviceAppModel import AttributeDefinition as ServiceAppAttributeDefinition
# Configure logger
logger = logging.getLogger(__name__)
-# Model attributes for User
-userAttributes = gatewayModel.getModelAttributes(gatewayModel.User)
-
router = APIRouter(
prefix="/api/users",
- tags=["Users"],
+ tags=["Manage Users"],
responses={404: {"description": "Not found"}}
)
@router.get("/", response_model=List[Dict[str, Any]], tags=["Users"])
-async def get_users(currentUser: Dict[str, Any] = Depends(auth.getCurrentActiveUser)):
+async def get_users(currentUser: Dict[str, Any] = Depends(getCurrentUser)):
"""Get all users in the current mandate"""
try:
- interfaceGateway = gatewayInterface.getInterface(currentUser)
- return interfaceGateway.getUsers()
+ appInterface = serviceManagementClass.getInterface(currentUser)
+ return appInterface.getUsers()
except Exception as e:
logger.error(f"Error getting users: {str(e)}")
raise HTTPException(
@@ -39,12 +41,12 @@ async def get_users(currentUser: Dict[str, Any] = Depends(auth.getCurrentActiveU
@router.get("/{userId}", response_model=Dict[str, Any], tags=["Users"])
async def get_user(
userId: str,
- currentUser: Dict[str, Any] = Depends(auth.getCurrentActiveUser)
+ currentUser: Dict[str, Any] = Depends(getCurrentUser)
):
"""Get a specific user by ID"""
try:
- interfaceGateway = gatewayInterface.getInterface(currentUser)
- user = interfaceGateway.getUserById(userId)
+ appInterface = serviceManagementClass.getInterface(currentUser)
+ user = appInterface.getUserById(userId)
if not user:
raise HTTPException(
@@ -62,19 +64,19 @@ async def get_user(
detail=f"Failed to get user: {str(e)}"
)
-@router.post("/", response_model=gatewayModel.User, tags=["Users"])
+@router.post("/", response_model=User, tags=["Users"])
async def create_user(
- userData: gatewayModel.User,
- currentUser: Dict[str, Any] = Depends(auth.getCurrentActiveUser)
+ userData: User,
+ currentUser: Dict[str, Any] = Depends(getCurrentUser)
):
"""Create a new user"""
try:
- # Get admin user for user creation
- interfaceRoot = gatewayInterface.getRootInterface()
+ # Get interface for user creation
+ appInterface = serviceManagementClass.getInterface(currentUser)
try:
# Convert User model to dict and pass to createUser
- createdUser = interfaceRoot.createUser(
+ createdUser = appInterface.createUser(
username=userData.username,
email=userData.email,
fullName=userData.fullName,
@@ -105,19 +107,19 @@ async def create_user(
detail=f"Failed to create user: {str(e)}"
)
-@router.put("/{userId}", response_model=gatewayModel.User, tags=["Users"])
+@router.put("/{userId}", response_model=User, tags=["Users"])
async def update_user(
userId: str,
- userData: gatewayModel.User,
- currentUser: Dict[str, Any] = Depends(auth.getCurrentActiveUser)
+ userData: User,
+ currentUser: Dict[str, Any] = Depends(getCurrentUser)
):
"""Update an existing user"""
try:
- # Get admin user for user updates
- interfaceGateway = gatewayInterface.getInterface(currentUser)
+ # Get interface for user updates
+ appInterface = serviceManagementClass.getInterface(currentUser)
# Check if user exists
- existingUser = interfaceGateway.getUserById(userId)
+ existingUser = appInterface.getUserById(userId)
if not existingUser:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
@@ -126,7 +128,7 @@ async def update_user(
# Update user data
try:
- updatedUser = interfaceGateway.updateUser(userId, userData)
+ updatedUser = appInterface.updateUser(userId, userData)
except ValueError as e:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
@@ -152,12 +154,12 @@ async def update_user(
@router.delete("/{userId}", response_model=Dict[str, Any], tags=["Users"])
async def delete_user(
userId: str,
- currentUser: Dict[str, Any] = Depends(auth.getCurrentActiveUser)
+ currentUser: Dict[str, Any] = Depends(getCurrentUser)
):
"""Delete a user"""
try:
- interfaceGateway = gatewayInterface.getInterface(currentUser)
- interfaceGateway.deleteUser(userId)
+ appInterface = serviceManagementClass.getInterface(currentUser)
+ appInterface.deleteUser(userId)
return {"message": f"User {userId} deleted successfully"}
except ValueError as e:
raise HTTPException(
@@ -170,3 +172,18 @@ async def delete_user(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"Failed to delete user: {str(e)}"
)
+
+@router.get("/attributes", response_model=List[ServiceAppAttributeDefinition])
+@limiter.limit("30/minute")
+async def get_user_attributes(
+    request: Request, currentUser: Dict[str, Any] = Depends(getCurrentUser)
+):
+ """
+ Retrieves the attribute definitions for users.
+ This can be used for dynamic form generation.
+
+ Returns:
+ - A list of attribute definitions that can be used to generate forms
+ """
+ # Get attributes from the User model class
+ return User.getModelAttributeDefinitions()
diff --git a/modules/routes/routeGeneral.py b/modules/routes/routeGeneral.py
deleted file mode 100644
index 7fb669ae..00000000
--- a/modules/routes/routeGeneral.py
+++ /dev/null
@@ -1,132 +0,0 @@
-from fastapi import APIRouter, HTTPException, Depends, Body, status, Response
-from fastapi.responses import FileResponse, HTMLResponse, RedirectResponse, JSONResponse
-from fastapi.security import OAuth2PasswordRequestForm
-from fastapi.staticfiles import StaticFiles
-from typing import Dict, Any, Optional
-from datetime import timedelta
-import pathlib
-import os
-import logging
-from pathlib import Path as FilePath
-
-from modules.shared.configuration import APP_CONFIG
-import modules.security.auth as auth
-import modules.interfaces.gatewayInterface as gatewayInterface
-
-router = APIRouter(
- prefix="",
- tags=["General"],
- responses={404: {"description": "Not found"}}
-)
-
-# Static folder setup - using absolute path from app root
-baseDir = FilePath(__file__).parent.parent.parent # Go up to gateway root
-staticFolder = baseDir / "static"
-os.makedirs(staticFolder, exist_ok=True)
-
-# Mount static files
-router.mount("/static", StaticFiles(directory=str(staticFolder), html=True), name="static")
-
-logger = logging.getLogger(__name__)
-
-@router.get("/", tags=["General"])
-async def root():
- """API status endpoint"""
- return {
- "status": "online",
- "message": "Data Platform API is active",
- "allowedOrigins": f"Allowed origins are {APP_CONFIG.get('APP_ALLOWED_ORIGINS')}"
- }
-
-@router.get("/api/environment", tags=["General"])
-async def get_environment():
- """Get environment configuration for frontend"""
- return {
- "apiBaseUrl": APP_CONFIG.get("APP_API_URL", ""),
- "environment": APP_CONFIG.get("APP_ENV", "development"),
- "instanceLabel": APP_CONFIG.get("APP_ENV_LABEL", "Development"),
- # Add other environment variables the frontend might need
- }
-
-@router.options("/{fullPath:path}", tags=["General"])
-async def options_route(fullPath: str):
- return Response(status_code=200)
-
-@router.post("/api/token", response_model=gatewayModel.Token, tags=["General"])
-async def login_for_access_token(
- formData: OAuth2PasswordRequestForm = Depends(),
- authority: str = "local",
- external_token: Optional[str] = None
-):
- """Get access token for user authentication"""
- # Create a new gateway interface instance with admin context
- interfaceRoot = gatewayInterface.getRootInterface()
-
- try:
- # Get token directly
- token = interfaceRoot.authenticateAndGetToken(
- username=formData.username,
- password=formData.password,
- authority=authority,
- external_token=external_token
- )
- return token
- except ValueError as e:
- # Handle authentication errors
- error_msg = str(e)
- logger.warning(f"Authentication failed for user {formData.username}: {error_msg}")
- raise HTTPException(
- status_code=status.HTTP_401_UNAUTHORIZED,
- detail=error_msg,
- headers={"WWW-Authenticate": "Bearer"},
- )
- except Exception as e:
- # Handle other errors
- error_msg = f"Login failed: {str(e)}"
- logger.error(f"Unexpected error during login for user {formData.username}: {error_msg}")
- raise HTTPException(
- status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
- detail=error_msg
- )
-
-@router.get("/api/user/me", response_model=Dict[str, Any], tags=["General"])
-async def read_user_me(currentUser: Dict[str, Any] = Depends(auth.getCurrentActiveUser)):
- return currentUser
-
-@router.post("/api/user/register", response_model=gatewayModel.User, tags=["General"])
-async def register_user(userData: gatewayModel.User):
- """Register a new user."""
- try:
- interfaceRoot = gatewayInterface.getRootInterface()
- return interfaceRoot.registerUser(userData.model_dump())
- except ValueError as e:
- raise HTTPException(
- status_code=status.HTTP_400_BAD_REQUEST,
- detail=str(e)
- )
- except Exception as e:
- logger.error(f"Error registering user: {str(e)}")
- raise HTTPException(
- status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
- detail=f"Failed to register user: {str(e)}"
- )
-
-@router.get("/api/user/available", response_model=Dict[str, Any], tags=["General"])
-async def check_username_availability(
- username: str,
- authenticationAuthority: str = "local"
-):
- """Check if a username is available for registration"""
- try:
- interfaceRoot = gatewayInterface.getRootInterface()
- return interfaceRoot.checkUsernameAvailability(username, authenticationAuthority)
- except Exception as e:
- logger.error(f"Error checking username availability: {str(e)}")
- raise HTTPException(
- status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
- detail=f"Failed to check username availability: {str(e)}"
- )
-
-@router.get("/favicon.ico", tags=["General"])
-async def favicon():
- return FileResponse(str(staticFolder / "favicon.ico"), media_type="image/x-icon")
diff --git a/modules/routes/routeGoogle.py b/modules/routes/routeGoogle.py
deleted file mode 100644
index bfd96f2b..00000000
--- a/modules/routes/routeGoogle.py
+++ /dev/null
@@ -1,322 +0,0 @@
-"""
-Routes for Google authentication.
-"""
-
-from fastapi import APIRouter, HTTPException, Depends, Request, Response, status, Cookie, Body
-from fastapi.responses import HTMLResponse, RedirectResponse, JSONResponse
-import logging
-import json
-from typing import Dict, Any, Optional, List
-from datetime import datetime, timedelta
-
-# Import auth module
-import modules.security.auth as auth
-
-# Import interfaces
-import modules.interfaces.googleInterface as googleInterface
-import modules.interfaces.gatewayInterface as gatewayInterface
-from modules.interfaces.googleModel import (
- GoogleToken,
- GoogleUserInfo,
- GoogleAuthStatus,
- GoogleTokenResponse,
- GoogleSaveTokenResponse
-)
-
-# Configure logger
-logger = logging.getLogger(__name__)
-
-# Create router for Google Auth endpoints
-router = APIRouter(
- prefix="/api/google",
- tags=["Google"],
- responses={
- 404: {"description": "Not found"},
- 400: {"description": "Bad request"},
- 401: {"description": "Unauthorized"},
- 403: {"description": "Forbidden"},
- 500: {"description": "Internal server error"}
- }
-)
-
-@router.get("/login")
-async def login():
- """Initiate Google login for the current user"""
- try:
- # Get Google interface with root context for initial setup
- google = googleInterface.getRootInterface()
-
- # Get login URL
- auth_url = google.initiateLogin()
- if not auth_url:
- raise HTTPException(
- status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
- detail="Failed to initiate Google login"
- )
-
- logger.info("Redirecting to Google login")
- return RedirectResponse(auth_url)
-
- except Exception as e:
- logger.error(f"Error initiating Google login: {str(e)}")
- raise HTTPException(
- status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
- detail=f"Failed to initiate Google login: {str(e)}"
- )
-
-@router.get("/auth/callback")
-async def auth_callback(code: str, state: str, request: Request):
- """Handle Google OAuth callback"""
- try:
- # Get Google interface with root context for initial setup
- google = googleInterface.getRootInterface()
-
- # Handle auth callback
- token_response = google.handleAuthCallback(code)
- if not token_response:
- return HTMLResponse(
- content="""
-
-
- Authentication Failed
-
-
-
-
Authentication Failed
-
Could not acquire access token.
-
-
-
- """,
- status_code=400
- )
-
- # Get gateway interface for user operations
- gateway = gatewayInterface.getRootInterface()
-
- # Check if user exists
- user = gateway.getUserByUsername(token_response.user_info["email"])
-
- # If user doesn't exist, create a new user in the default mandate
- if not user:
- try:
- # Get the root mandate ID
- rootMandateId = gateway.getInitialId("mandates")
- if not rootMandateId:
- raise ValueError("Root mandate not found")
-
- # Create new user with Google authentication
- user = gateway.createUser(
- username=token_response.user_info["email"],
- email=token_response.user_info["email"],
- fullName=token_response.user_info.get("name", token_response.user_info["email"]),
- mandateId=rootMandateId,
- authenticationAuthority="google"
- )
- logger.info(f"Created new user for Google account: {token_response.user_info['email']}")
-
- # Verify user was created by retrieving it
- user = gateway.getUserByUsername(token_response.user_info["email"])
- if not user:
- raise ValueError("Failed to retrieve created user")
-
- except Exception as e:
- logger.error(f"Failed to create user for Google account: {str(e)}")
- return HTMLResponse(
- content="""
-
-
- Registration Failed
-
-
-
-
-
-
-
- """
- )
-
- return response
-
- except Exception as e:
- logger.error(f"Error in auth callback: {str(e)}")
- raise HTTPException(
- status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
- detail=f"Authentication failed: {str(e)}"
- )
-
-@router.get("/status", response_model=GoogleAuthStatus)
-async def auth_status(currentUser: Dict[str, Any] = Depends(auth.getCurrentActiveUser)):
- """Check Google authentication status"""
- try:
- # For authenticated endpoints, use the current user's context
- google = googleInterface.getInterface(currentUser)
-
- # Get current user token and info
- user_info, access_token = google.getCurrentUserToken()
-
- if not user_info or not access_token:
- return GoogleAuthStatus(
- authenticated=False,
- message="Not authenticated with Google"
- )
-
- # Convert user_info to GoogleUserInfo model
- user_info_model = GoogleUserInfo(**user_info)
-
- return GoogleAuthStatus(
- authenticated=True,
- user=user_info_model
- )
-
- except Exception as e:
- logger.error(f"Error checking authentication status: {str(e)}")
- return GoogleAuthStatus(
- authenticated=False,
- message=f"Error checking authentication status: {str(e)}"
- )
-
-@router.get("/token", response_model=GoogleTokenResponse)
-async def get_token(currentUser: Dict[str, Any] = Depends(auth.getCurrentActiveUser)):
- """Get Google token for current user."""
- try:
- # For authenticated endpoints, use the current user's context
- google = googleInterface.getInterface(currentUser)
-
- # Get token
- token_data = google.getGoogleToken()
- if not token_data:
- raise HTTPException(
- status_code=status.HTTP_404_NOT_FOUND,
- detail="No token found"
- )
-
- # Convert to GoogleToken model
- token = GoogleToken(**token_data)
- return GoogleTokenResponse(token=token)
-
- except Exception as e:
- logger.error(f"Error getting token: {str(e)}")
- raise HTTPException(
- status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
- detail=str(e)
- )
-
-@router.post("/save-token", response_model=GoogleSaveTokenResponse)
-async def save_token(
- token_data: GoogleToken,
- currentUser: Dict[str, Any] = Depends(auth.getCurrentActiveUser)
-):
- """Save Google token data from frontend"""
- try:
- # For authenticated endpoints, use the current user's context
- google = googleInterface.getInterface(currentUser)
-
- # Save token
- success = google.saveGoogleToken(token_data.model_dump())
-
- if success:
- return GoogleSaveTokenResponse(
- success=True,
- message="Token saved successfully",
- token=token_data
- )
- else:
- raise HTTPException(
- status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
- detail="Failed to save token"
- )
-
- except Exception as e:
- logger.error(f"Error saving token: {str(e)}")
- raise HTTPException(
- status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
- detail=f"Error saving token: {str(e)}"
- )
-
-@router.post("/logout")
-async def logout(currentUser: Dict[str, Any] = Depends(auth.getCurrentActiveUser)):
- """Logout from Google"""
- try:
- # For authenticated endpoints, use the current user's context
- google = googleInterface.getInterface(currentUser)
-
- # Delete token
- success = google.deleteGoogleToken()
-
- if success:
- return JSONResponse({
- "message": "Successfully logged out from Google"
- })
- else:
- return JSONResponse({
- "message": "Failed to logout from Google"
- })
-
- except Exception as e:
- logger.error(f"Error during logout: {str(e)}")
- raise HTTPException(
- status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
- detail=f"Logout failed: {str(e)}"
- )
\ No newline at end of file
diff --git a/modules/routes/routeMsft.py b/modules/routes/routeMsft.py
deleted file mode 100644
index 514e0a1e..00000000
--- a/modules/routes/routeMsft.py
+++ /dev/null
@@ -1,322 +0,0 @@
-"""
-Routes for Microsoft authentication.
-"""
-
-from fastapi import APIRouter, HTTPException, Depends, Request, Response, status, Cookie, Body
-from fastapi.responses import HTMLResponse, RedirectResponse, JSONResponse
-import logging
-import json
-from typing import Dict, Any, Optional, List
-from datetime import datetime, timedelta
-
-# Import auth module
-import modules.security.auth as auth
-
-# Import interfaces
-import modules.interfaces.msftInterface as msftInterface
-import modules.interfaces.gatewayInterface as gatewayInterface
-from modules.interfaces.msftModel import (
- MsftToken,
- MsftUserInfo,
- MsftAuthStatus,
- MsftTokenResponse,
- MsftSaveTokenResponse
-)
-
-# Configure logger
-logger = logging.getLogger(__name__)
-
-# Create router for Microsoft Auth endpoints
-router = APIRouter(
- prefix="/api/msft",
- tags=["Microsoft"],
- responses={
- 404: {"description": "Not found"},
- 400: {"description": "Bad request"},
- 401: {"description": "Unauthorized"},
- 403: {"description": "Forbidden"},
- 500: {"description": "Internal server error"}
- }
-)
-
-@router.get("/login")
-async def login():
- """Initiate Microsoft login for the current user"""
- try:
- # Get Microsoft interface with root context for initial setup
- msft = msftInterface.getRootInterface()
-
- # Get login URL
- auth_url = msft.initiateLogin()
- if not auth_url:
- raise HTTPException(
- status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
- detail="Failed to initiate Microsoft login"
- )
-
- logger.info("Redirecting to Microsoft login")
- return RedirectResponse(auth_url)
-
- except Exception as e:
- logger.error(f"Error initiating Microsoft login: {str(e)}")
- raise HTTPException(
- status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
- detail=f"Failed to initiate Microsoft login: {str(e)}"
- )
-
-@router.get("/auth/callback")
-async def auth_callback(code: str, state: str, request: Request):
- """Handle Microsoft OAuth callback"""
- try:
- # Get Microsoft interface with root context for initial setup
- msft = msftInterface.getRootInterface()
-
- # Handle auth callback
- token_response = msft.handleAuthCallback(code)
- if not token_response:
- return HTMLResponse(
- content="""
-
-
- Authentication Failed
-
-
-
-
Authentication Failed
-
Could not acquire access token.
-
-
-
- """,
- status_code=400
- )
-
- # Get gateway interface for user operations
- gateway = gatewayInterface.getRootInterface()
-
- # Check if user exists
- user = gateway.getUserByUsername(token_response.user_info["email"])
-
- # If user doesn't exist, create a new user in the default mandate
- if not user:
- try:
- # Get the root mandate ID
- rootMandateId = gateway.getInitialId("mandates")
- if not rootMandateId:
- raise ValueError("Root mandate not found")
-
- # Create new user with Microsoft authentication
- user = gateway.createUser(
- username=token_response.user_info["email"],
- email=token_response.user_info["email"],
- fullName=token_response.user_info.get("name", token_response.user_info["email"]),
- mandateId=rootMandateId,
- authenticationAuthority="microsoft"
- )
- logger.info(f"Created new user for Microsoft account: {token_response.user_info['email']}")
-
- # Verify user was created by retrieving it
- user = gateway.getUserByUsername(token_response.user_info["email"])
- if not user:
- raise ValueError("Failed to retrieve created user")
-
- except Exception as e:
- logger.error(f"Failed to create user for Microsoft account: {str(e)}")
- return HTMLResponse(
- content="""
-
-
- Registration Failed
-
-
-
-
",
+ status_code=400
+ )
+
+ # Create token data
+ token_data = {
+ "access_token": token_response["access_token"],
+ "refresh_token": token_response.get("refresh_token", ""),
+ "token_type": token_response.get("token_type", "bearer"),
+ "expires_at": datetime.now().timestamp() + token_response.get("expires_in", 0)
+ }
+
+ # Save token data
+ appInterface = getInterface()
+ saveToken(appInterface, "Msft", token_data)
+
+ # Return success page with token data
+ return HTMLResponse(
+ content=f"""
+
+ Authentication Successful
+
+
+
+
+ """
+ )
+
+ except Exception as e:
+ logger.error(f"Error in auth callback: {str(e)}")
+ raise HTTPException(
+ status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+ detail=f"Authentication failed: {str(e)}"
+ )
+
@router.post("/logout")
@limiter.limit("30/minute")
async def logout(request: Request, currentUser: Dict[str, Any] = Depends(getCurrentUser)):
    """Logout from Microsoft.

    Revokes all locally stored sessions for the authenticated user.

    Raises:
        HTTPException: 500 if session revocation fails.
    """
    # slowapi's limiter requires the endpoint to accept a `request: Request`
    # argument; without it the decorated route raises at call time.
    try:
        # Get user interface
        appInterface = getInterface()

        # Revoke all sessions for the user.
        # NOTE(review): only local sessions are cleared here — the Microsoft
        # token itself is not revoked; confirm that is intended.
        appInterface.revokeAllUserSessions(currentUser.get("id"))

        return JSONResponse({
            "message": "Successfully logged out from Microsoft"
        })

    except Exception as e:
        logger.error(f"Error during logout: {str(e)}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Logout failed: {str(e)}"
        )
diff --git a/modules/routes/routeWorkflows.py b/modules/routes/routeWorkflows.py
index c8d5ab4c..8cea957c 100644
--- a/modules/routes/routeWorkflows.py
+++ b/modules/routes/routeWorkflows.py
@@ -10,33 +10,32 @@ from typing import List, Dict, Any, Optional
from fastapi import APIRouter, HTTPException, Depends, Body, Path, Query, Response, status
from datetime import datetime
-# Import auth module
-import modules.security.auth as auth
+# Import auth modules
+from modules.security.auth import limiter, getCurrentUser
# Import interfaces
-import modules.interfaces.lucydomInterface as lucydomInterface
-import modules.interfaces.msftInterface as msftInterface
-import modules.interfaces.googleInterface as googleInterface
+import modules.interfaces.serviceChatClass as serviceChatClass
# Import workflow manager
from modules.workflow.workflowManager import getWorkflowManager
# Import models
-from modules.interfaces.lucydomModel import (
+from modules.interfaces.serviceChatModel import (
ChatWorkflow,
ChatMessage,
ChatLog,
ChatStat,
ChatDocument,
UserInputRequest,
- getModelAttributes
+ Workflow,
+ getModelAttributeDefinitions
)
# Configure logger
logger = logging.getLogger(__name__)
# Model attributes for ChatWorkflow
-workflowAttributes = getModelAttributes(ChatWorkflow)
+workflowAttributes = getModelAttributeDefinitions(ChatWorkflow)
# Create router for workflow endpoints
router = APIRouter(
@@ -48,23 +47,21 @@ router = APIRouter(
def createServiceContainer(currentUser: Dict[str, Any]):
    """Create a service container with all required interfaces.

    Args:
        currentUser: Authenticated user record.

    Returns:
        An object exposing `user` (the user dict) and `functions`
        (the per-user chat interface) as attributes.
    """
    from types import SimpleNamespace  # stdlib; local import leaves module imports untouched

    # Get the chat interface bound to this user's context
    chatInterface = serviceChatClass.getInterface(currentUser)

    # A SimpleNamespace instance is the idiomatic replacement for the
    # previous dynamically built class object; attribute access by callers
    # (`service.user`, `service.functions`) is unchanged.
    return SimpleNamespace(user=currentUser, functions=chatInterface)
# API Endpoint for getting all workflows
@router.get("", response_model=List[ChatWorkflow])
+@limiter.limit("30/minute")
async def list_workflows(
- currentUser: Dict[str, Any] = Depends(auth.getCurrentActiveUser)
+ currentUser: Dict[str, Any] = Depends(getCurrentUser)
):
"""List all workflows for the current user."""
try:
@@ -83,10 +80,11 @@ async def list_workflows(
# State 1: Workflow Initialization endpoint
@router.post("/start", response_model=ChatWorkflow)
+@limiter.limit("10/minute")
async def start_workflow(
workflowId: Optional[str] = Query(None, description="Optional ID of the workflow to continue"),
userInput: UserInputRequest = Body(...),
- currentUser: Dict[str, Any] = Depends(auth.getCurrentActiveUser)
+ currentUser: Dict[str, Any] = Depends(getCurrentUser)
):
"""
Starts a new workflow or continues an existing one.
@@ -110,9 +108,10 @@ async def start_workflow(
# State 8: Workflow Stopped endpoint
@router.post("/{workflowId}/stop", response_model=ChatWorkflow)
+@limiter.limit("10/minute")
async def stop_workflow(
workflowId: str = Path(..., description="ID of the workflow to stop"),
- currentUser: Dict[str, Any] = Depends(auth.getCurrentActiveUser)
+ currentUser: Dict[str, Any] = Depends(getCurrentUser)
):
"""Stops a running workflow."""
try:
@@ -133,9 +132,10 @@ async def stop_workflow(
# State 11: Workflow Reset/Deletion endpoint
@router.delete("/{workflowId}", response_model=Dict[str, Any])
+@limiter.limit("10/minute")
async def delete_workflow(
workflowId: str = Path(..., description="ID of the workflow to delete"),
- currentUser: Dict[str, Any] = Depends(auth.getCurrentActiveUser)
+ currentUser: Dict[str, Any] = Depends(getCurrentUser)
):
"""Deletes a workflow and its associated data."""
try:
@@ -181,9 +181,10 @@ async def delete_workflow(
# API Endpoint for workflow status
@router.get("/{workflowId}/status", response_model=ChatWorkflow)
+@limiter.limit("30/minute")
async def get_workflow_status(
workflowId: str = Path(..., description="ID of the workflow"),
- currentUser: Dict[str, Any] = Depends(auth.getCurrentActiveUser)
+ currentUser: Dict[str, Any] = Depends(getCurrentUser)
):
"""Get the current status of a workflow."""
try:
@@ -210,10 +211,11 @@ async def get_workflow_status(
# API Endpoint for workflow logs with selective data transfer
@router.get("/{workflowId}/logs", response_model=List[ChatLog])
+@limiter.limit("30/minute")
async def get_workflow_logs(
workflowId: str = Path(..., description="ID of the workflow"),
logId: Optional[str] = Query(None, description="Optional log ID to get only newer logs"),
- currentUser: Dict[str, Any] = Depends(auth.getCurrentActiveUser)
+ currentUser: Dict[str, Any] = Depends(getCurrentUser)
):
"""Get logs for a workflow with support for selective data transfer."""
try:
@@ -251,10 +253,11 @@ async def get_workflow_logs(
# API Endpoint for workflow messages with selective data transfer
@router.get("/{workflowId}/messages", response_model=List[ChatMessage])
+@limiter.limit("30/minute")
async def get_workflow_messages(
workflowId: str = Path(..., description="ID of the workflow"),
messageId: Optional[str] = Query(None, description="Optional message ID to get only newer messages"),
- currentUser: Dict[str, Any] = Depends(auth.getCurrentActiveUser)
+ currentUser: Dict[str, Any] = Depends(getCurrentUser)
):
"""Get messages for a workflow with support for selective data transfer."""
try:
@@ -301,10 +304,11 @@ async def get_workflow_messages(
# Document Management Endpoints
@router.delete("/{workflowId}/messages/{messageId}", response_model=Dict[str, Any])
+@limiter.limit("10/minute")
async def delete_workflow_message(
workflowId: str = Path(..., description="ID of the workflow"),
messageId: str = Path(..., description="ID of the message to delete"),
- currentUser: Dict[str, Any] = Depends(auth.getCurrentActiveUser)
+ currentUser: Dict[str, Any] = Depends(getCurrentUser)
):
"""Delete a message from a workflow."""
try:
@@ -349,11 +353,12 @@ async def delete_workflow_message(
)
@router.delete("/{workflowId}/messages/{messageId}/files/{fileId}", response_model=Dict[str, Any])
+@limiter.limit("10/minute")
async def delete_file_from_message(
workflowId: str = Path(..., description="ID of the workflow"),
messageId: str = Path(..., description="ID of the message"),
fileId: str = Path(..., description="ID of the file to delete"),
- currentUser: Dict[str, Any] = Depends(auth.getCurrentActiveUser)
+ currentUser: Dict[str, Any] = Depends(getCurrentUser)
):
"""Delete a file reference from a message in a workflow."""
try:
@@ -395,9 +400,10 @@ async def delete_file_from_message(
# File preview and download routes
@router.get("/files/{fileId}/preview", response_model=ChatDocument)
+@limiter.limit("30/minute")
async def preview_file(
fileId: str = Path(..., description="ID of the file to preview"),
- currentUser: Dict[str, Any] = Depends(auth.getCurrentActiveUser)
+ currentUser: Dict[str, Any] = Depends(getCurrentUser)
):
"""Get file metadata and a preview of the file content."""
try:
@@ -489,9 +495,10 @@ async def preview_file(
)
@router.get("/files/{fileId}/download")
+@limiter.limit("30/minute")
async def download_file(
fileId: str = Path(..., description="ID of the file to download"),
- currentUser: Dict[str, Any] = Depends(auth.getCurrentActiveUser)
+ currentUser: Dict[str, Any] = Depends(getCurrentUser)
):
"""Download a file."""
try:
diff --git a/modules/security/auth.py b/modules/security/auth.py
index 8d2ecfc2..a2ad51b3 100644
--- a/modules/security/auth.py
+++ b/modules/security/auth.py
@@ -5,25 +5,33 @@ Handles JWT-based authentication, token generation, and user context.
from datetime import datetime, timedelta, timezone
from typing import Optional, Dict, Any, Tuple
-from fastapi import Depends, HTTPException, status
+from fastapi import Depends, HTTPException, status, Request
from fastapi.security import OAuth2PasswordBearer
from jose import JWTError, jwt
import logging
+from slowapi import Limiter
+from slowapi.util import get_remote_address
from modules.shared.configuration import APP_CONFIG
+from modules.interfaces.serviceAppClass import getRootInterface
+from modules.interfaces.serviceAppModel import Session, AuthEvent, UserPrivilege
# Get Config Data
SECRET_KEY = APP_CONFIG.get("APP_JWT_SECRET_SECRET")
ALGORITHM = APP_CONFIG.get("Auth_ALGORITHM")
ACCESS_TOKEN_EXPIRE_MINUTES = int(APP_CONFIG.get("APP_TOKEN_EXPIRY"))
+REFRESH_TOKEN_EXPIRE_DAYS = int(APP_CONFIG.get("APP_REFRESH_TOKEN_EXPIRY", "7"))
# OAuth2 Setup
oauth2Scheme = OAuth2PasswordBearer(tokenUrl="token")
+# Rate Limiter
+limiter = Limiter(key_func=get_remote_address)
+
# Logger
logger = logging.getLogger(__name__)
-def createAccessToken(data: dict, expiresDelta: Optional[timedelta] = None) -> str:
+def createAccessToken(data: dict, expiresDelta: Optional[timedelta] = None) -> Tuple[str, datetime]:
"""
Creates a JWT Access Token.
@@ -32,7 +40,7 @@ def createAccessToken(data: dict, expiresDelta: Optional[timedelta] = None) -> s
expiresDelta: Validity duration of the token (optional)
Returns:
- JWT Token as string
+ Tuple of (JWT Token as string, expiration datetime)
"""
toEncode = data.copy()
@@ -44,9 +52,27 @@ def createAccessToken(data: dict, expiresDelta: Optional[timedelta] = None) -> s
toEncode.update({"exp": expire})
encodedJwt = jwt.encode(toEncode, SECRET_KEY, algorithm=ALGORITHM)
- return encodedJwt
+ return encodedJwt, expire
-def _getCurrentUser(token: str = Depends(oauth2Scheme)) -> Dict[str, Any]:
def createRefreshToken(data: dict) -> Tuple[str, datetime]:
    """
    Build a signed JWT refresh token.

    Args:
        data: Claims to embed (usually user ID or username)

    Returns:
        Tuple of (encoded JWT string, expiration datetime in UTC)
    """
    expiry = datetime.now(timezone.utc) + timedelta(days=REFRESH_TOKEN_EXPIRE_DAYS)

    # Copy the caller's claims and stamp expiry plus the token type marker.
    claims = {**data, "exp": expiry, "type": "refresh"}
    token = jwt.encode(claims, SECRET_KEY, algorithm=ALGORITHM)

    return token, expiry
+
+def _getUserBase(token: str = Depends(oauth2Scheme)) -> Dict[str, Any]:
"""
Extracts and validates the current user from the JWT token.
@@ -75,11 +101,11 @@ def _getCurrentUser(token: str = Depends(oauth2Scheme)) -> Dict[str, Any]:
raise credentialsException
# Extract mandate ID and user ID from token
- _mandateId: str = payload.get("_mandateId")
- _userId: str = payload.get("_userId")
+ mandateId: str = payload.get("mandateId")
+ userId: str = payload.get("userId")
- if not _mandateId or not _userId:
- logger.error(f"Missing context in token: _mandateId={_mandateId}, _userId={_userId}")
+ if not mandateId or not userId:
+ logger.error(f"Missing context in token: mandateId={mandateId}, userId={userId}")
raise credentialsException
except JWTError:
@@ -87,10 +113,10 @@ def _getCurrentUser(token: str = Depends(oauth2Scheme)) -> Dict[str, Any]:
raise credentialsException
# Initialize Gateway Interface with context
- gateway = getRootInterface()
+ appInterface = getRootInterface()
# Retrieve user from database
- user = gateway.getUserByUsername(username)
+ user = appInterface.getUserByUsername(username)
if user is None:
logger.warning(f"User {username} not found")
@@ -101,27 +127,86 @@ def _getCurrentUser(token: str = Depends(oauth2Scheme)) -> Dict[str, Any]:
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="User is disabled")
# Ensure the user has the correct context
- if str(user.get("_mandateId")) != str(_mandateId) or str(user.get("id")) != str(_userId):
- logger.error(f"User context mismatch: token(_mandateId={_mandateId}, _userId={_userId}) vs user(_mandateId={user.get('_mandateId')}, id={user.get('id')})")
+ if str(user.get("mandateId")) != str(mandateId) or str(user.get("id")) != str(userId):
+ logger.error(f"User context mismatch: token(mandateId={mandateId}, userId={userId}) vs user(mandateId={user.get('mandateId')}, id={user.get('id')})")
raise credentialsException
- # Add authentication authority to user data
- user["authenticationAuthority"] = user.get("authenticationAuthority", "local")
-
return user
def getCurrentUser(currentUser: Dict[str, Any] = Depends(_getUserBase)) -> Dict[str, Any]:
    """Return the authenticated user, rejecting disabled accounts."""
    # NOTE(review): _getUserBase already raises 403 for disabled users;
    # this re-check is kept as defence in depth.
    disabled = currentUser.get("disabled", False)
    if disabled:
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="User is disabled"
        )
    return currentUser
+
def createUserSession(userId: str, tokenId: str, request: Request) -> Session:
    """Create and persist a new session record for the given user."""
    appInterface = getRootInterface()

    # Capture client metadata when the request carries it.
    clientHost = request.client.host if request.client else None
    agent = request.headers.get("user-agent")

    newSession = Session(
        userId=userId,
        tokenId=tokenId,
        expiresAt=datetime.now(timezone.utc) + timedelta(minutes=ACCESS_TOKEN_EXPIRE_MINUTES),
        ipAddress=clientHost,
        userAgent=agent
    )

    # Persist the session record
    appInterface.db.recordCreate("sessions", newSession.model_dump())

    return newSession
+
def logAuthEvent(userId: str, eventType: str, details: Dict[str, Any], request: Request) -> None:
    """Persist an authentication event (login, logout, failure, ...) for auditing."""
    appInterface = getRootInterface()

    # Record who did what, from where.
    authEvent = AuthEvent(
        userId=userId,
        eventType=eventType,
        details=details,
        ipAddress=request.client.host if request.client else None,
        userAgent=request.headers.get("user-agent")
    )

    appInterface.db.recordCreate("auth_events", authEvent.model_dump())
+
def validateSession(sessionId: str) -> bool:
    """Return True if the session exists and has not expired; refresh its activity stamp."""
    appInterface = getRootInterface()

    matches = appInterface.db.getRecordset("sessions", recordFilter={"id": sessionId})
    if not matches:
        return False

    record = matches[0]
    # NOTE(review): assumes expiresAt round-trips as a timezone-aware
    # datetime — comparing against a naive value would raise TypeError.
    if datetime.now(timezone.utc) > record["expiresAt"]:
        return False

    # Touch the session so idle tracking stays accurate.
    appInterface.db.recordModify("sessions", sessionId, {
        "lastActivity": datetime.now(timezone.utc)
    })

    return True
+
def revokeSession(sessionId: str) -> None:
    """Invalidate a single session by removing its record."""
    getRootInterface().db.recordDelete("sessions", sessionId)
+
def revokeAllUserSessions(userId: str) -> None:
    """Invalidate every session belonging to the given user."""
    appInterface = getRootInterface()

    # Fetch and remove each of the user's session records.
    for record in appInterface.db.getRecordset("sessions", recordFilter={"userId": userId}):
        appInterface.db.recordDelete("sessions", record["id"])
diff --git a/modules/shared/attributeUtils.py b/modules/shared/attributeUtils.py
new file mode 100644
index 00000000..f6db84aa
--- /dev/null
+++ b/modules/shared/attributeUtils.py
@@ -0,0 +1,154 @@
+"""
+Shared utilities for model attributes and labels.
+"""
+
import importlib
import inspect
import os
from typing import Any, ClassVar, Dict, List, Optional, Type

from pydantic import BaseModel, Field
+
class BaseModelWithUI(BaseModel):
    """Base model class with UI support and common functionality"""

    @classmethod
    def get_ui_schema(cls) -> Dict[str, Any]:
        """Get UI schema for frontend"""
        return {
            "fields": cls.fieldLabels if hasattr(cls, 'fieldLabels') else {},
            "validations": cls.get_validations() if hasattr(cls, 'get_validations') else {}
        }

    def to_dict(self) -> Dict[str, Any]:
        """Convert to dictionary with proper validation"""
        return self.model_dump()

    @classmethod
    def from_dict(cls, data: Dict[str, Any]) -> 'BaseModelWithUI':
        """Create instance from dictionary with validation"""
        return cls(**data)

    @classmethod
    def getModelAttributeDefinitions(cls) -> Dict[str, Any]:
        """
        Get attribute definitions for this model class.
        Override this method in model classes to provide custom attribute definitions.

        Returns:
            Dict[str, Any]: Dictionary of attribute definitions
        """
        definitions: Dict[str, Any] = {}
        for name, field in cls.model_fields.items():
            annotation = field.annotation
            typeName = annotation.__name__ if hasattr(annotation, "__name__") else str(annotation)
            # FieldInfo.description defaults to None (so hasattr alone is not
            # enough); normalise to "" so consumers always get a string.
            description = (field.description or "") if hasattr(field, "description") else ""
            if hasattr(cls, "fieldLabels"):
                label = cls.fieldLabels.get(name, Label(default=name)).getLabel()
            else:
                label = name
            definitions[name] = {
                "type": typeName,
                "required": field.is_required() if hasattr(field, "is_required") else True,
                "description": description,
                "label": label
            }
        return definitions
+
def getModelAttributes(modelClass):
    """
    Return the non-callable, public attribute names of a model class.

    Args:
        modelClass: The model class to inspect

    Returns:
        List[str]: Sorted (dir order) list of attribute names, excluding
        callables, underscore-prefixed names, and ORM bookkeeping names.
    """
    excluded = ('metadata', 'query', 'query_class', 'label', 'field_labels')
    names = []
    for attr in dir(modelClass):
        if attr.startswith('_') or attr in excluded:
            continue
        if callable(getattr(modelClass, attr)):
            continue
        names.append(attr)
    return names
+
class Label(BaseModel):
    """
    Label for an attribute or a class with support for multiple languages.

    Attributes:
        default: Default label text
        translations: Dictionary of translations for different languages
    """
    default: str = Field(..., description="Default label text")
    translations: Dict[str, str] = Field(default_factory=dict, description="Translations for different languages")

    class Config:
        title = "Label"
        description = "A label with support for multiple languages"
        # This module uses pydantic v2 APIs (model_dump, model_fields);
        # v2 reads `json_schema_extra` — the old `schema_extra` key is ignored.
        json_schema_extra = {
            "example": {
                "default": "Document",
                "translations": {
                    "en": "Document",
                    "fr": "Document"
                }
            }
        }

    def getLabel(self, language: Optional[str] = None) -> str:
        """
        Returns the label in the specified language, or the default value if not available.

        Args:
            language: Language code to get the label for

        Returns:
            str: Label text in the specified language or default
        """
        if language and language in self.translations:
            return self.translations[language]
        return self.default
+
def getModelClasses() -> Dict[str, Type[BaseModel]]:
    """
    Discover all pydantic model classes in the interface model modules.

    Scans modules/interfaces for files ending in 'Model.py', imports each
    one, and collects every BaseModel subclass it defines or re-exports.

    Returns:
        Dict[str, Type[BaseModel]]: Mapping of class name to class object
    """
    discovered: Dict[str, Type[BaseModel]] = {}

    # Resolve the sibling interfaces directory relative to this file.
    interfacesDir = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'interfaces')

    for filename in os.listdir(interfacesDir):
        if not filename.endswith('Model.py'):
            continue

        # e.g. serviceAppModel.py -> modules.interfaces.serviceAppModel
        module = importlib.import_module(f'modules.interfaces.{filename[:-3]}')

        # Collect every BaseModel subclass exposed by the module.
        for name, obj in inspect.getmembers(module):
            if inspect.isclass(obj) and issubclass(obj, BaseModel) and obj is not BaseModel:
                discovered[name] = obj

    return discovered
+
def getModelAttributeDefinitions(modelClass: Type[BaseModel] = None, userLanguage: str = "en") -> Dict[str, Any]:
    """
    Get attribute definitions for model classes.

    If modelClass is provided, returns attributes for that specific class;
    if not, returns attributes for all discovered model classes.

    NOTE(review): when modelClass is given this actually returns a plain
    list of attribute names (via getModelAttributes), not a dict as the
    annotation suggests, and userLanguage is currently unused — confirm
    callers' expectations before changing either.

    Args:
        modelClass: Optional specific model class to get attributes for
        userLanguage: Language code for translations (default: "en")

    Returns:
        Attribute names for one class, or a dict mapping each model class
        name to its attribute names.
    """
    if modelClass is not None:
        return getModelAttributes(modelClass)

    # No class given: enumerate every model module's classes.
    return {
        name: getModelAttributes(cls)
        for name, cls in getModelClasses().items()
    }
\ No newline at end of file
diff --git a/modules/shared/defAttributes.py b/modules/shared/defAttributes.py
deleted file mode 100644
index 39fc21a1..00000000
--- a/modules/shared/defAttributes.py
+++ /dev/null
@@ -1,123 +0,0 @@
-from pydantic import BaseModel, Field
-from typing import List, Dict, Any, Optional
-
-# Define the model for attribute definitions
-class AttributeDefinition(BaseModel):
- name: str
- label: str
- type: str
- required: bool = False
- placeholder: Optional[str] = None
- defaultValue: Optional[Any] = None
- options: Optional[List[Dict[str, Any]]] = None
- editable: bool = True
- visible: bool = True
- order: int = 0
- validation: Optional[Dict[str, Any]] = None
- helpText: Optional[str] = None
-
-# Helper classes for type mapping
-typeMappings = {
- "int": "number",
- "str": "string",
- "float": "number",
- "bool": "boolean",
- "List[int]": "array",
- "List[str]": "array",
- "Dict[str, Any]": "object",
- "Optional[str]": "string",
- "Optional[int]": "number",
- "Optional[Dict[str, Any]]": "object"
-}
-
-# Special field types based on naming conventions
-specialFieldTypes = {
- "content": "textarea",
- "description": "textarea",
- "instructions": "textarea",
- "password": "password",
- "email": "email",
- "workspaceId": "select",
- "agentId": "select",
- "type": "select"
-}
-
-# Function to convert a Pydantic model into attribute definitions
-def getModelAttributes(modelClass, userLanguage="de"):
- """
- Converts a Pydantic model into a list of AttributeDefinition objects
- """
- attributes = []
-
- # Go through all fields in the model
- for i, (fieldName, field) in enumerate(modelClass.__fields__.items()):
- # Skip internal fields
- if fieldName.startswith('_') or fieldName in ["label", "fieldLabels"]:
- continue
-
- # Determine the field type
- fieldType = typeMappings.get(str(field.type_), "string")
-
- # Check for special field types
- if fieldName in specialFieldTypes:
- fieldType = specialFieldTypes[fieldName]
-
- # Get the label (if available)
- fieldLabel = fieldName.replace('_', ' ').capitalize()
- if hasattr(modelClass, 'fieldLabels') and fieldName in modelClass.fieldLabels:
- labelObj = modelClass.fieldLabels[fieldName]
- fieldLabel = labelObj.getLabel(userLanguage)
-
- # Determine default values and required status
- required = field.required
- defaultValue = field.default if not field.required else None
-
- # Check for validation rules
- validation = None
- if field.validators:
- validation = {"hasValidators": True}
-
- # Placeholder text
- placeholder = f"Please enter {fieldLabel}"
-
- # Special options for Select fields
- options = None
- if fieldType == "select":
- if fieldName == "type" and modelClass.__name__ == "Agent":
- options = [
- {"value": "Analysis", "label": "Analysis"},
- {"value": "Transformation", "label": "Transformation"},
- {"value": "Generation", "label": "Generation"},
- {"value": "Classification", "label": "Classification"},
- {"value": "Custom", "label": "Custom"}
- ]
-
- # Extract description from Field object
- description = None
- # Try to get description from various possible sources
- if hasattr(field, 'field_info') and hasattr(field.field_info, 'description'):
- description = field.field_info.description
- elif hasattr(field, 'description'):
- description = field.description
- elif hasattr(field, 'schema') and hasattr(field.schema, 'description'):
- description = field.schema.description
-
- # Create attribute definition
- attrDef = AttributeDefinition(
- name=fieldName,
- label=fieldLabel,
- type=fieldType,
- required=required,
- placeholder=placeholder,
- defaultValue=defaultValue,
- options=options,
- editable=fieldName not in ["id", "_mandateId", "_userId", "uploadDate", "_createdAt", "_modifiedAt"],
- visible=fieldName not in ["hashedPassword", "_mandateId", "_userId"],
- order=i,
- validation=validation,
- helpText=description or "" # Set empty string as default value if no description found
- )
-
- attributes.append(attrDef)
-
- return attributes
\ No newline at end of file
diff --git a/modules/workflow/agentBase.py b/modules/workflow/agentBase.py
index 1498346e..a8bc33ae 100644
--- a/modules/workflow/agentBase.py
+++ b/modules/workflow/agentBase.py
@@ -10,7 +10,7 @@ import uuid
from datetime import datetime
from typing import Dict, Any, List, Optional
from modules.shared.mimeUtils import isTextMimeType, determineContentEncoding
-from modules.interfaces.lucydomModel import ChatContent
+from modules.interfaces.serviceChatModel import ChatContent
logger = logging.getLogger(__name__)
@@ -26,27 +26,8 @@ class AgentBase:
self.label = "Base Agent"
self.description = "Base agent functionality"
self.capabilities = []
- self.workflowManager = None
self.service = None
- def setWorkflowManager(self, workflowManager):
- """
- Set the workflow manager reference and validate service container.
-
- Args:
- workflowManager: The workflow manager instance
- """
- if not workflowManager:
- logger.warning("Attempted to set null workflow manager")
- return False
-
- self.workflowManager = workflowManager
-
- # Set service reference from workflow manager if available
- if hasattr(workflowManager, 'service'):
- return self.setService(workflowManager.service)
- return False
-
def setService(self, service):
"""
Set the service container reference and validate required interfaces.
@@ -111,7 +92,7 @@ class AgentBase:
- documents: List of document objects created by the agent,
each containing a "base64Encoded" flag in addition to "label" and "content"
"""
- # Validate service and workflow manager
+ # Validate service manager
if not self.service:
logger.error("Service container not initialized")
return {
@@ -119,13 +100,6 @@ class AgentBase:
"documents": []
}
- if not self.workflowManager:
- logger.error("Workflow manager not initialized")
- return {
- "feedback": "Error: Workflow manager not initialized",
- "documents": []
- }
-
# Base implementation - should be overridden by specialized agents
logger.warning(f"Agent {self.name} is using the default implementation of processTask")
return {
diff --git a/modules/workflow/agentRegistry.py b/modules/workflow/agentRegistry.py
index 8c11bfa0..7677f9e7 100644
--- a/modules/workflow/agentRegistry.py
+++ b/modules/workflow/agentRegistry.py
@@ -6,7 +6,7 @@ import os
import logging
import importlib
from typing import Dict, Any, List, Optional
-from .agentBase import AgentBase
+from modules.workflow.agentBase import AgentBase
logger = logging.getLogger(__name__)
@@ -27,10 +27,10 @@ class AgentRegistry:
if AgentRegistry._instance is not None:
raise RuntimeError("Singleton instance already exists - use getInstance()")
- self.agents = {}
+ self.agents: Dict[str, AgentBase] = {}
self._loadAgents()
- def initialize(self, service=None, workflowManager=None):
+ def initialize(self, service=None):
"""Initialize or update the registry with workflow manager and service references."""
if service:
# Validate required interfaces
@@ -44,10 +44,8 @@ class AgentRegistry:
logger.warning(f"Service container missing required interfaces: {', '.join(missing_interfaces)}")
return False
- # Initialize agents with service and workflow manager
+ # Initialize agents with service
for agent in self.agents.values():
- if workflowManager and hasattr(agent, 'setWorkflowManager'):
- agent.setWorkflowManager(workflowManager)
if service and hasattr(agent, 'setService'):
agent.setService(service)
@@ -132,9 +130,14 @@ class AgentRegistry:
logger.error(f"Agent with identifier '{agentIdentifier}' not found")
return None
- def getAllAgents(self) -> Dict[str, Any]:
- """Return all registered agents."""
- return self.agents
+ def getAllAgents(self) -> Dict[str, AgentBase]:
+ """
+ Get all registered agents.
+
+ Returns:
+ Dictionary mapping agent names to agent instances
+ """
+ return self.agents.copy()
def getAgentInfos(self) -> List[Dict[str, Any]]:
"""Return information about all registered agents."""
diff --git a/modules/workflow/documentProcessor.py b/modules/workflow/documentProcessor.py
index e36476ff..bd099128 100644
--- a/modules/workflow/documentProcessor.py
+++ b/modules/workflow/documentProcessor.py
@@ -8,7 +8,7 @@ import os
import io
from typing import Dict, Any, List, Optional, Union, Tuple
import base64
-from modules.interfaces.lucydomModel import ChatContent
+from modules.interfaces.serviceChatModel import ChatContent
# Configure logger
logger = logging.getLogger(__name__)
diff --git a/modules/workflow/workflowManager.py b/modules/workflow/workflowManager.py
index 43394328..005815c6 100644
--- a/modules/workflow/workflowManager.py
+++ b/modules/workflow/workflowManager.py
@@ -17,7 +17,10 @@ from modules.shared.mimeUtils import isTextMimeType
# Required imports
from modules.workflow.agentRegistry import getAgentRegistry
from modules.workflow.documentProcessor import getDocumentContents
-from modules.interfaces.lucydomModel import UserInputRequest, ChatWorkflow, ChatMessage, ChatLog
+from modules.interfaces.serviceChatModel import (
+ UserInputRequest, ChatWorkflow, ChatMessage, ChatLog,
+ ChatDocument, ChatStat, Workflow
+)
# Configure logger
logger = logging.getLogger(__name__)
@@ -34,6 +37,7 @@ GLOBAL_WORKFLOW_LABELS = {
"failed": "Error in workflow"
}
}
+
class WorkflowStoppedException(Exception):
"""Exception raised when a workflow is forcibly stopped with function checkExitCriteria() """
pass
@@ -45,70 +49,20 @@ class WorkflowManager:
"""Initialize the workflow manager with service container."""
# Store service container
self.service = service
-
- self._mandateId = service.base._mandateId
- self._userId = service.base._userId
+ self.service.logAdd = self.logAdd
self.agentRegistry = getAgentRegistry()
- self.agentRegistry.initialize(service=self.service, workflowManager=self)
-
- def workflowStart(self, workflowId: str, workflowData: dict) -> dict:
- """Start a new workflow with the given ID and data."""
- try:
- # Update the LucyDOM interface with current user context
- self.service.base._mandateId = self._mandateId
- self.service.base._userId = self._userId
-
- # Initialize workflow state
- workflowState = {
- 'workflowId': workflowId,
- 'status': 'running',
- 'startTime': datetime.now().isoformat(),
- 'currentStep': 0,
- 'steps': [],
- 'data': workflowData
- }
-
- # Get workflow definition
- workflowDef = self._getWorkflowDefinition(workflowId)
- if not workflowDef:
- raise ValueError(f"Workflow definition not found for ID: {workflowId}")
-
- # Initialize steps
- workflowState['steps'] = self._initializeSteps(workflowDef)
-
- # Start workflow execution
- self._executeWorkflow(workflowState)
-
- return workflowState
-
- except Exception as e:
- logger.error(f"Error starting workflow {workflowId}: {str(e)}")
- raise
-
- ### Workflow State Machine Implementation
+ self.agentRegistry.initialize(service=self.service)
async def workflowStart(self, userInput: UserInputRequest, workflowId: Optional[str] = None) -> ChatWorkflow:
"""Starts a new workflow or continues an existing one."""
- try:
- # Convert UserInputRequest to dict for processing
- userInputDict = userInput.model_dump()
-
- # Initialize or load workflow
- if workflowId:
- workflow = self.service.base.getWorkflow(workflowId)
- if not workflow:
- raise ValueError(f"Workflow {workflowId} not found")
- else:
- workflow = self.workflowInit()
-
- # Process the workflow
- return await self.workflowProcess(userInputDict, workflow)
-
- except Exception as e:
- logger.error(f"Error in workflowStart: {str(e)}")
- raise
-
- ### Forces exit
+ # 1. Initialize workflow or load existing one
+ workflow = self.workflowInit(workflowId)
+ self.logAdd(workflow, "Starting workflow processing", level="info", progress=0)
+
+ # Start asynchronous processing
+ asyncio.create_task(self.workflowProcess(userInput, workflow))
+
+ return workflow
def checkExitCriteria(self, workflow: ChatWorkflow) -> None:
"""
@@ -118,11 +72,11 @@ class WorkflowManager:
Args:
workflow: ChatWorkflow object to check
"""
- current_workflow = self.service.base.loadWorkflowState(workflow.id)
- if current_workflow["state"] in ["stopped", "failed"]:
- self.logAdd(workflow, f"Workflow processing terminated due to state: {current_workflow['state']}", level="info")
+ current_workflow = self.service.functions.loadWorkflowState(workflow.id)
+ if current_workflow["status"] in ["stopped", "failed"]:
+ self.logAdd(workflow, f"Workflow processing terminated due to status: {current_workflow['status']}", level="info")
# Raise an exception to stop execution
- raise WorkflowStoppedException(f"Workflow execution stopped due to state: {current_workflow['state']}")
+ raise WorkflowStoppedException(f"Workflow execution stopped due to status: {current_workflow['status']}")
async def workflowProcess(self, userInput: Dict[str, Any], workflow: ChatWorkflow) -> ChatWorkflow:
"""
@@ -154,17 +108,17 @@ class WorkflowManager:
# Get detected language and set it in the serviceBase interface
self.checkExitCriteria(workflow)
userLanguage = projectManagerResponse.get("userLanguage", "en")
- self.service.base.setUserLanguage(userLanguage)
+ self.service.functions.setUserLanguage(userLanguage)
# Save the response as a message in the workflow and add log entries
self.checkExitCriteria(workflow)
responseMessage = ChatMessage(
role="assistant",
agentName="Project Manager",
- content=objUserResponse,
+ message=objUserResponse,
status="step" # As per state machine specification
)
- self.messageAdd(workflow, responseMessage.model_dump())
+ self.messageAdd(workflow, responseMessage)
# Add detailed log entry about the task plan
taskPlanLog = "Input: "
@@ -232,7 +186,7 @@ class WorkflowManager:
self.logAdd(workflow, "Creating final response", level="info", progress=90)
finalMessage = await self.generateFinalMessage(objUserResponse, objFinalDocuments, objResults)
finalMessage.status = "last" # As per state machine specification
- self.messageAdd(workflow, finalMessage.model_dump())
+ self.messageAdd(workflow, finalMessage)
# State 7: Workflow Completion
self.checkExitCriteria(workflow)
@@ -240,25 +194,25 @@ class WorkflowManager:
# Update processing time
endTime = time.time()
- workflow.dataStats["processingTime"] = endTime - startTime
+ workflow.stats.processingTime = endTime - startTime
return workflow
except Exception as e:
# State 2: Workflow Exception
logger.error(f"Workflow processing error: {str(e)}", exc_info=True)
- workflow.state = "failed"
+ workflow.status = "failed"
workflow.lastActivity = datetime.now().isoformat()
# Update processing time even on error
endTime = time.time()
- workflow.dataStats["processingTime"] = endTime - startTime
+ workflow.stats.processingTime = endTime - startTime
# Update in database
- self.service.base.updateWorkflow(workflow.id, {
- "state": "failed",
+ self.service.functions.updateWorkflow(workflow.id, {
+ "status": "failed",
"lastActivity": workflow.lastActivity,
- "dataStats": workflow.dataStats
+ "stats": workflow.stats.model_dump()
})
self.logAdd(workflow, f"Workflow failed: {str(e)}", level="error", progress=100)
@@ -276,90 +230,80 @@ class WorkflowManager:
"""
currentTime = datetime.now().isoformat()
- workflowExist = self.service.base.getWorkflow(workflowId)
+ workflowExist = self.service.functions.getWorkflow(workflowId)
if workflowId is None or not workflowExist:
# Create new workflow
newWorkflowId = str(uuid.uuid4()) if workflowId is None else workflowId
workflow = ChatWorkflow(
id=newWorkflowId,
- _mandateId=self._mandateId,
- _userId=self._userId,
+ mandateId=self.functions.mandateId,
+ status="running",
name=f"Workflow {newWorkflowId[:8]}",
startedAt=currentTime,
messages=[], # Empty list - will be filled with references
- messageIds=[], # Initialize empty messageIds list
logs=[],
- dataStats={
- "bytesSent": 0,
- "bytesReceived": 0,
- "tokensUsed": 0,
- "processingTime": 0.0
- },
+ stats=ChatStat(
+ bytesSent=0,
+ bytesReceived=0,
+ tokensUsed=0,
+ processingTime=0.0
+ ),
currentRound=1,
- state="running",
lastActivity=currentTime,
)
# Save to database - only the workflow metadata
workflowDb = workflow.model_dump()
- self.service.base.createWorkflow(workflowDb)
+ self.service.functions.createWorkflow(workflowDb)
self.logAdd(workflow, GLOBAL_WORKFLOW_LABELS["workflowStatusMessages"]["init"], level="info", progress=0)
logger.debug(f"CHECK DATA {workflow}")
return workflow
else:
# State 10: Workflow Resumption - Load existing workflow
- workflow = self.service.base.loadWorkflowState(workflowId)
+ workflow = self.service.functions.loadWorkflowState(workflowId)
workflow = ChatWorkflow(**workflow)
- # Ensure messageIds exists
- if not workflow.messageIds:
- # Initialize from existing messages
- workflow.messageIds = [msg["id"] for msg in workflow.messages]
-
- # Update in database
- self.service.base.updateWorkflow(workflowId, {"messageIds": workflow.messageIds})
-
# Update status and increment round counter
- workflow.state = "running"
+ workflow.status = "running"
workflow.lastActivity = currentTime
# Increment currentRound if it exists, otherwise set it to 1
workflow.currentRound = (workflow.currentRound or 0) + 1
- # Ensure dataStats exists with correct field names
- if not workflow.dataStats:
- workflow.dataStats = {
- "bytesSent": 0,
- "bytesReceived": 0,
- "tokensUsed": 0,
- "processingTime": 0.0
- }
- elif "tokenCount" in workflow.dataStats:
+ # Ensure stats exists with correct field names
+ if not workflow.stats:
+ workflow.stats = ChatStat(
+ bytesSent=0,
+ bytesReceived=0,
+ tokensUsed=0,
+ processingTime=0.0
+ )
+ elif "tokenCount" in workflow.stats:
# Convert old tokenCount to tokensUsed if needed
- workflow.dataStats["tokensUsed"] = workflow.dataStats.pop("tokenCount", 0)
+ workflow.stats.tokensUsed = workflow.stats.pop("tokenCount", 0)
# Update in database - only the relevant workflow fields
workflowUpdate = {
- "state": workflow.state,
+ "status": workflow.status,
"lastActivity": workflow.lastActivity,
"currentRound": workflow.currentRound,
- "dataStats": workflow.dataStats # Include updated dataStats
+ "stats": workflow.stats.model_dump() # Include updated stats
}
- self.service.base.updateWorkflow(workflowId, workflowUpdate)
+ self.service.functions.updateWorkflow(workflowId, workflowUpdate)
self.logAdd(workflow, GLOBAL_WORKFLOW_LABELS["workflowStatusMessages"]["running"], level="info", progress=0)
return workflow
- def workflowFinish(self, workflow: Dict[str, Any]) -> Dict[str, Any]:
+ def workflowFinish(self, workflow: ChatWorkflow) -> ChatWorkflow:
"""
Finalizes a workflow and sets the status to 'completed' (State 7: Workflow Completion).
Args:
- workflow: Workflow object
+ workflow: ChatWorkflow object
Returns:
- Updated workflow object
+ Updated ChatWorkflow object
"""
# Prepare workflow update data
workflowUpdate = {
@@ -368,16 +312,16 @@ class WorkflowManager:
}
# Update the workflow object in memory
- workflow["status"] = workflowUpdate["status"]
- workflow["lastActivity"] = workflowUpdate["lastActivity"]
+ workflow.status = workflowUpdate["status"]
+ workflow.lastActivity = workflowUpdate["lastActivity"]
# Save workflow state to database - only relevant fields
- self.service.base.updateWorkflow(workflow["id"], workflowUpdate)
+ self.service.functions.updateWorkflow(workflow.id, workflowUpdate)
self.logAdd(workflow, GLOBAL_WORKFLOW_LABELS["workflowStatusMessages"]["completed"], level="info", progress=100)
return workflow
- async def projectManagerAnalysis(self, messageUser: Dict[str, Any], workflow: Dict[str, Any]) -> Dict[str, Any]:
+ async def projectManagerAnalysis(self, messageUser: ChatMessage, workflow: ChatWorkflow) -> Dict[str, Any]:
"""
Creates the prompt for the project manager and processes the response (State 4: Project Manager Analysis).
@@ -404,7 +348,7 @@ Based on the user request and the provided documents, please analyze the require
Also, identify the language of the user's request and include it in your response.
-{messageUser.get('content')}
+{messageUser.content}
# Previous conversation history:
@@ -496,7 +440,7 @@ JSON_OUTPUT = {{
# Call the AI service through serviceBase for language support
logger.debug(f"PROJECT MANAGER Planning prompt: {prompt}")
- projectManagerOutput = await self.service.base.callAi([
+ projectManagerOutput = await self.service.functions.callAi([
{
"role": "system",
"content": "You are an experienced project manager who analyzes user requests and creates work plans. You pay very careful attention to ensure that all document dependencies are correct and that no non-existent documents are defined as inputs. The output follows strictly the specified format."
@@ -511,7 +455,7 @@ JSON_OUTPUT = {{
logger.debug(f"PROJECT MANAGER Planning answer: {projectManagerOutput}")
return self.parseJsonResponse(projectManagerOutput)
- async def agentProcessing(self, task: Dict[str, Any], workflow: Dict[str, Any]) -> List[Dict[str, Any]]:
+ async def agentProcessing(self, task: Dict[str, Any], workflow: ChatWorkflow) -> List[Dict[str, Any]]:
"""
Process a single agent task from the workflow (State 5: Agent Execution).
Optimized for the task-based approach where all agents implement processTask.
@@ -565,16 +509,16 @@ JSON_OUTPUT = {{
# Create a standardized task object for the agent as per state machine spec
agentTask = {
"taskId": str(uuid.uuid4()),
- "workflowId": workflow.get("id"),
+ "workflowId": workflow.id,
"prompt": agentPrompt,
"inputDocuments": inputDocuments,
"outputSpecifications": outputSpecs,
"context": {
"workflow": workflow, # Add the complete workflow object
- "workflowRound": workflow.get("currentRound", 1),
+ "workflowRound": workflow.currentRound,
"agentType": agentName,
"timestamp": datetime.now().isoformat(),
- "language": self.service.base.userLanguage # Pass language to agent
+ "language": self.service.functions.userLanguage # Pass language to agent
}
}
@@ -584,7 +528,7 @@ JSON_OUTPUT = {{
logger.debug("TASK: "+self.parseJson2text(agentTask))
# Ensure AI service is available
- if not self.service.base.aiService:
+ if not self.service.functions.aiService:
logger.error("AI service not available in LucyDOM interface")
self.logAdd(workflow, "Error: AI service not available", level="error")
return []
@@ -613,22 +557,22 @@ JSON_OUTPUT = {{
tokensUsed = bytesSent + bytesReceived
# Update workflow statistics
- if 'dataStats' not in workflow:
- workflow['dataStats'] = {
- 'bytesSent': 0,
- 'bytesReceived': 0,
- 'tokensUsed': 0,
- 'processingTime': 0
- }
+ if 'stats' not in workflow:
+ workflow.stats = ChatStat(
+ bytesSent=0,
+ bytesReceived=0,
+ tokensUsed=0,
+ processingTime=0
+ )
- workflow['dataStats']['bytesSent'] += bytesSent
- workflow['dataStats']['bytesReceived'] += bytesReceived
- workflow['dataStats']['tokensUsed'] += tokensUsed
- workflow['dataStats']['processingTime'] += (endTime - startTime)
+ workflow.stats.bytesSent += bytesSent
+ workflow.stats.bytesReceived += bytesReceived
+ workflow.stats.tokensUsed += tokensUsed
+ workflow.stats.processingTime += (endTime - startTime)
# Update in database
- self.service.base.updateWorkflow(workflow["id"], {
- "dataStats": workflow['dataStats']
+ self.service.functions.updateWorkflow(workflow.id, {
+ "stats": workflow.stats.model_dump()
})
logger.debug(f"Agent '{agentName}' completed task. RESULT: {self.parseJson2text(agentResults)}")
@@ -648,10 +592,10 @@ JSON_OUTPUT = {{
# Create a message in the workflow with the agent's response
agentMessage = await self.chatMessageToWorkflow("assistant", agent, agentInputs, workflow)
- agentMessage["status"] = "step" # As per state machine specification
+ agentMessage.status = "step" # As per state machine specification
logger.debug(f"Agent result = {self.parseJson2text(agentMessage)}.")
- return agentMessage.get("documents", [])
+ return agentMessage.documents
except Exception as e:
errorMsg = f"Error executing agent '{agentLabel}': {str(e)}"
@@ -692,7 +636,7 @@ JSON_OUTPUT = {{
break
# Use the serviceBase for language-aware AI calls
- finalPrompt = await self.service.base.callAi([
+ finalPrompt = await self.service.functions.callAi([
{"role": "system", "content": "You are a project manager, who delivers results to a user."},
{"role": "user", "content": f"""
Give the final short feedback to the user with reference to the initial statement (objUserResponse). Inform him about the list of filesDelivered. You do not need to send the files, this is handled separately. If in the list of filesDelivered some files_promised would be missing, just give a comment on this, otherwise task is now completed successfully.
@@ -717,7 +661,7 @@ filesDelivered = {self.parseJson2text(matchingDocuments)}
logger.debug(f"FINAL MESSAGE = {self.parseJson2text(finalMessage)}.")
return finalMessage
- async def workflowSummarize(self, workflow: Dict[str, Any], messageUser: Dict[str, Any]) -> str:
+ async def workflowSummarize(self, workflow: ChatWorkflow, messageUser: ChatMessage) -> str:
"""
Creates a summary of the workflow without the current user message.
@@ -742,7 +686,7 @@ filesDelivered = {self.parseJson2text(matchingDocuments)}
return "\n\n".join(summaryParts)
- async def messageSummarize(self, message: Dict[str, Any]) -> str:
+ async def messageSummarize(self, message: ChatMessage) -> str:
"""
Creates a summary of a message including its documents.
@@ -752,13 +696,13 @@ filesDelivered = {self.parseJson2text(matchingDocuments)}
Returns:
Summary of the message
"""
- role = message.get("role", "undefined")
- agentName = message.get("agentName", "")
- content = message.get("content", "")
+ role = message.role
+ agentName = message.agentName
+ content = message.content
try:
# Use the serviceBase for language-aware AI calls
- contentSummary = await self.service.base.callAi([
+ contentSummary = await self.service.functions.callAi([
{"role": "system", "content": f"You are a chat message summarizer. Create a very concise summary (2-3 sentences, maximum 300 characters)"},
{"role": "user", "content": content}
])
@@ -778,7 +722,7 @@ filesDelivered = {self.parseJson2text(matchingDocuments)}
return f"[{role} {agentName}]: {contentSummary}{docsSummary}"
- async def chatMessageToWorkflow(self, role: str, agent: Dict[str, Any], chatMessage: Dict[str, Any], workflow: Dict[str, Any]) -> Dict[str, Any]:
+ async def chatMessageToWorkflow(self, role: str, agent: Dict[str, Any], chatMessage: Dict[str, Any], workflow: ChatWorkflow) -> ChatMessage:
"""
Integrates user inputs into a Message object including files with complete contents (State 3: User Message Processing).
@@ -811,13 +755,13 @@ filesDelivered = {self.parseJson2text(matchingDocuments)}
additionalFiles = await self.processFileIds(additionalFileIds)
# Create message object
- messageObject = {
- "role": role,
- "agentName": agentLabel,
- "content": messageContent,
- "documents": additionalFiles,
- "status": chatMessage.get("status", "")
- }
+ messageObject = ChatMessage(
+ role=role,
+ agentName=agentLabel,
+ content=messageContent,
+ documents=additionalFiles,
+ status=chatMessage.get("status", "step")
+ )
messageObject = self.messageAdd(workflow, messageObject)
logger.debug(f"message_user = {self.parseJson2text(messageObject)}.")
@@ -837,20 +781,20 @@ filesDelivered = {self.parseJson2text(matchingDocuments)}
tokensUsed = bytesSent
# Update workflow statistics
- if 'dataStats' not in workflow:
- workflow['dataStats'] = {
- 'bytesSent': 0,
- 'bytesReceived': 0,
- 'tokensUsed': 0,
- 'processingTime': 0
- }
+ if 'stats' not in workflow:
+ workflow.stats = ChatStat(
+ bytesSent=0,
+ bytesReceived=0,
+ tokensUsed=0,
+ processingTime=0
+ )
- workflow['dataStats']['bytesSent'] += bytesSent
- workflow['dataStats']['tokensUsed'] += tokensUsed
+ workflow.stats.bytesSent += bytesSent
+ workflow.stats.tokensUsed += tokensUsed
# Update in database
- self.service.base.updateWorkflow(workflow["id"], {
- "dataStats": workflow['dataStats']
+ self.service.functions.updateWorkflow(workflow.id, {
+ "stats": workflow.stats.model_dump()
})
return messageObject
@@ -873,18 +817,18 @@ filesDelivered = {self.parseJson2text(matchingDocuments)}
for fileId in fileIds:
try:
# Check if the file exists
- file = self.service.base.getFile(fileId)
+ file = self.service.functions.getFile(fileId)
if not file:
logger.warning(f"File with ID {fileId} not found")
continue
# Check if file belongs to the current mandate
- if file.get("_mandateId") != self._mandateId:
- logger.warning(f"File {fileId} does not belong to mandate {self._mandateId}")
+ if file.get("mandateId") != self.functions.mandateId:
+ logger.warning(f"File {fileId} does not belong to mandate {self.functions.mandateId}")
continue
# Load file content
- fileContent = self.service.base.getFileData(fileId)
+ fileContent = self.service.functions.getFileData(fileId)
if fileContent is None:
logger.warning(f"No content found for file with ID {fileId}")
continue
@@ -894,7 +838,7 @@ filesDelivered = {self.parseJson2text(matchingDocuments)}
isTextFormat = isTextMimeType(mimeType)
# Get file data from database
- fileDataEntries = self.service.base.db.getRecordset("fileData", recordFilter={"id": fileId})
+ fileDataEntries = self.service.functions.db.getRecordset("fileData", recordFilter={"id": fileId})
base64Encoded = False
if fileDataEntries and "base64Encoded" in fileDataEntries[0]:
@@ -905,7 +849,6 @@ filesDelivered = {self.parseJson2text(matchingDocuments)}
base64Encoded = not isTextFormat
# Convert to base64 for document storage
- import base64
encodedData = ""
if base64Encoded:
@@ -927,20 +870,20 @@ filesDelivered = {self.parseJson2text(matchingDocuments)}
# Create document
fileNameExt = file.get("name")
- document = {
- "id": f"doc_{str(uuid.uuid4())}",
- "fileId": fileId,
- "name": os.path.splitext(fileNameExt)[0] if os.path.splitext(fileNameExt)[0] else "noname",
- "ext": os.path.splitext(fileNameExt)[1][1:] if os.path.splitext(fileNameExt)[1] else "bin",
- "mimeType": mimeType,
- "data": encodedData,
- "base64Encoded": base64Encoded,
- "metadata": {
+ document = ChatDocument(
+ id=f"doc_{str(uuid.uuid4())}",
+ fileId=fileId,
+ name=os.path.splitext(fileNameExt)[0] if os.path.splitext(fileNameExt)[0] else "noname",
+ ext=os.path.splitext(fileNameExt)[1][1:] if os.path.splitext(fileNameExt)[1] else "bin",
+ mimeType=mimeType,
+ data=encodedData,
+ base64Encoded=base64Encoded,
+ metadata={
"isText": isTextFormat,
"base64Encoded": base64Encoded # For backward compatibility
},
- "contents": []
- }
+ contents=[]
+ )
# Extract contents
contents = getDocumentContents(file, fileContent)
@@ -954,9 +897,9 @@ filesDelivered = {self.parseJson2text(matchingDocuments)}
# Use the flag from metadata if available
content["base64Encoded"] = content.get("metadata", {}).get("base64Encoded", not content.get("metadata", {}).get("isText", False))
- document["contents"] = contents
+ document.contents = contents
- logger.info(f"File {file.get('name', 'unnamed')} (ID: {fileId}) loaded with {len(contents)} contents and summaries")
+ logger.info(f"File {file.name} (ID: {fileId}) loaded with {len(contents)} contents and summaries")
documents.append(document)
except Exception as e:
@@ -966,7 +909,7 @@ filesDelivered = {self.parseJson2text(matchingDocuments)}
return documents
- async def prepareAgentInputDocuments(self, docInputList: List[Dict[str, Any]], workflow: Dict[str, Any]) -> List[Dict[str, Any]]:
+ async def prepareAgentInputDocuments(self, docInputList: List[Dict[str, Any]], workflow: ChatWorkflow) -> List[Dict[str, Any]]:
"""
Prepares input documents for an agent, sorted with newest first.
@@ -981,8 +924,8 @@ filesDelivered = {self.parseJson2text(matchingDocuments)}
# Sort workflow messages by sequence number (descending)
sortedMessages = sorted(
- workflow.get("messages", []),
- key=lambda m: m.get("sequenceNo", 0),
+ workflow.messages,
+ key=lambda m: m.sequenceNo,
reverse=True
)
@@ -993,8 +936,8 @@ filesDelivered = {self.parseJson2text(matchingDocuments)}
foundDoc = None
# Search for the document in sorted workflow messages (newest first)
for message in sortedMessages:
- for doc in message.get("documents", []):
- if (docFileId != "" and docFileId == doc.get("fileId")) or (docFilename != "" and self.getFilename(doc) == docFilename):
+ for doc in message.documents:
+ if (docFileId != "" and docFileId == doc.fileId) or (docFilename != "" and self.getFilename(doc) == docFilename):
foundDoc = doc
break
if foundDoc:
@@ -1009,7 +952,7 @@ filesDelivered = {self.parseJson2text(matchingDocuments)}
return preparedInputs
- async def processDocumentForAgent(self, document: Dict[str, Any], docSpec: Dict[str, Any]) -> Dict[str, Any]:
+ async def processDocumentForAgent(self, document: ChatDocument, docSpec: Dict[str, Any]) -> ChatDocument:
"""
Processes a document for an agent based on the document specification.
Uses AI to extract relevant content from the document based on the specification.
@@ -1030,7 +973,7 @@ filesDelivered = {self.parseJson2text(matchingDocuments)}
for content in processedDoc["contents"]:
# Check if part required
- if partSpec != "" and partSpec != content.get("name"):
+ if partSpec != "" and partSpec != content.name:
continue
# Get the prompt from the document specification
@@ -1076,13 +1019,13 @@ filesDelivered = {self.parseJson2text(matchingDocuments)}
if isBase64:
try:
# Pass base64 encoded data directly to callAi4Image
- return await self.service.base.callAi4Image(data, content.get("mimeType", "application/octet-stream"), imagePrompt)
+ return await self.service.functions.callAi4Image(data, content.mimeType, imagePrompt)
except Exception as e:
logger.error(f"Error processing base64 content: {str(e)}")
return f"Error processing content: {str(e)}"
else:
# For non-base64 content, use callAi
- return await self.service.base.callAi([
+ return await self.service.functions.callAi([
{"role": "system", "content": "You are a content analyzer. Extract relevant information from the provided content."},
{"role": "user", "content": f"{textPrompt}\n\nContent:\n{data}"}
], produceUserAnswer=True)
@@ -1091,7 +1034,7 @@ filesDelivered = {self.parseJson2text(matchingDocuments)}
logger.error(f"Error processing content: {str(e)}")
return f"Error processing content: {str(e)}"
- def messageAdd(self, workflow: ChatWorkflow, message: Dict[str, Any]) -> ChatMessage:
+ def messageAdd(self, workflow: ChatWorkflow, message: ChatMessage) -> ChatMessage:
"""
Adds a message to the workflow and updates lastActivity.
Saves the message in the database and updates the workflow with references.
@@ -1107,60 +1050,60 @@ filesDelivered = {self.parseJson2text(matchingDocuments)}
currentTime = datetime.now().isoformat()
# Generate new message ID if not present
- if "id" not in message:
- message["id"] = f"msg_{str(uuid.uuid4())}"
+ if message.id is None:
+ message.id = f"msg_{str(uuid.uuid4())}"
# Add workflow ID and timestamps
- message["workflowId"] = workflow.id
- message["startedAt"] = currentTime
- message["finishedAt"] = currentTime
+ message.workflowId = workflow.id
+ message.startedAt = currentTime
+ message.finishedAt = currentTime
# Set sequence number
- message["sequenceNo"] = len(workflow.messages) + 1
+ message.sequenceNo = len(workflow.messages) + 1
# Ensure required fields are present
- if "role" not in message:
+ if message.role is None:
# Set a default role based on agentName
- message["role"] = "assistant" if message.get("agentName") else "user"
+ message.role = "assistant" if message.agentName else "user"
- if "agentName" not in message:
- message["agentName"] = ""
+ if message.agentName is None:
+ message.agentName = ""
# Set status if not present
- if "status" not in message:
- message["status"] = "step"
+ if message.status is None:
+ message.status = "step"
# Calculate statistics for the message
- bytesSent = len(message.get("content", "").encode('utf-8'))
- for doc in message.get("documents", []):
- if doc.get("data"):
- bytesSent += len(doc["data"].encode('utf-8'))
- for content in doc.get("contents", []):
- if content.get("data"):
- bytesSent += len(content["data"].encode('utf-8'))
+ bytesSent = len(message.content.encode('utf-8'))
+ for doc in message.documents:
+ if doc.data:
+ bytesSent += len(doc.data.encode('utf-8'))
+ for content in doc.contents:
+ if content.data:
+ bytesSent += len(content.data.encode('utf-8'))
# Calculate tokens used (now using bytes)
tokensUsed = bytesSent
# Update workflow statistics
- if not workflow.dataStats:
- workflow.dataStats = {
- "bytesSent": 0,
- "bytesReceived": 0,
- "tokensUsed": 0,
- "processingTime": 0
- }
+ if not workflow.stats:
+ workflow.stats = ChatStat(
+ bytesSent=0,
+ bytesReceived=0,
+ tokensUsed=0,
+ processingTime=0
+ )
# Update statistics based on message role
- if message["role"] == "user":
- workflow.dataStats["bytesSent"] += bytesSent
- workflow.dataStats["tokensUsed"] += tokensUsed
+ if message.role == "user":
+ workflow.stats.bytesSent += bytesSent
+ workflow.stats.tokensUsed += tokensUsed
else: # assistant messages
- workflow.dataStats["bytesReceived"] += bytesSent
- workflow.dataStats["tokensUsed"] += tokensUsed
+ workflow.stats.bytesReceived += bytesSent
+ workflow.stats.tokensUsed += tokensUsed
# Create ChatMessage object
- chatMessage = ChatMessage(**message)
+ chatMessage = ChatMessage(**message.model_dump())
# Add message to workflow
workflow.messages.append(chatMessage)
@@ -1176,15 +1119,15 @@ filesDelivered = {self.parseJson2text(matchingDocuments)}
workflow.lastActivity = currentTime
# Save to database - first the message itself
- self.service.base.createWorkflowMessage(chatMessage.model_dump())
+ self.service.functions.createWorkflowMessage(chatMessage.model_dump())
# Then save the workflow with updated references and statistics
workflowUpdate = {
"lastActivity": currentTime,
"messageIds": workflow.messageIds,
- "dataStats": workflow.dataStats # Include updated statistics
+ "stats": workflow.stats.model_dump() # Include updated statistics
}
- self.service.base.updateWorkflow(workflow.id, workflowUpdate)
+ self.service.functions.updateWorkflow(workflow.id, workflowUpdate)
return chatMessage
@@ -1214,70 +1157,46 @@ filesDelivered = {self.parseJson2text(matchingDocuments)}
def logAdd(self, workflow: ChatWorkflow, message: str, level: str = "info",
progress: Optional[int] = None) -> str:
"""
- Adds a log entry to the workflow and also logs it in the logger.
- Enhanced with standardized formatting and workflow status tracking.
+ Add a log entry to the workflow.
Args:
workflow: ChatWorkflow object
message: Log message
level: Log level (info, warning, error)
- progress: Optional - Progress value (0-100)
+ progress: Optional progress percentage
Returns:
- ID of the created log entry
+ str: ID of the created log entry
"""
- # Generate log ID
- logId = f"log_{str(uuid.uuid4())}"
-
- # Get workflow status
- workflowStatus = workflow.state
-
- # Set agentName from global settings
- agentName = GLOBAL_WORKFLOW_LABELS.get("systemName", "unknown")
-
- # Process message if it contains JSON
- processedMessage = message
try:
- if isinstance(message, str) and ("{" in message or "[" in message):
- # Try to parse as JSON
- jsonObj = json.loads(message)
- # Trim data attribute if present
- processedJson = self._trimDataInJson(jsonObj)
- processedMessage = json.dumps(processedJson)
- except json.JSONDecodeError:
- # If parsing fails, use original message
- pass
-
- # Create log entry
- logEntry = ChatLog(
- id=logId,
- workflowId=workflow.id,
- message=processedMessage,
- type=level,
- timestamp=datetime.now().isoformat(),
- agentName=agentName,
- status=workflowStatus
- )
-
- # Add progress if provided
- if progress is not None:
- logEntry.progress = progress
-
- # Add log to workflow
- workflow.logs.append(logEntry)
-
- # Save in database
- self.service.base.createWorkflowLog(logEntry.model_dump())
-
- # Also log in logger
- if level == "info":
- logger.info(f"Workflow {workflow.id}: {processedMessage}")
- elif level == "warning":
- logger.warning(f"Workflow {workflow.id}: {processedMessage}")
- elif level == "error":
- logger.error(f"Workflow {workflow.id}: {processedMessage}")
-
- return logId
+ # Generate log ID
+ logId = str(uuid.uuid4())
+
+ # Create log entry
+ logEntry = ChatLog(
+ id=logId,
+ workflowId=workflow.id,
+ message=message,
+ level=level,
+ progress=progress,
+ timestamp=datetime.now().isoformat()
+ )
+
+ # Add to workflow logs
+ workflow.logs.append(logEntry)
+
+ # Also log to Python logger
+ logLevel = getattr(logging, level.upper())
+ logger.log(logLevel, f"[Workflow {workflow.id}] {message}")
+
+ # Save to database
+ self.service.functions.saveWorkflowLog(workflow.id, logEntry.model_dump())
+
+ return logId
+
+ except Exception as e:
+ logger.error(f"Error adding log entry: {str(e)}")
+ return ""
def saveAgentDocuments(self, agentResults: Dict[str, Any]) -> List[int]:
"""
@@ -1299,10 +1218,10 @@ filesDelivered = {self.parseJson2text(matchingDocuments)}
for doc in documents:
try:
# Extract document data according to LucyDOM model
- name = doc.get("name", "")
- ext = doc.get("ext", "")
- data = doc.get("data", "")
- base64Encoded = doc.get("base64Encoded", False)
+ name = doc.name
+ ext = doc.ext
+ data = doc.data
+ base64Encoded = doc.base64Encoded
# Skip if no name or data
if not name or not data:
@@ -1336,10 +1255,10 @@ filesDelivered = {self.parseJson2text(matchingDocuments)}
fileContent = data
# Determine MIME type based on extension
- mimeType = self.service.base.getMimeType(f"{base_name}.{ext}")
+ mimeType = self.service.functions.getMimeType(f"{base_name}.{ext}")
# Create file metadata
- fileMeta = self.service.base.createFile(
+ fileMeta = self.service.functions.createFile(
name=base_name,
mimeType=mimeType,
size=len(fileContent)
@@ -1347,7 +1266,7 @@ filesDelivered = {self.parseJson2text(matchingDocuments)}
if fileMeta and "id" in fileMeta:
# Save file content
- if self.service.base.createFileData(fileMeta["id"], fileContent):
+ if self.service.functions.createFileData(fileMeta["id"], fileContent):
fileIds.append(fileMeta["id"])
logger.info(f"Saved document '{base_name}.{ext}' with file ID: {fileMeta['id']} (base64Encoded: {base64Encoded})")
else:
@@ -1362,7 +1281,7 @@ filesDelivered = {self.parseJson2text(matchingDocuments)}
return fileIds
- def getAvailableDocuments(self, workflow: Dict[str, Any], messageUser: Dict[str, Any]) -> List[Dict[str, Any]]:
+ def getAvailableDocuments(self, workflow: ChatWorkflow, messageUser: ChatMessage) -> List[Dict[str, Any]]:
"""
Determines all currently available documents from user input and already generated documents.
@@ -1377,27 +1296,27 @@ filesDelivered = {self.parseJson2text(matchingDocuments)}
if "messages" in workflow and workflow["messages"]:
for message in workflow["messages"]:
- messageId = message.get("id", "unknown")
- sequenceNr = message.get("sequenceNo", 0)
+ messageId = message.id
+ sequenceNr = message.sequenceNo
# Determine source
- source = "user" if messageId == messageUser.get("id") else "workflow"
+ source = "user" if messageId == messageUser.id else "workflow"
# Process documents in this message
if "documents" in message and message["documents"]:
for doc in message["documents"]:
# Get filename using our helper method
filename = self.getFilename(doc)
- fileId = doc.get("fileId")
+ fileId = doc.fileId
# Extract summaries from all contents
contentSummaries = []
if "contents" in doc and doc["contents"]:
for content in doc["contents"]:
contentSummaries.append({
- "contentPart": content.get("name", "noname"),
- "metadata": content.get("metadata", ""),
- "summary": content.get("summary", "No summary"),
+ "contentPart": content.name,
+ "metadata": content.metadata,
+ "summary": content.summary,
})
else:
# Add a default content summary if no contents exist
@@ -1433,7 +1352,7 @@ filesDelivered = {self.parseJson2text(matchingDocuments)}
"""
return self.agentRegistry.getAgentInfos()
- def getFilename(self, document: Dict[str, Any]) -> str:
+ def getFilename(self, document: ChatDocument) -> str:
"""
Gets the filename from a document by combining name and extension.
@@ -1443,8 +1362,8 @@ filesDelivered = {self.parseJson2text(matchingDocuments)}
Returns:
Filename with extension
"""
- name = document.get("name", "unnamed")
- ext = document.get("ext", "")
+ name = document.name
+ ext = document.ext
if ext:
return f"{name}.{ext}"
return name
@@ -1500,26 +1419,26 @@ filesDelivered = {self.parseJson2text(matchingDocuments)}
"userLanguage": "en"
}
- def _createWorkflowData(self, workflow: Dict[str, Any]) -> Dict[str, Any]:
+ def _createWorkflowData(self, workflow: ChatWorkflow) -> Dict[str, Any]:
"""Creates a workflow data structure."""
return {
- "_mandateId": self._mandateId,
- "_userId": self._userId,
- "name": workflow.get("name", "New Workflow"),
- "status": workflow.get("status", "running"),
- "startedAt": workflow.get("startedAt", self._getCurrentTimestamp()),
- "lastActivity": workflow.get("lastActivity", self._getCurrentTimestamp()),
- "dataStats": workflow.get("dataStats", {})
+ "mandateId": self.functions.mandateId,
+ "userId": self.functions.userid,
+ "name": workflow.name,
+ "status": workflow.status,
+ "startedAt": workflow.startedAt,
+ "lastActivity": workflow.lastActivity,
+ "stats": workflow.stats.model_dump()
}
def _checkFileAccess(self, fileId: int) -> bool:
"""Checks if the current user has access to a file."""
- file = self.service.base.getFile(fileId)
+ file = self.service.functions.getFile(fileId)
if not file:
return False
- if file.get("_mandateId") != self._mandateId:
- logger.warning(f"File {fileId} does not belong to mandate {self._mandateId}")
+ if file.get("mandateId") != self.functions.mandateId:
+ logger.warning(f"File {fileId} does not belong to mandate {self.functions.mandateId}")
return False
return True
@@ -1531,7 +1450,7 @@ _workflowManagerLastAccess = {} # Track last access time for cleanup
async def getWorkflowManager(service) -> WorkflowManager:
"""Get or create a workflow manager instance."""
- contextKey = f"{service.base._mandateId}_{service.base._userId}"
+ contextKey = f"{service.functions.mandateId}_{service.functions.userId}"
# Check if we have a cached instance
if contextKey in _workflowManagers:
@@ -1546,17 +1465,3 @@ async def getWorkflowManager(service) -> WorkflowManager:
_workflowManagerLastAccess[contextKey] = time.time()
return manager
-
-def cleanupWorkflowManager(_mandateId: int, _userId: int) -> None:
- """
- Explicitly cleanup a WorkflowManager instance.
-
- Args:
- _mandateId: ID of the mandate
- _userId: ID of the user
- """
- contextKey = f"{_mandateId}_{_userId}"
- if contextKey in _workflowManagers:
- del _workflowManagers[contextKey]
- if contextKey in _workflowManagerLastAccess:
- del _workflowManagerLastAccess[contextKey]
\ No newline at end of file
diff --git a/notes/changelog.txt b/notes/changelog.txt
index cd0436dd..cffe2509 100644
--- a/notes/changelog.txt
+++ b/notes/changelog.txt
@@ -1,19 +1,51 @@
....................... TASKS
-TODO: Frontend to adapt
-#####################
+WORKFLOW TO ENHANCE WITH self.service container --> let AI define it, then to initialize it for a workflow class
-! function callAI() to ask with userPrompt,systemPrompt optional), not with json
-! in the taskplan to refer files always in context of user/mandate
-! userinput to handle with object AgentQuery --> when received in frontend to enhance for full object
-! user prompt to handle as directive AND content
-! database to work with files per record, not files per table
-! database to serialize list[] objects and replace by id-list
+WORKFLOW: To create model environment: The BUILDING BLOCKS
-we need to adapt following things according to data objects in lucydomModel.py:
-- All file handling in whole code to be with correct file objects FileItem and FileData object
-- Everywhere to use datamodel specification by lucydomModel.py
+self.service:
+- user
+ - attributes (items)
+ - connection (list)
+- functions (serviceManagementClass instance)
+- operator:
+ - for each (list of references)
+ - aiCall
+ - extract(file) -> content
+ - fileref agent 2 fileid
+ - fileid 2 fileref agent
+ - convert(data, format)
+ - create agent input file list
+ - save agent output files
+
+- workflow
+ - active task (reference)
+ - id
+ - progress
+ - status
+ - tasks (list of tasks)
+ - id
+ - input data?
+ - output data?
+ -
+
+
+
+
+Walkthroughs:
+- register
+- login local
+- login msft
+- management pages
+- workflow
+
+
+----------------------- OPEN
+
+
+TODO: DOCUMENT handling in the workflow !!!!!!!!!!!!!!!!!!
- the workflow in "workflowManager.py" to run with pure documents and no content extraction from documents. To use revised Document model everywhere
- Prompts for tasklist to revise accordingly and to make clear, that the prompting for data extraction will be a job for each agent, not to be topic of the taskplan.
- task to the agent to include the prompt for his job to do and also no data extraction. also here to make clear, that data extraction will be done by the agent.
@@ -22,7 +54,6 @@ Implemented agents: they use following tools depending on their job:
- extract content using global function getContent(document list) --> define prompt for each document of to extract data based on agent's task. it creates an ai call to specify the prompt per document to extract relevant data in the required format using the global function documentProcessor() and stores extracted data in the content object to use
- produce message object (feedback prompt, document list)
-
function documentProcessor():
- return one content per document using ai call
- if there are many content objects in a document it uses one ai call per content to be specified, that if no relevant content is in the content object, an empty string is returned, otherwise the text in the required format
@@ -34,25 +65,25 @@ Other topics:
+NEXT:
+
+! function callAI() to ask with userPrompt (systemPrompt optional), not with json
+
+! in the taskplan to refer files always in context of user/mandate
+
+! userinput to handle with object AgentQuery --> when received in frontend to enhance for full object
+
+! user prompt to handle as directive AND file
+
+! database to serialize list[] objects and replace by id-list -> already done in workflow?
+
+! Prompts pro Agent mit prägnantem system prompt ergänzen. erfasse alle kontext-themen, regeln, anweisungen bei nichtwissen, format der antwort (generische stati)
------------------------ OPEN
-PRIO1:
agentDocumentation delivers a ".docx" file, but the content is a ".md" text markup file
-sharepoint connector with document search, content search, content extraction
-
-
-PRIO2:
-
-Integrate NDA Text as modal form - Data governance agreement by login with checkbox
-
-
-
-PRIO3:
-
Tools to transfer incl funds:
- Google SERPAPI (shelly)
- Anthropic Claude (valueon + shelly)
@@ -64,6 +95,56 @@ Tools to transfer incl funds:
----------------------- DONE
+
+FRONTEND
+- the application initiation gets userdata with the token over apiCall.js:/api/local/me --> object:
+ username
+ fullName
+ email
+ language
+ list of connections with attributes:
+ id
+ authority
+ externalUsername
+
+
+Backend
+
+in the backend to handle the routes as follows:
+- routeSecurityLocal.py to handle all local endpoints, to include token generation from local authority in auth.py
+- routeSecurityMsft.py and routeSecurityGoogle.py to handle all their endpoints
+- all routeSecurity*.py to use the same interface to manage tokens and userdata: serviceUserClass.py. This class to have following
+
+logic:
+ - all tokens are stored in one table, where each token has the attribute of the corresponding authenticationAuthority
+ - login and logout endpoints for "local" use a function "getUseridFromToken" to identify the user context. If the user does not exist, an error message is returned
+ - login and logout endpoints for "msft" and "google" use a function "getUseridFromToken" to identify the user context. If the user does not exist for login, a new "local" user is registered with the external user data and the external connection is attached. Within the identified user context and the connection in its list, the user context is sent back as tokenLocal and the connection as tokenExt
+ - the important thing is, that login endpoint serves for two different actions:
+ a) without user context (no tokenLocal), it makes login for a user by external authority and sets user context
+ b) with user context (a tokenLocal provided), it does NOT set a new user context, but manipulates a connection in the connection list of a local user
+ - illustrative example of token data to send to UI (attributes):
+ connect and
+ {
+ "token_type": "Bearer",
+ "expires_in": ,
+ "access_token": ,
+ "id_token": ,
+ "client_info": ,
+ "user_info": {
+ "name": "Patrick Motsch",
+ "email": "p.motsch@valueon.ch",
+ "id": "xxx"
+ },
+ "mandateId": "",
+ "userId": "",
+ "id": "tokenid",
+ }
+
+
+
+
+
+
We have to correct the following wrong user access management.
Issue is: when user logs in with "local" managed account and then logs in to msft account with "msft" authority, the userid is switched to the microsoft instance in the workflow. this must not happen.
diff --git a/notes/nda.txt b/notes/nda.txt
deleted file mode 100644
index 02b46bdb..00000000
--- a/notes/nda.txt
+++ /dev/null
@@ -1,37 +0,0 @@
-DATA PROCESSING AND AI USAGE CONSENT AGREEMENT
-
-By indicating your acceptance selecting "I agree", you as the user of this application acknowledge, consent to, and agree to the following terms regarding the processing of your data through artificial intelligence services:
-
-1. CONSENT TO DATA PROCESSING
- 1.1 You expressly authorize the collection, processing, transmission, and storage of any and all data you provide or generate while using our services ("User Data").
- 1.2 You understand and agree that User Data may be transmitted to and processed by third-party artificial intelligence providers, including but not limited to OpenAI and similar AI service providers.
- 1.3 This consent extends to all content, including but not limited to text, images, documents, conversation histories, preferences, activity logs, and any derivative data generated through your interaction with our services.
-
-2. ACKNOWLEDGMENT OF AI PROCESSING RISKS
- 2.1 You acknowledge that artificial intelligence systems process data differently than human operators and may produce unexpected, inaccurate, or inappropriate outputs.
- 2.2 You understand that AI services may retain, learn from, or use submitted data for improving their systems in accordance with their own terms of service.
- 2.3 You recognize that despite reasonable security measures, data transmitted to third-party AI services may be vulnerable to interception, unauthorized access, or breach.
-
-3. WAIVER OF LIABILITY
- 3.1 To the fullest extent permitted by applicable law, you hereby irrevocably and unconditionally waive and release any and all claims, liabilities, damages, losses, expenses, demands, and causes of action against us arising from or related to:
- a) The processing, transmission, storage, or usage of User Data by AI services;
- b) Any outputs, recommendations, or decisions generated by AI systems based on User Data;
- c) Any data breach, unauthorized access, or security incident that occurs after User Data is transmitted to third-party AI providers;
- d) Any unintended disclosure of confidential information processed through AI services;
- e) Any direct, indirect, incidental, special, consequential, or punitive damages, including but not limited to loss of profits, goodwill, data, or other intangible losses.
- 3.2 This waiver applies regardless of whether such damages arise from breach of contract, tort (including negligence), or any other legal theory.
-
-4. USER REPRESENTATIONS AND WARRANTIES
- 4.1 You represent and warrant that:
- a) You have the legal right to provide all User Data submitted;
- b) User Data does not infringe upon any intellectual property rights, privacy rights, or other rights of any third party;
- c) You have obtained all necessary consents from any third parties whose information may be included in User Data;
- d) User Data does not contain any information that is unlawful, harmful, threatening, abusive, harassing, defamatory, or otherwise objectionable.
- 4.2 You agree to indemnify and hold harmless our organization from any third-party claims arising from breach of these representations.
-
-5. LIMITATIONS AND SEVERABILITY
- 5.1 This waiver does not apply to any liability that cannot be excluded or limited by law, including liability for fraud, gross negligence, or willful misconduct.
- 5.2 If any provision of this agreement is found to be unenforceable, the remaining provisions shall remain in full force and effect.
- 5.3 This waiver shall be governed by and construed in accordance with applicable laws, without regard to conflict of law principles.
-
-By selection "I agree", you acknowledge that you have read, understood, and agree to be bound by all the terms and conditions set forth in this agreement.
diff --git a/notes/releasenotes.txt b/notes/releasenotes.txt
new file mode 100644
index 00000000..10d5dadc
--- /dev/null
+++ b/notes/releasenotes.txt
@@ -0,0 +1,8 @@
+New features
+- Limiter and tracking of IP address access
+- Sessions improved
+- user and connection consistently separated
+- seamless local and external authorities integration
+- audit trail
+- nda disclaimer in login window
+- CSRF Tokens included in forms
\ No newline at end of file
diff --git a/requirements.txt b/requirements.txt
index 83dae32a..07869b5b 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -4,6 +4,7 @@ uvicorn==0.23.2
python-multipart==0.0.6
httpx==0.25.0
pydantic==1.10.13 # Ältere Version ohne Rust-Abhängigkeit
+slowapi==0.1.8 # For rate limiting
## Authentication & Security
python-jose==3.3.0
@@ -11,6 +12,8 @@ passlib==1.7.4
argon2-cffi>=21.3.0 # Für Passwort-Hashing in gateway_interface.py
google-auth-oauthlib==1.2.0 # Für Google OAuth
google-auth==2.27.0 # Für Google Authentication
+bcrypt==4.0.1 # For password hashing
+python-jose[cryptography]==3.3.0 # For JWT tokens
## Database
mysql-connector-python==8.1.0