refactory complete to review and test

This commit is contained in:
ValueOn AG 2025-05-22 00:48:56 +02:00
parent e6ca2bad17
commit 628cca0ed4
26 changed files with 1981 additions and 1179 deletions

3
app.py
View file

@ -119,3 +119,6 @@ app.include_router(workflowRouter)
from modules.routes.routeMsft import router as msftRouter from modules.routes.routeMsft import router as msftRouter
app.include_router(msftRouter) app.include_router(msftRouter)
from modules.routes.routeGoogle import router as googleRouter
app.include_router(googleRouter)

View file

@ -50,3 +50,8 @@ Agent_Coder_EXECUTION_RETRY = 5
Service_MSFT_CLIENT_ID = c7e7112d-61dc-4f3a-8cd3-08cc4cd7504c Service_MSFT_CLIENT_ID = c7e7112d-61dc-4f3a-8cd3-08cc4cd7504c
Service_MSFT_CLIENT_SECRET = Kxf8Q~2lJIteZ~JaI32kMf1lfaWKATqxXiNiFbzV Service_MSFT_CLIENT_SECRET = Kxf8Q~2lJIteZ~JaI32kMf1lfaWKATqxXiNiFbzV
Service_MSFT_TENANT_ID = common Service_MSFT_TENANT_ID = common
# Google Service configuration
Service_GOOGLE_CLIENT_ID = your-google-client-id
Service_GOOGLE_CLIENT_SECRET = your-google-client-secret
Service_GOOGLE_REDIRECT_URI = http://localhost:8000/api/google/auth/callback

View file

@ -40,5 +40,6 @@ APP_LOGGING_FILE_ENABLED = True
APP_LOGGING_ROTATION_SIZE = 10485760 APP_LOGGING_ROTATION_SIZE = 10485760
APP_LOGGING_BACKUP_COUNT = 5 APP_LOGGING_BACKUP_COUNT = 5
# Agent Mail # Service Redirects
Service_MSFT_REDIRECT_URI = http://localhost:8000/api/msft/auth/callback Service_MSFT_REDIRECT_URI = http://localhost:8000/api/msft/auth/callback
Service_GOOGLE_REDIRECT_URI = http://localhost:8000/api/google/auth/callback

View file

@ -40,5 +40,6 @@ APP_LOGGING_FILE_ENABLED = True
APP_LOGGING_ROTATION_SIZE = 10485760 APP_LOGGING_ROTATION_SIZE = 10485760
APP_LOGGING_BACKUP_COUNT = 5 APP_LOGGING_BACKUP_COUNT = 5
# Service MSFT # Service Redirects
Service_MSFT_REDIRECT_URI = https://gateway.poweron-center.net/api/msft/auth/callback Service_MSFT_REDIRECT_URI = https://gateway.poweron-center.net/api/msft/auth/callback
Service_GOOGLE_REDIRECT_URI = http://gateway.poweron-center.net/api/google/auth/callback

View file

@ -7,12 +7,15 @@ import logging
import json import json
import io import io
import base64 import base64
from typing import Dict, Any, List import os
import time
from typing import Dict, Any, List, Optional
import pandas as pd import pandas as pd
import matplotlib.pyplot as plt import matplotlib.pyplot as plt
import seaborn as sns import seaborn as sns
from modules.workflow.agentBase import AgentBase from modules.workflow.agentBase import AgentBase
from modules.interfaces.lucydomModel import ChatContent
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -634,97 +637,33 @@ class AgentAnalyst(AgentBase):
return self.formatAgentDocumentOutput(outputLabel, imgData, f"image/{formatType}") return self.formatAgentDocumentOutput(outputLabel, imgData, f"image/{formatType}")
async def _createDataDocument(self, datasets: Dict, prompt: str, outputLabel: str, async def _createDataDocument(self, datasets: Dict, prompt: str, outputLabel: str,
analysisPlan: Dict, description: str) -> Dict: analysisPlan: Dict, description: str) -> ChatContent:
""" """
Create a data document (e.g., CSV, JSON) based on analysis. Create a data document (CSV, JSON, Excel) from analysis results.
Args: Args:
datasets: Dictionary of datasets datasets: Dictionary of datasets
prompt: Original task prompt prompt: Original task prompt
outputLabel: Output filename outputLabel: Output filename
analysisPlan: Analysis plan from AI analysisPlan: Analysis plan
description: Output description description: Output description
Returns: Returns:
Data document ChatContent object
""" """
# Determine format from filename
formatType = outputLabel.split('.')[-1].lower()
# If no datasets available, return error message
if not datasets:
return {
"label": outputLabel,
"content": f"No data available for processing into {formatType} format.",
"metadata": {
"contentType": "text/plain"
}
}
# Generate data processing instructions
dataPrompt = f"""
Create Python code to process datasets and generate a {formatType} file for:
TASK: {prompt}
OUTPUT REQUIREMENTS:
- Format: {formatType}
- Filename: {outputLabel}
- Description: {description}
ANALYSIS CONTEXT:
{json.dumps(analysisPlan, indent=2)}
AVAILABLE DATASETS:
"""
# Add dataset info
for name, df in datasets.items():
dataPrompt += f"\nDataset '{name}':\n"
dataPrompt += f"- Shape: {df.shape}\n"
dataPrompt += f"- Columns: {df.columns.tolist()}\n"
dataPrompt += f"- Sample data: {df.head(3).to_dict(orient='records')}\n"
dataPrompt += """
Generate Python code that:
1. Processes the available dataset(s)
2. Performs necessary transformations, aggregations, or calculations
3. Outputs the result in the requested format
4. Returns the content as a string variable named 'result'
Return ONLY executable Python code, no explanations or markdown.
"""
try: try:
# Get data processing code from AI # Determine format from filename
dataCode = await self.service.base.callAi([ formatType = outputLabel.split('.')[-1].lower() if '.' in outputLabel else "csv"
{"role": "system", "content": "You are a data processing expert. Provide only executable Python code."},
{"role": "user", "content": dataPrompt}
], produceUserAnswer = True)
# Clean code # Process data based on format
dataCode = dataCode.replace("```python", "").replace("```", "").strip() if formatType == "csv":
result = self._convertToCsv(datasets)
# Setup execution environment elif formatType == "json":
localVars = {"pd": pd, "np": __import__('numpy'), "io": io} result = json.dumps(datasets, indent=2)
elif formatType == "xlsx":
# Add datasets to local variables result = self._convertToExcel(datasets)
for name, df in datasets.items(): else:
# Create a sanitized variable name result = str(datasets)
varName = ''.join(c if c.isalnum() else '_' for c in name)
localVars[varName] = df
# Also add with standard names for simpler code
if "df" not in localVars:
localVars["df"] = df
elif "df2" not in localVars:
localVars["df2"] = df
# Execute the code
exec(dataCode, globals(), localVars)
# Get the result
result = localVars.get("result", "No output was generated.")
# Determine content type # Determine content type
contentType = "text/csv" if formatType == "csv" else \ contentType = "text/csv" if formatType == "csv" else \
@ -734,89 +673,71 @@ class AgentAnalyst(AgentBase):
return self.formatAgentDocumentOutput(outputLabel, result, contentType) return self.formatAgentDocumentOutput(outputLabel, result, contentType)
except Exception as e: except Exception as e:
logger.error(f"Error creating data document: {str(e)}", exc_info=True) logger.error(f"Error creating data document: {str(e)}", exc_info=True)
return { errorContent = f"Error generating {formatType} document: {str(e)}"
"label": outputLabel, return self.formatAgentDocumentOutput(outputLabel, errorContent, "text/plain")
"content": f"Error generating {formatType} document: {str(e)}",
"metadata": {
"contentType": "text/plain"
}
}
async def _createTextDocument(self, datasets: Dict, context: str, prompt: str, async def _createTextDocument(self, datasets: Dict, context: str, prompt: str,
outputLabel: str, formatType: str, outputLabel: str, formatType: str,
analysisPlan: Dict, description: str) -> Dict: analysisPlan: Dict, description: str) -> ChatContent:
""" """
Create a text document (report, analysis, etc.) based on analysis. Create a text document (markdown, HTML, text) from analysis results.
Args: Args:
datasets: Dictionary of datasets datasets: Dictionary of datasets
context: Document context text context: Document context
prompt: Original task prompt prompt: Original task prompt
outputLabel: Output filename outputLabel: Output filename
formatType: Output format type formatType: Output format
analysisPlan: Analysis plan from AI analysisPlan: Analysis plan
description: Output description description: Output description
Returns: Returns:
Text document ChatContent object
""" """
# Create dataset summaries
datasetSummaries = []
for name, df in datasets.items():
summary = f"Dataset: {name}\n"
summary += f"- Shape: {df.shape[0]} rows, {df.shape[1]} columns\n"
summary += f"- Columns: {', '.join(df.columns.tolist())}\n"
# Basic statistics for numeric columns
numericCols = df.select_dtypes(include=['number']).columns
if len(numericCols) > 0:
summary += "- Numeric Columns Stats:\n"
for col in numericCols[:3]: # Limit to first 3
stats = df[col].describe()
summary += f" - {col}: min={stats['min']:.2f}, max={stats['max']:.2f}, mean={stats['mean']:.2f}\n"
datasetSummaries.append(summary)
# Determine content type based on format
contentType = "text/markdown" if formatType in ["md", "markdown"] else \
"text/html" if formatType == "html" else \
"text/plain"
# Generate analysis prompt
analysisPrompt = f"""
Create a detailed {formatType} document for:
TASK: {prompt}
OUTPUT REQUIREMENTS:
- Format: {formatType}
- Filename: {outputLabel}
- Description: {description}
ANALYSIS CONTEXT:
{json.dumps(analysisPlan, indent=2)}
DATASET SUMMARIES:
{"".join(datasetSummaries)}
DOCUMENT CONTEXT:
{context[:2000]}... (truncated)
Create a comprehensive, professional analysis document that addresses the task requirements.
The document should:
1. Have a clear structure with headings and sections
2. Include relevant data findings and insights
3. Provide appropriate interpretations and recommendations
4. Format the content according to the required output format
Your response should be the complete document content in the specified format.
"""
try: try:
# Generate dataset summaries
datasetSummaries = []
for name, df in datasets.items():
summary = f"\nDataset: {name}\n"
summary += f"Shape: {df.shape}\n"
summary += f"Columns: {', '.join(df.columns)}\n"
if not df.empty:
summary += f"Sample data:\n{df.head(3).to_string()}\n"
datasetSummaries.append(summary)
# Generate analysis prompt
analysisPrompt = f"""
Create a detailed {formatType} document for:
TASK: {prompt}
OUTPUT REQUIREMENTS:
- Format: {formatType}
- Filename: {outputLabel}
- Description: {description}
ANALYSIS CONTEXT:
{json.dumps(analysisPlan, indent=2)}
DATASET SUMMARIES:
{"".join(datasetSummaries)}
DOCUMENT CONTEXT:
{context[:2000]}... (truncated)
Create a comprehensive, professional analysis document that addresses the task requirements.
The document should:
1. Have a clear structure with headings and sections
2. Include relevant data findings and insights
3. Provide appropriate interpretations and recommendations
4. Format the content according to the required output format
Your response should be the complete document content in the specified format.
"""
# Get document content from AI # Get document content from AI
documentContent = await self.service.base.callAi([ documentContent = await self.service.base.callAi([
{"role": "system", "content": f"You are a data analysis expert creating a {formatType} document."}, {"role": "system", "content": f"You are a data analysis expert creating a {formatType} document."},
@ -829,6 +750,11 @@ class AgentAnalyst(AgentBase):
elif formatType == "html" and not "<html" in documentContent.lower(): elif formatType == "html" and not "<html" in documentContent.lower():
documentContent = f"<html><body>{documentContent}</body></html>" documentContent = f"<html><body>{documentContent}</body></html>"
# Determine content type
contentType = "text/markdown" if formatType in ["md", "markdown"] else \
"text/html" if formatType == "html" else \
"text/plain"
return self.formatAgentDocumentOutput(outputLabel, documentContent, contentType) return self.formatAgentDocumentOutput(outputLabel, documentContent, contentType)
except Exception as e: except Exception as e:
@ -842,13 +768,7 @@ class AgentAnalyst(AgentBase):
else: else:
content = f"Error in Analysis\n\nThere was an error generating the analysis: {str(e)}" content = f"Error in Analysis\n\nThere was an error generating the analysis: {str(e)}"
return { return self.formatAgentDocumentOutput(outputLabel, content, contentType)
"label": outputLabel,
"content": content,
"metadata": {
"contentType": contentType
}
}
def _getImageBase64(self, formatType: str = 'png') -> str: def _getImageBase64(self, formatType: str = 'png') -> str:
""" """

View file

@ -8,8 +8,10 @@ from typing import Dict, Any, List
import json import json
import re import re
from datetime import datetime from datetime import datetime
import os
from modules.workflow.agentBase import AgentBase from modules.workflow.agentBase import AgentBase
from modules.interfaces.lucydomModel import ChatContent
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -292,7 +294,7 @@ class AgentDocumentation(AgentBase):
} }
async def _createDocumentMultiStep(self, prompt: str, context: str, outputLabel: str, async def _createDocumentMultiStep(self, prompt: str, context: str, outputLabel: str,
outputDescription: str, documentationPlan: Dict) -> Dict: outputDescription: str, documentationPlan: Dict) -> ChatContent:
""" """
Create a document using a multi-step approach with separate AI calls for each section. Create a document using a multi-step approach with separate AI calls for each section.
@ -304,155 +306,123 @@ class AgentDocumentation(AgentBase):
documentationPlan: Documentation plan from AI documentationPlan: Documentation plan from AI
Returns: Returns:
Document object ChatContent object
""" """
# Determine format from filename
formatType = outputLabel.split('.')[-1].lower() if '.' in outputLabel else "md"
# Map format to contentType
contentTypeMap = {
"md": "text/markdown",
"markdown": "text/markdown",
"html": "text/html",
"txt": "text/plain",
"text": "text/plain",
"json": "application/json",
"csv": "text/csv"
}
contentType = contentTypeMap.get(formatType, "text/plain")
# Get document information
title = documentationPlan.get("title", "Documentation")
documentType = documentationPlan.get("documentType", "document")
audience = documentationPlan.get("audience", "general")
tone = documentationPlan.get("tone", "formal")
keyTopics = documentationPlan.get("keyTopics", [])
formattingRequirements = documentationPlan.get("formattingRequirements", [])
# Get the detailed structure
detailedStructure = documentationPlan.get("detailedStructure", [])
if not detailedStructure:
# Fallback structure if none provided
detailedStructure = [
{
"title": "Introduction (Default)",
"keyPoints": ["Purpose", "Scope"],
"importance": "high"
},
{
"title": "Main Content (Default)",
"keyPoints": ["Core Information"],
"importance": "high"
},
{
"title": "Conclusion (Default)",
"keyPoints": ["Summary", "Next Steps"],
"importance": "medium"
}
]
try: try:
# Step 1: Generate document introduction # Determine format from filename
formatType = outputLabel.split('.')[-1].lower() if '.' in outputLabel else "md"
# Map format to contentType
contentTypeMap = {
"md": "text/markdown",
"markdown": "text/markdown",
"html": "text/html",
"txt": "text/plain",
"text": "text/plain",
"json": "application/json",
"csv": "text/csv"
}
contentType = contentTypeMap.get(formatType, "text/plain")
# Get document information
title = documentationPlan.get("title", "Documentation")
documentType = documentationPlan.get("documentType", "document")
audience = documentationPlan.get("audience", "general")
tone = documentationPlan.get("tone", "formal")
keyTopics = documentationPlan.get("keyTopics", [])
formattingRequirements = documentationPlan.get("formattingRequirements", [])
# Get the detailed structure
detailedStructure = documentationPlan.get("detailedStructure", [])
# Step 1: Generate executive summary
summaryPrompt = f"""
Create an executive summary for a {documentType} titled "{title}".
DOCUMENT OVERVIEW:
- Type: {documentType}
- Audience: {audience}
- Key Topics: {', '.join(keyTopics)}
TASK CONTEXT: {prompt}
The executive summary should:
1. Provide a concise overview of the document's purpose
2. Highlight key points and findings
3. Be clear and engaging for the target audience
4. Set expectations for the document's content
Keep the summary brief but comprehensive.
"""
executiveSummary = await self.service.base.callAi([
{"role": "system", "content": f"You are a documentation expert creating an executive summary in {formatType} format."},
{"role": "user", "content": summaryPrompt}
], produceUserAnswer = True)
# Step 2: Generate introduction
introPrompt = f""" introPrompt = f"""
Create the introduction for a {documentType} titled "{title}". Create an introduction for a {documentType} titled "{title}".
DOCUMENT OVERVIEW: DOCUMENT OVERVIEW:
- Type: {documentType} - Type: {documentType}
- Audience: {audience} - Audience: {audience}
- Tone: {tone} - Key Topics: {', '.join(keyTopics)}
- Key Topics: {', '.join(keyTopics)}
- Format: {formatType}
TASK CONTEXT: {prompt} TASK CONTEXT: {prompt}
This introduction should: The introduction should:
1. Clearly state the purpose and scope of the document 1. Set the context and purpose of the document
2. Provide context and background information 2. Outline the scope and objectives
3. Outline what the reader will find in the document 3. Preview the main topics to be covered
4. Set the appropriate tone for the {audience} audience 4. Engage the reader's interest
The introduction should be professional and engaging, but short and precise, formatted according to {formatType} standards. do not add details, which are not requested by the Task Context. Format the introduction according to {formatType} standards.
""" """
introduction = await self.service.base.callAi([ introduction = await self.service.base.callAi([
{"role": "system", "content": f"You are a documentation expert creating an introduction in {formatType} format."}, {"role": "system", "content": f"You are a documentation expert creating an introduction in {formatType} format."},
{"role": "user", "content": introPrompt} {"role": "user", "content": introPrompt}
], produceUserAnswer = True) ], produceUserAnswer = True)
# Step 2: Generate executive summary (if applicable) # Step 3: Generate main sections
if documentType in ["report", "whitepaper", "case study"]: sections = []
summaryPrompt = f""" for section in detailedStructure:
Create an executive summary for a {documentType} titled "{title}". sectionTitle = section.get("title", "Section")
keyPoints = section.get("keyPoints", [])
subsections = section.get("subsections", [])
importance = section.get("importance", "medium")
estimatedLength = section.get("estimatedLength", "medium")
DOCUMENT OVERVIEW: sectionPrompt = f"""
Create the {sectionTitle} section for a {documentType} titled "{title}".
SECTION DETAILS:
- Title: {sectionTitle}
- Key Points: {', '.join(keyPoints)}
- Subsections: {', '.join(subsections)}
- Importance: {importance}
- Estimated Length: {estimatedLength}
DOCUMENT CONTEXT:
- Type: {documentType} - Type: {documentType}
- Audience: {audience} - Audience: {audience}
- Key Topics: {', '.join(keyTopics)} - Key Topics: {', '.join(keyTopics)}
TASK CONTEXT: {prompt} TASK CONTEXT: {prompt}
This executive summary should: The section should:
1. Provide a concise overview of the entire document 1. Cover all key points thoroughly
2. Highlight key findings, recommendations, or conclusions 2. Include relevant subsections
3. Be suitable for executives or busy readers who may only read this section 3. Maintain appropriate depth based on importance
4. Be professionally formatted according to {formatType} standards 4. Follow the document's tone and style
Keep the summary focused and impactful, approximately 200-300 words. Format the section according to {formatType} standards.
""" """
executiveSummary = await self.service.base.callAi([
{"role": "system", "content": f"You are a documentation expert creating an executive summary in {formatType} format."},
{"role": "user", "content": summaryPrompt}
], produceUserAnswer = True)
else:
executiveSummary = ""
# Step 3: Generate each section
sections = []
for section in detailedStructure:
sectionTitle = section.get("title", "Section")
keyPoints = section.get("keyPoints", [])
subsections = section.get("subsections", [])
importance = section.get("importance", "medium")
# Adjust depth based on importance
detailLevel = "high" if importance == "high" else "medium"
sectionPrompt = f"""
Create the "{sectionTitle}" section for a {documentType} titled "{title}".
SECTION DETAILS:
- Title: {sectionTitle}
- Key Points to Cover: {', '.join(keyPoints)}
- Subsections: {', '.join(subsections)}
- Detail Level: {detailLevel}
DOCUMENT CONTEXT:
- Type: {documentType}
- Audience: {audience}
- Tone: {tone}
- Format: {formatType}
TASK CONTEXT: {prompt}
AVAILABLE INFORMATION:
{context[:500]}... (truncated)
This section should:
1. Be comprehensive and well-structured
2. Cover all the key points listed
3. Include the specified subsections with appropriate headings
4. Maintain a {tone} tone suitable for the {audience} audience
5. Be properly formatted according to {formatType} standards
6. Include specific examples, data, or evidence where appropriate
Be thorough in your coverage of this section, providing substantive content focussing on the Task content.
"""
sectionContent = await self.service.base.callAi([ sectionContent = await self.service.base.callAi([
{"role": "system", "content": f"You are a documentation expert creating detailed content for the {sectionTitle} section."}, {"role": "system", "content": f"You are a documentation expert creating a section in {formatType} format."},
{"role": "user", "content": sectionPrompt} {"role": "user", "content": sectionPrompt}
], produceUserAnswer = True) ], produceUserAnswer = True)
@ -548,13 +518,7 @@ Be thorough in your coverage of this section, providing substantive content focu
else: else:
content = f"Error in Documentation\n\nThere was an error generating the documentation: {str(e)}" content = f"Error in Documentation\n\nThere was an error generating the documentation: {str(e)}"
return { return self.formatAgentDocumentOutput(outputLabel, content, contentType)
"label": outputLabel,
"content": content,
"metadata": {
"contentType": contentType
}
}
# Factory function for the Documentation agent # Factory function for the Documentation agent

View file

@ -14,7 +14,7 @@ from passlib.context import CryptContext
from modules.connectors.connectorDbJson import DatabaseConnector from modules.connectors.connectorDbJson import DatabaseConnector
from modules.shared.configuration import APP_CONFIG from modules.shared.configuration import APP_CONFIG
from modules.interfaces.gatewayAccess import GatewayAccess from modules.interfaces.gatewayAccess import GatewayAccess
from modules.interfaces.gatewayModel import User, Mandate, UserInDB from modules.interfaces.gatewayModel import User, Mandate, UserInDB, UserConnection
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -22,6 +22,9 @@ logger = logging.getLogger(__name__)
# Singleton factory for GatewayInterface instances per context # Singleton factory for GatewayInterface instances per context
_gatewayInterfaces = {} _gatewayInterfaces = {}
# Root interface instance
_rootGatewayInterface = None
# Password-Hashing # Password-Hashing
pwdContext = CryptContext(schemes=["argon2"], deprecated="auto") pwdContext = CryptContext(schemes=["argon2"], deprecated="auto")
@ -32,18 +35,22 @@ class GatewayInterface:
Manages users and mandates. Manages users and mandates.
""" """
def __init__(self): def __init__(self, currentUser: Dict[str, Any] = None):
"""Initializes the Gateway Interface.""" """Initializes the Gateway Interface."""
# Initialize variables
self.currentUser = currentUser
self.userId = currentUser.get("id") if currentUser else None
self.access = None # Will be set when user context is provided
# Initialize database # Initialize database
self._initializeDatabase() self._initializeDatabase()
# Initialize standard records if needed # Initialize standard records if needed
self._initRecords() self._initRecords()
# Initialize variables # Set user context if provided
self.currentUser = None if currentUser:
self.userId = None self.setUserContext(currentUser)
self.access = None # Will be set when user context is provided
def setUserContext(self, currentUser: Dict[str, Any]): def setUserContext(self, currentUser: Dict[str, Any]):
"""Sets the user context for the interface.""" """Sets the user context for the interface."""
@ -338,10 +345,117 @@ class GatewayInterface:
return User(**user) return User(**user)
def addUserConnection(self, userId: str, authority: str, externalId: str, externalUsername: str, externalEmail: Optional[str] = None) -> UserConnection:
"""Add a new connection to an external service for a user"""
try:
# Get user
user = self.getUser(userId)
if not user:
raise ValueError(f"User {userId} not found")
# Check if connection already exists
for conn in user.connections:
if conn.authority == authority and conn.externalId == externalId:
raise ValueError(f"Connection to {authority} already exists for user {userId}")
# Create new connection
connection = UserConnection(
authority=authority,
externalId=externalId,
externalUsername=externalUsername,
externalEmail=externalEmail
)
# Add connection to user
user.connections.append(connection)
# Update user record
self.db.recordModify("users", userId, {"connections": [c.model_dump() for c in user.connections]})
return connection
except Exception as e:
logger.error(f"Error adding user connection: {str(e)}")
raise ValueError(f"Failed to add user connection: {str(e)}")
def removeUserConnection(self, userId: str, connectionId: str) -> None:
"""Remove a connection to an external service for a user"""
try:
# Get user
user = self.getUser(userId)
if not user:
raise ValueError(f"User {userId} not found")
# Find and remove connection
user.connections = [c for c in user.connections if c.id != connectionId]
# Update user record
self.db.recordModify("users", userId, {"connections": [c.model_dump() for c in user.connections]})
except Exception as e:
logger.error(f"Error removing user connection: {str(e)}")
raise ValueError(f"Failed to remove user connection: {str(e)}")
def authenticateUser(self, username: str, password: str = None, authority: str = "local", external_token: str = None) -> Optional[User]:
"""Authenticates a user by username and password or external authority."""
# Clear the users table from cache and reload it
if "users" in self.db._tablesCache:
del self.db._tablesCache["users"]
# Get user by username
user = self.getUserByUsername(username)
if not user:
raise ValueError("User not found")
# Check if the user is disabled
if user.disabled:
raise ValueError("User is disabled")
# Handle authentication based on authority
if authority == "local":
if not password:
raise ValueError("Password is required for local authentication")
# Get the full user record with password hash for verification
userWithPassword = UserInDB(**self.db.getRecordset("users", recordFilter={"id": user.id})[0])
if not self._verifyPassword(password, userWithPassword.hashedPassword):
raise ValueError("Invalid password")
elif authority in ["microsoft", "google"]: # Support for multiple external auth providers
# Verify that the user has the correct authentication authority
if user.authenticationAuthority != authority:
raise ValueError(f"User does not have {authority} authentication enabled")
# Verify that the user has a valid connection for this authority
if not any(conn.authority == authority for conn in user.connections):
raise ValueError(f"User does not have a valid {authority} connection")
# Verify the external token
if not external_token:
raise ValueError(f"External token is required for {authority} authentication")
# Get the appropriate auth service
if authority == "microsoft":
from .msftInterface import getInterface as getMsftInterface
auth_service = getMsftInterface({"_mandateId": user._mandateId, "id": user.id})
elif authority == "google":
from .googleInterface import getInterface as getGoogleInterface
auth_service = getGoogleInterface({"_mandateId": user._mandateId, "id": user.id})
else:
raise ValueError(f"Unsupported authentication authority: {authority}")
# Verify the token
if not auth_service.verifyToken(external_token):
raise ValueError(f"Invalid or expired {authority} token")
else:
raise ValueError(f"Unknown authentication authority: {authority}")
return user
def createUser(self, username: str, password: str = None, email: str = None, fullName: str = None, def createUser(self, username: str, password: str = None, email: str = None, fullName: str = None,
language: str = "en", disabled: bool = False, language: str = "en", disabled: bool = False,
privilege: str = "user", authenticationAuthority: str = "local") -> User: privilege: str = "user", authenticationAuthority: str = "local",
"""Create a new user""" externalId: str = None, externalUsername: str = None, externalEmail: str = None) -> User:
"""Create a new user with optional external connection"""
try: try:
# Validate username # Validate username
if not username: if not username:
@ -369,7 +483,8 @@ class GatewayInterface:
disabled=disabled, disabled=disabled,
privilege=privilege, privilege=privilege,
authenticationAuthority=authenticationAuthority, authenticationAuthority=authenticationAuthority,
hashedPassword=self._getPasswordHash(password) if authenticationAuthority == "local" else None hashedPassword=self._getPasswordHash(password) if authenticationAuthority == "local" else None,
connections=[]
) )
# Create user record # Create user record
@ -377,6 +492,16 @@ class GatewayInterface:
if not createdRecord or not createdRecord.get("id"): if not createdRecord or not createdRecord.get("id"):
raise ValueError("Failed to create user record") raise ValueError("Failed to create user record")
# Add external connection if provided
if externalId and externalUsername:
self.addUserConnection(
createdRecord["id"],
authenticationAuthority,
externalId,
externalUsername,
externalEmail
)
# Get created user using the returned ID # Get created user using the returned ID
createdUser = self.db.getRecordset("users", recordFilter={"id": createdRecord["id"]}) createdUser = self.db.getRecordset("users", recordFilter={"id": createdRecord["id"]})
if not createdUser or len(createdUser) == 0: if not createdUser or len(createdUser) == 0:
@ -398,41 +523,6 @@ class GatewayInterface:
logger.error(f"Unexpected error creating user: {str(e)}") logger.error(f"Unexpected error creating user: {str(e)}")
raise ValueError(f"Failed to create user: {str(e)}") raise ValueError(f"Failed to create user: {str(e)}")
def authenticateUser(self, username: str, password: str = None) -> Optional[User]:
"""Authenticates a user by username and password."""
# Clear the users table from cache and reload it
if "users" in self.db._tablesCache:
del self.db._tablesCache["users"]
# Get user by username
user = self.getUserByUsername(username)
if not user:
raise ValueError("Benutzer nicht gefunden")
# Check if the user is disabled
if user.disabled:
raise ValueError("Benutzer ist deaktiviert")
# Handle authentication based on authority
auth_authority = user.authenticationAuthority
if auth_authority == "local":
if not password:
raise ValueError("Passwort ist erforderlich")
# Get the full user record with password hash for verification
userWithPassword = UserInDB(**self.db.getRecordset("users", recordFilter={"id": user.id})[0])
if not self._verifyPassword(password, userWithPassword.hashedPassword):
raise ValueError("Falsches Passwort")
elif auth_authority == "microsoft":
# For Microsoft users, we don't verify the password here
# The authentication is handled by the Microsoft OAuth flow
pass
else:
raise ValueError(f"Unbekannte Authentifizierungsmethode: {auth_authority}")
return user
def updateUser(self, userId: str, userData: Dict[str, Any]) -> User: def updateUser(self, userId: str, userData: Dict[str, Any]) -> User:
"""Updates a user if current user has permission.""" """Updates a user if current user has permission."""
# Check if the user exists and current user has access # Check if the user exists and current user has access
@ -525,21 +615,83 @@ class GatewayInterface:
return success return success
def setupLocalAuth(self, userId: str, password: str) -> User:
    """Set up local authentication for a user who registered via an
    external provider (e.g. Microsoft).

    Args:
        userId: ID of the user to convert to local authentication.
        password: New plain-text password (minimum 8 characters).

    Returns:
        The updated User.

    Raises:
        ValueError: If the user does not exist, the password is invalid,
            or the update fails.
    """
    # Validation errors are raised as-is; only unexpected failures from the
    # update are wrapped, so messages are not double-prefixed.
    user = self.getUser(userId)
    if not user:
        raise ValueError(f"User {userId} not found")

    if not password:
        raise ValueError("Password is required")
    if len(password) < 8:
        raise ValueError("Password must be at least 8 characters long")

    userData = {
        "hashedPassword": self._getPasswordHash(password),
        "authenticationAuthority": "local"  # Switch primary authority to local
    }

    try:
        return self.updateUser(userId, userData)
    except ValueError:
        raise
    except Exception as e:
        logger.error(f"Error setting up local authentication: {str(e)}")
        raise ValueError(f"Failed to set up local authentication: {str(e)}") from e
def getInterface(currentUser: Dict[str, Any]) -> GatewayInterface:
    """
    Returns a GatewayInterface instance for the current user.

    Instances are cached per (mandateId, userId) so each user context gets
    its own interface.

    Raises:
        ValueError: If the user context lacks mandateId or id.
    """
    mandateId = currentUser.get("mandateId")
    userId = currentUser.get("id")
    if not mandateId or not userId:
        raise ValueError("Invalid user context: mandateId and id are required")

    contextKey = f"{mandateId}_{userId}"
    # Create new instance if not exists
    if contextKey not in _gatewayInterfaces:
        _gatewayInterfaces[contextKey] = GatewayInterface(currentUser)
    return _gatewayInterfaces[contextKey]

def getRootUser() -> Dict[str, Any]:
    """
    Returns the root user from the database.
    This is the user with the initial ID in the users table.

    Raises:
        ValueError: If the root user cannot be determined.
    """
    try:
        # Bootstrap read: getInterface() requires a user context, which does
        # not exist yet at this point, so build a context-free interface
        # directly. NOTE(review): assumes GatewayInterface() still accepts a
        # missing user context -- confirm against its __init__.
        readInterface = GatewayInterface()

        initialUserId = readInterface.db.getInitialId("users")
        if not initialUserId:
            raise ValueError("No initial user ID found in database")

        users = readInterface.db.getRecordset("users", recordFilter={"id": initialUserId})
        if not users:
            raise ValueError(f"Root user with ID {initialUserId} not found in database")
        return users[0]
    except Exception as e:
        logger.error(f"Error getting root user: {str(e)}")
        raise ValueError(f"Failed to get root user: {str(e)}") from e

def getRootInterface() -> GatewayInterface:
    """
    Returns a GatewayInterface instance with root privileges.
    This is used for initial setup and user creation.
    """
    global _rootGatewayInterface
    if _rootGatewayInterface is None:
        rootUser = getRootUser()
        _rootGatewayInterface = GatewayInterface(rootUser)
    return _rootGatewayInterface

View file

@ -56,6 +56,30 @@ class Mandate(BaseModel):
"language": Label(default="Language", translations={"en": "Language", "fr": "Langue"}) "language": Label(default="Language", translations={"en": "Language", "fr": "Langue"})
} }
class UserConnection(BaseModel):
    """Data model for a user's connection to an external service"""
    # Unique ID of the connection record itself (not the external account).
    id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Unique ID of the connection")
    # Which identity provider this connection belongs to.
    authority: str = Field(description="Authentication authority (microsoft, google, etc.)")
    # Identifier of the account inside the external system.
    externalId: str = Field(description="User ID in the external system")
    # Display/login name in the external system.
    externalUsername: str = Field(description="Username in the external system")
    # Email reported by the external system, when available.
    externalEmail: Optional[str] = Field(None, description="Email in the external system")
    # Timestamp of when the link was first established.
    connectedAt: datetime = Field(default_factory=datetime.now, description="When the connection was established")
    # Human-readable label for this model class (UI metadata).
    label: Label = Field(
        default=Label(default="User Connection", translations={"en": "User Connection", "fr": "Connexion utilisateur"}),
        description="Label for the class"
    )
    # Labels for attributes
    fieldLabels: Dict[str, Label] = {
        "id": Label(default="ID", translations={}),
        "authority": Label(default="Authority", translations={"en": "Authority", "fr": "Autorité"}),
        "externalId": Label(default="External ID", translations={"en": "External ID", "fr": "ID externe"}),
        "externalUsername": Label(default="External Username", translations={"en": "External Username", "fr": "Nom d'utilisateur externe"}),
        "externalEmail": Label(default="External Email", translations={"en": "External Email", "fr": "Email externe"}),
        "connectedAt": Label(default="Connected At", translations={"en": "Connected At", "fr": "Connecté le"})
    }
class User(BaseModel): class User(BaseModel):
"""Data model for a user""" """Data model for a user"""
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Unique ID of the user") id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Unique ID of the user")
@ -65,8 +89,9 @@ class User(BaseModel):
language: str = Field(description="Preferred language of the user") language: str = Field(description="Preferred language of the user")
disabled: Optional[bool] = Field(False, description="Indicates whether the user is disabled") disabled: Optional[bool] = Field(False, description="Indicates whether the user is disabled")
privilege: str = Field(description="Permission level") #sysadmin,admin,user privilege: str = Field(description="Permission level") #sysadmin,admin,user
authenticationAuthority: str = Field(default="local", description="Authentication authority (local, microsoft)") authenticationAuthority: str = Field(default="local", description="Primary authentication authority (local, microsoft)")
mandateId: str = Field(description="ID of the mandate this user belongs to") mandateId: str = Field(description="ID of the mandate this user belongs to")
connections: List[UserConnection] = Field(default_factory=list, description="List of external service connections")
label: Label = Field( label: Label = Field(
default=Label(default="User", translations={"en": "User", "fr": "Utilisateur"}), default=Label(default="User", translations={"en": "User", "fr": "Utilisateur"}),
@ -83,7 +108,8 @@ class User(BaseModel):
"language": Label(default="Language", translations={"en": "Language", "fr": "Langue"}), "language": Label(default="Language", translations={"en": "Language", "fr": "Langue"}),
"disabled": Label(default="Disabled", translations={"en": "Disabled", "fr": "Désactivé"}), "disabled": Label(default="Disabled", translations={"en": "Disabled", "fr": "Désactivé"}),
"privilege": Label(default="Permission level", translations={"en": "Access level", "fr": "Niveau d'accès"}), "privilege": Label(default="Permission level", translations={"en": "Access level", "fr": "Niveau d'accès"}),
"authenticationAuthority": Label(default="Authentication Authority", translations={"en": "Authentication Authority", "fr": "Autorité d'authentification"}) "authenticationAuthority": Label(default="Authentication Authority", translations={"en": "Authentication Authority", "fr": "Autorité d'authentification"}),
"connections": Label(default="External Connections", translations={"en": "External Connections", "fr": "Connexions externes"})
} }

View file

@ -0,0 +1,113 @@
"""
Access control module for Google interface.
Handles user access management and permission checks for Google tokens.
"""
from typing import Dict, Any, List, Optional
class GoogleAccess:
    """
    Access control class for Google interface.

    Filters Google-token recordsets by the current user's privilege and
    answers modify-permission checks.
    """

    def __init__(self, currentUser: Dict[str, Any], db):
        """Initialize with user context.

        Args:
            currentUser: Authenticated user record; must contain "mandateId"
                and "id" (as passed by GoogleInterface.setUserContext).
            db: Database connector used for ownership lookups.

        Raises:
            ValueError: If mandateId or id is missing from the context.
        """
        self.currentUser = currentUser
        # The user record carries plain "mandateId"/"id" keys (see
        # GoogleInterface.setUserContext); the previous underscore-prefixed
        # lookups never matched and rejected every user.
        self._mandateId = currentUser.get("mandateId")
        self._userId = currentUser.get("id")
        if not self._mandateId or not self._userId:
            raise ValueError("Invalid user context: mandateId and id are required")
        self.db = db

    def uam(self, table: str, recordset: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
        """
        Unified user access management function that filters data based on user
        privileges and adds access control attributes.

        Args:
            table: Name of the table
            recordset: Recordset to filter based on access rules

        Returns:
            Filtered recordset with _hideView/_hideEdit/_hideDelete flags added.
        """
        userPrivilege = self.currentUser.get("privilege", "user")

        # Records are stored with plain "mandateId"/"userId" keys
        # (see the GoogleToken model), so filter on those.
        if userPrivilege == "sysadmin":
            filtered_records = recordset  # System admins see all records
        elif userPrivilege == "admin":
            # Admins see records in their mandate
            filtered_records = [r for r in recordset
                                if r.get("mandateId") == self._mandateId]
        else:  # Regular users only see their own Google tokens
            filtered_records = [r for r in recordset
                                if r.get("mandateId") == self._mandateId
                                and r.get("userId") == self._userId]

        # Annotate each visible record with UI access-control flags.
        for record in filtered_records:
            record_id = record.get("id")
            canModify = self.canModify(table, record_id)
            record["_hideView"] = False  # Anything that passed the filter is viewable
            record["_hideEdit"] = not canModify
            record["_hideDelete"] = not canModify

        return filtered_records

    def canModify(self, table: str, recordId: Optional[str] = None) -> bool:
        """
        Checks if the current user can modify (create/update/delete) records
        in a table.

        Args:
            table: Name of the table
            recordId: Optional record ID for specific record check

        Returns:
            Boolean indicating permission
        """
        userPrivilege = self.currentUser.get("privilege", "user")

        # System admins can modify anything
        if userPrivilege == "sysadmin":
            return True

        if recordId is not None:
            # Look the record up to check ownership.
            records = self.db.getRecordset(table, recordFilter={"id": recordId})
            if not records:
                return False
            record = records[0]

            # Admins can modify anything in their mandate
            if userPrivilege == "admin" and record.get("mandateId") == self._mandateId:
                return True

            # Users can only modify their own records
            return (record.get("mandateId") == self._mandateId and
                    record.get("userId") == self._userId)

        # General (create) permission without a specific record:
        if userPrivilege == "admin":
            return True
        # Regular users may create their own Google tokens
        return table == "googleTokens"

View file

@ -0,0 +1,287 @@
"""
Google interface for handling Google authentication and API operations.
"""
import logging
import requests
from typing import Dict, Any, Optional, Tuple
from datetime import datetime
import secrets
from google.oauth2.credentials import Credentials
from google_auth_oauthlib.flow import Flow
from google.auth.transport.requests import Request
import os
from modules.shared.configuration import APP_CONFIG
from modules.interfaces.googleModel import GoogleToken, GoogleUserInfo, GoogleConfig
from modules.connectors.connectorDbJson import DatabaseConnector
from modules.interfaces.googleAccess import GoogleAccess
from modules.interfaces.gatewayInterface import getRootUser
logger = logging.getLogger(__name__)
# Singleton factory for GoogleInterface instances per context
_googleInterfaces = {}
# Root interface instance
_rootGoogleInterface = None
class GoogleInterface:
    """Interface for Google OAuth2 authentication and user-info API calls.

    Wraps the google-auth OAuth2 flow (login URL, auth-code exchange, token
    refresh) and holds per-user context for access control and persistence.
    """

    def __init__(self, currentUser: Dict[str, Any] = None):
        """Initialize the Google interface.

        Args:
            currentUser: Optional user record containing "mandateId" and "id".
                When omitted, the interface only offers database access.
        """
        # User context (may be absent for bootstrap/database-only use).
        self.currentUser = currentUser
        self.mandateId = currentUser.get("mandateId") if currentUser else None
        self.userId = currentUser.get("id") if currentUser else None
        self.access = None  # Set by setUserContext() once a user is known.

        # OAuth client configuration from application settings.
        self.clientId = APP_CONFIG.get("Service_GOOGLE_CLIENT_ID")
        self.clientSecret = APP_CONFIG.get("Service_GOOGLE_CLIENT_SECRET")
        self.redirectUri = APP_CONFIG.get("Service_GOOGLE_REDIRECT_URI")
        self.authorityUrl = "https://accounts.google.com"
        self.tokenUrl = "https://oauth2.googleapis.com/token"
        self.userInfoUrl = "https://www.googleapis.com/oauth2/v3/userinfo"
        self.scopes = ["openid", "profile", "email"]

        # Token storage.
        self._initializeDatabase()

        # OAuth2 flow shared by initiateLogin()/handleAuthCallback().
        self.flow = Flow.from_client_config(
            {
                "web": {
                    "client_id": self.clientId,
                    "client_secret": self.clientSecret,
                    "auth_uri": f"{self.authorityUrl}/o/oauth2/auth",
                    "token_uri": self.tokenUrl,
                    "redirect_uris": [self.redirectUri]
                }
            },
            scopes=self.scopes
        )

        if currentUser:
            self.setUserContext(currentUser)

    def _initializeDatabase(self):
        """Initializes the database connection for Google token storage.

        Raises:
            Exception: Propagates any connector initialization failure.
        """
        try:
            # Configuration values with defaults for the JSON-file connector.
            dbHost = APP_CONFIG.get("DB_GOOGLE_HOST", "data")
            dbDatabase = APP_CONFIG.get("DB_GOOGLE_DATABASE", "google")
            dbUser = APP_CONFIG.get("DB_GOOGLE_USER")
            dbPassword = APP_CONFIG.get("DB_GOOGLE_PASSWORD_SECRET")

            # The connector stores files under dbHost; make sure it exists.
            os.makedirs(dbHost, exist_ok=True)

            self.db = DatabaseConnector(
                dbHost=dbHost,
                dbDatabase=dbDatabase,
                dbUser=dbUser,
                dbPassword=dbPassword,
                mandateId=self.mandateId,
                userId=self.userId
            )
            self.db.updateContext(self.mandateId, self.userId)
            logger.info("Database initialized successfully")
        except Exception as e:
            logger.error(f"Failed to initialize database: {str(e)}")
            raise

    def initiateLogin(self) -> str:
        """Build the Google authorization URL to redirect the user to.

        Returns:
            The authorization URL, or None when URL generation fails.
        """
        try:
            auth_url, _ = self.flow.authorization_url(
                access_type="offline",          # Request a refresh token
                include_granted_scopes="true",  # Incremental authorization
                state=self._generateState()     # CSRF protection
            )
            return auth_url
        except Exception as e:
            logger.error(f"Error initiating Google login: {str(e)}")
            return None

    def handleAuthCallback(self, code: str) -> Optional[GoogleToken]:
        """Exchange an OAuth authorization code for a GoogleToken.

        Args:
            code: Authorization code returned by Google's consent screen.

        Returns:
            A populated GoogleToken, or None on any failure.
        """
        try:
            self.flow.fetch_token(code=code)
            credentials = self.flow.credentials

            user_info = self.getUserInfoFromToken(credentials.token)
            if not user_info:
                return None

            # NOTE(review): google-auth exposes expiry as a naive datetime;
            # .timestamp() interprets naive values as local time -- confirm
            # the offset handling if tokens appear to expire early/late.
            expires_at = credentials.expiry.timestamp()
            return GoogleToken(
                access_token=credentials.token,
                refresh_token=credentials.refresh_token,
                # Whole seconds remaining; the model declares expires_in: int.
                expires_in=max(0, int(expires_at - datetime.now().timestamp())),
                # Credentials objects have no token_type attribute (the
                # original access raised AttributeError); Google issues
                # bearer tokens.
                token_type="Bearer",
                expires_at=expires_at,
                user_info=user_info.model_dump(),
                mandateId=self.mandateId,
                userId=self.userId
            )
        except Exception as e:
            logger.error(f"Error handling auth callback: {str(e)}")
            return None

    def verifyToken(self, token: str) -> bool:
        """Check that a Google access token belongs to the current user.

        Args:
            token: Google access token to verify.

        Returns:
            True when the token's Google account matches the user's stored
            "google" connection, False otherwise.
        """
        try:
            user_info = self.getUserInfoFromToken(token)
            if not user_info:
                return False

            # Guard against a missing user record instead of indexing [0].
            users = self.db.getRecordset("users", recordFilter={"id": self.userId})
            if not users:
                return False

            google_connection = next(
                (conn for conn in users[0].get("connections", [])
                 if conn.get("authority") == "google"),
                None
            )
            if not google_connection:
                return False

            # The token must belong to the connected Google account.
            return user_info.id == google_connection.get("externalId")
        except Exception as e:
            logger.error(f"Error verifying Google token: {str(e)}")
            return False

    def getUserInfoFromToken(self, token: str) -> Optional[GoogleUserInfo]:
        """Fetch the user's profile from Google's userinfo endpoint.

        Args:
            token: Google access token.

        Returns:
            GoogleUserInfo on success, None on any error.
        """
        try:
            response = requests.get(
                self.userInfoUrl,
                headers={"Authorization": f"Bearer {token}"}
            )
            if response.status_code != 200:
                logger.error(f"Failed to get user info: {response.text}")
                return None

            data = response.json()
            return GoogleUserInfo(
                id=data["sub"],  # Google uses 'sub' as the unique identifier
                email=data["email"],
                name=data.get("name", ""),
                picture=data.get("picture")  # Profile picture URL, if shared
            )
        except Exception as e:
            logger.error(f"Error getting user info: {str(e)}")
            return None

    def refreshToken(self, refresh_token: str) -> Optional[GoogleToken]:
        """Obtain a fresh access token using a stored refresh token.

        Args:
            refresh_token: Long-lived refresh token from a prior login.

        Returns:
            A new GoogleToken, or None on failure.
        """
        try:
            credentials = Credentials(
                None,  # No current access token; refresh() will fetch one
                refresh_token=refresh_token,
                token_uri=self.tokenUrl,
                client_id=self.clientId,
                client_secret=self.clientSecret
            )
            credentials.refresh(Request())

            user_info = self.getUserInfoFromToken(credentials.token)
            if not user_info:
                return None

            expires_at = credentials.expiry.timestamp()
            return GoogleToken(
                access_token=credentials.token,
                # Google may omit the refresh token on refresh; keep the old one.
                refresh_token=credentials.refresh_token or refresh_token,
                expires_in=max(0, int(expires_at - datetime.now().timestamp())),
                # See handleAuthCallback: Credentials has no token_type.
                token_type="Bearer",
                expires_at=expires_at,
                user_info=user_info.model_dump(),
                mandateId=self.mandateId,
                userId=self.userId
            )
        except Exception as e:
            logger.error(f"Error refreshing token: {str(e)}")
            return None

    def _generateState(self) -> str:
        """Generate a cryptographically secure OAuth state token."""
        return secrets.token_urlsafe(32)

    def setUserContext(self, currentUser: Dict[str, Any]):
        """Attach a user context to this interface instance.

        Args:
            currentUser: User record with "mandateId" and "id".

        Raises:
            ValueError: If mandateId or id is missing.
        """
        if not currentUser:
            logger.info("Initializing interface without user context")
            return

        self.currentUser = currentUser
        self.mandateId = currentUser.get("mandateId")
        self.userId = currentUser.get("id")
        if not self.mandateId or not self.userId:
            raise ValueError("Invalid user context: mandateId and id are required")

        # Access control and DB context follow the user context.
        self.access = GoogleAccess(self.currentUser, self.db)
        self.db.updateContext(self.mandateId, self.userId)
        logger.debug(f"User context set: userId={self.userId}")
def getRootInterface() -> GoogleInterface:
    """
    Returns a GoogleInterface instance with root privileges.
    This is used for initial setup and user creation.
    """
    global _rootGoogleInterface
    # Lazily create the root-level singleton on first use.
    if _rootGoogleInterface is not None:
        return _rootGoogleInterface
    _rootGoogleInterface = GoogleInterface(getRootUser())
    return _rootGoogleInterface
def getInterface(currentUser: Dict[str, Any] = None) -> GoogleInterface:
    """
    Returns a GoogleInterface instance.

    Instances are cached per (mandateId, userId) so that one user's context
    can never leak into another's; the previous single shared "default"
    instance was re-pointed at whichever user called last. Without a user
    context, a shared database-access-only instance is returned.

    Raises:
        ValueError: If a user context is given but lacks mandateId or id.
    """
    if currentUser:
        mandateId = currentUser.get("mandateId")
        userId = currentUser.get("id")
        if not mandateId or not userId:
            raise ValueError("Invalid user context: mandateId and id are required")

        contextKey = f"{mandateId}_{userId}"
        if contextKey not in _googleInterfaces:
            _googleInterfaces[contextKey] = GoogleInterface(currentUser)
        return _googleInterfaces[contextKey]

    # No user context: shared, context-free instance (database access only).
    logger.info("Returning interface without user context")
    if "default" not in _googleInterfaces:
        _googleInterfaces["default"] = GoogleInterface({})
    return _googleInterfaces["default"]

View file

@ -0,0 +1,35 @@
"""
Models for Google authentication and API operations.
"""
from pydantic import BaseModel, Field
from typing import Optional, Dict, Any
from datetime import datetime
class GoogleToken(BaseModel):
    """Model for Google OAuth tokens"""
    # Short-lived bearer token used for Google API calls.
    access_token: str
    # Long-lived token for obtaining new access tokens; may be absent.
    refresh_token: Optional[str] = None
    # Token lifetime in seconds.
    # NOTE(review): callers compute this from timestamp differences (floats);
    # confirm that the values passed in are integral.
    expires_in: int
    # OAuth2 token type.
    token_type: str = "bearer"
    # Absolute expiry as a POSIX timestamp.
    expires_at: float
    # Cached profile data from the userinfo endpoint.
    user_info: Dict[str, Any]
    # Owning mandate and user; used to scope stored tokens.
    mandateId: str
    userId: str
class GoogleUserInfo(BaseModel):
    """Model for Google user information"""
    id: str  # Google uses 'sub' as the unique identifier
    # Primary email address of the Google account.
    email: str
    # Display name; may be empty when Google does not provide one.
    name: str
    picture: Optional[str] = None  # Google provides profile picture URL
class GoogleConfig(BaseModel):
    """Configuration for Google authentication service"""
    # OAuth client credentials issued by the Google Cloud console.
    client_id: str
    client_secret: str
    # Callback URL registered for the OAuth flow.
    redirect_uri: str
    # OAuth scopes to request at login.
    scopes: list[str]
    # Google OAuth endpoints (defaults match Google's public service).
    authority_url: str = "https://accounts.google.com"
    token_url: str = "https://oauth2.googleapis.com/token"
    user_info_url: str = "https://www.googleapis.com/oauth2/v3/userinfo"

View file

@ -13,48 +13,52 @@ import secrets
import os import os
from modules.shared.configuration import APP_CONFIG from modules.shared.configuration import APP_CONFIG
from modules.interfaces.msftModel import MsftToken, MsftUserInfo from .msftModel import MsftToken, MsftUserInfo, MsftConfig
from modules.connectors.connectorDbJson import DatabaseConnector from modules.connectors.connectorDbJson import DatabaseConnector
from modules.interfaces.msftAccess import MsftAccess from .msftAccess import MsftAccess
from modules.interfaces.gatewayInterface import getRootUser
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
# Singleton factory for MsftInterface instances per context # Singleton factory for MsftInterface instances per context
_msftInterfaces = {} _msftInterfaces = {}
# Root interface instance
_rootMsftInterface = None
class MsftInterface: class MsftInterface:
"""Interface for Microsoft authentication and Graph API operations""" """Interface for Microsoft authentication and Graph API operations"""
def __init__(self, currentUser: Dict[str, Any]): def __init__(self, currentUser: Dict[str, Any] = None):
"""Initialize the Microsoft interface""" """Initialize the Microsoft interface"""
# Initialize variables
self.currentUser = currentUser self.currentUser = currentUser
self._mandateId = currentUser.get("_mandateId") self.mandateId = currentUser.get("mandateId") if currentUser else None
self._userId = currentUser.get("id") self.userId = currentUser.get("id") if currentUser else None
self.access = None # Will be set when user context is provided
if not self._mandateId or not self._userId:
raise ValueError("Invalid user context: _mandateId and id are required")
# Initialize configuration # Initialize configuration
self.client_id = APP_CONFIG.get("Service_MSFT_CLIENT_ID") self.clientId = APP_CONFIG.get("Service_MSFT_CLIENT_ID")
self.client_secret = APP_CONFIG.get("Service_MSFT_CLIENT_SECRET") self.clientSecret = APP_CONFIG.get("Service_MSFT_CLIENT_SECRET")
self.tenant_id = APP_CONFIG.get("Service_MSFT_TENANT_ID", "common") self.tenantId = APP_CONFIG.get("Service_MSFT_TENANT_ID", "common")
self.redirect_uri = APP_CONFIG.get("Service_MSFT_REDIRECT_URI") self.redirectUri = APP_CONFIG.get("Service_MSFT_REDIRECT_URI")
self.authority = f"https://login.microsoftonline.com/{self.tenant_id}" self.authority = f"https://login.microsoftonline.com/{self.tenantId}"
self.scopes = ["Mail.ReadWrite", "User.Read"] self.scopes = ["Mail.ReadWrite", "User.Read"]
# Initialize database # Initialize database
self._initializeDatabase() self._initializeDatabase()
# Initialize access control
self.access = MsftAccess(self.currentUser, self.db)
# Initialize MSAL application # Initialize MSAL application
self.msal_app = msal.ConfidentialClientApplication( self.msal_app = msal.ConfidentialClientApplication(
self.client_id, self.clientId,
authority=self.authority, authority=self.authority,
client_credential=self.client_secret client_credential=self.clientSecret
) )
# Set user context if provided
if currentUser:
self.setUserContext(currentUser)
def _initializeDatabase(self): def _initializeDatabase(self):
"""Initializes the database connection.""" """Initializes the database connection."""
try: try:
@ -72,12 +76,12 @@ class MsftInterface:
dbDatabase=dbDatabase, dbDatabase=dbDatabase,
dbUser=dbUser, dbUser=dbUser,
dbPassword=dbPassword, dbPassword=dbPassword,
_mandateId=self._mandateId, mandateId=self.mandateId,
_userId=self._userId userId=self.userId
) )
# Set context # Set context
self.db.updateContext(self._mandateId, self._userId) self.db.updateContext(self.mandateId, self.userId)
logger.info("Database initialized successfully") logger.info("Database initialized successfully")
except Exception as e: except Exception as e:
@ -111,156 +115,147 @@ class MsftInterface:
""" """
return self.access.canModify(table, recordId) return self.access.canModify(table, recordId)
def getMsftToken(self) -> Optional[MsftToken]: def initiateLogin(self) -> str:
"""Get Microsoft token for current user""" """Initiate Microsoft login flow"""
try: try:
tokens = self.db.getRecordset("msftTokens", recordFilter={ # Generate auth URL
"_mandateId": self._mandateId, auth_url = self.msal_app.get_authorization_request_url(
"_userId": self._userId scopes=self.scopes,
}) redirect_uri=self.redirectUri,
if not tokens: state=self._generateState()
return None )
return auth_url
# Apply access control
filtered_tokens = self._uam("msftTokens", tokens)
if not filtered_tokens:
return None
return MsftToken(**filtered_tokens[0])
except Exception as e: except Exception as e:
logger.error(f"Error getting Microsoft token: {str(e)}") logger.error(f"Error initiating Microsoft login: {str(e)}")
return None return None
def saveMsftToken(self, token_data: Dict[str, Any]) -> bool: def handleAuthCallback(self, code: str) -> Optional[MsftToken]:
"""Save Microsoft token data""" """Handle Microsoft OAuth callback"""
try: try:
# Check if user can modify tokens # Get token from code
if not self._canModify("msftTokens"): token_response = self.msal_app.acquire_token_by_authorization_code(
raise PermissionError("No permission to save Microsoft token") code,
scopes=self.scopes,
# Add user and mandate IDs to token data redirect_uri=self.redirectUri
token_data["_mandateId"] = self._mandateId
token_data["_userId"] = self._userId
# Validate token data using Pydantic model
try:
token = MsftToken(**token_data)
except Exception as e:
raise ValueError(f"Invalid token data: {str(e)}")
# Check if token already exists
existing_token = self.getMsftToken()
if existing_token:
# Update existing token
return self.db.recordModify("msftTokens", existing_token.id, token.model_dump())
else:
# Create new token record
return self.db.recordCreate("msftTokens", token.model_dump())
except Exception as e:
logger.error(f"Error saving Microsoft token: {str(e)}")
return False
def deleteMsftToken(self) -> bool:
"""Delete Microsoft token for current user"""
try:
if not self._canModify("msftTokens"):
raise PermissionError("No permission to delete Microsoft token")
existing_token = self.getMsftToken()
if existing_token:
return self.db.recordDelete("msftTokens", existing_token.id)
return True
except Exception as e:
logger.error(f"Error deleting Microsoft token: {str(e)}")
return False
def getCurrentUserToken(self) -> Tuple[Optional[MsftUserInfo], Optional[str]]:
"""Get current user's Microsoft token and info"""
try:
token_data = self.getMsftToken()
if not token_data:
return None, None
# Verify token is still valid
if not self.verifyToken(token_data.access_token):
if not self.refreshToken(token_data):
return None, None
token_data = self.getMsftToken()
user_info = token_data.user_info
if not user_info:
user_info = self.getUserInfoFromToken(token_data.access_token)
if user_info:
token_data.user_info = user_info
self.saveMsftToken(token_data.model_dump())
return MsftUserInfo(**user_info) if user_info else None, token_data.access_token
except Exception as e:
logger.error(f"Error getting current user token: {str(e)}")
return None, None
def verifyToken(self, token: str) -> bool:
"""Verify the access token is valid"""
try:
headers = {
'Authorization': f'Bearer {token}',
'Content-Type': 'application/json'
}
response = requests.get('https://graph.microsoft.com/v1.0/me', headers=headers)
return response.status_code == 200
except Exception as e:
logger.error(f"Error verifying token: {str(e)}")
return False
def refreshToken(self, token_data: MsftToken) -> bool:
"""Refresh the access token using the stored refresh token"""
try:
if not token_data or not token_data.refresh_token:
return False
result = self.msal_app.acquire_token_by_refresh_token(
token_data.refresh_token,
scopes=self.scopes
) )
if "error" in result: if "error" in token_response:
logger.error(f"Error refreshing token: {result.get('error')}") logger.error(f"Token acquisition failed: {token_response['error']}")
return False return None
# Update token data # Get user info
token_data.access_token = result["access_token"] user_info = self.getUserInfoFromToken(token_response["access_token"])
if "refresh_token" in result: if not user_info:
token_data.refresh_token = result["refresh_token"] return None
return self.saveMsftToken(token_data.model_dump()) # Create token model
token = MsftToken(
access_token=token_response["access_token"],
refresh_token=token_response.get("refresh_token", ""),
expires_in=token_response.get("expires_in", 0),
token_type=token_response.get("token_type", "bearer"),
expires_at=datetime.now().timestamp() + token_response.get("expires_in", 0),
user_info=user_info.model_dump(),
mandateId=self.mandateId,
userId=self.userId
)
return token
except Exception as e: except Exception as e:
logger.error(f"Error refreshing token: {str(e)}") logger.error(f"Error handling auth callback: {str(e)}")
return None
def verifyToken(self, token: str) -> bool:
"""Verify Microsoft token"""
try:
# Get user info from token
user_info = self.getUserInfoFromToken(token)
if not user_info:
return False
# Get current user's Microsoft connection
user = self.db.getRecordset("users", recordFilter={"id": self.userId})[0]
msft_connection = next((conn for conn in user.get("connections", [])
if conn.get("authority") == "microsoft"), None)
if not msft_connection:
return False
# Verify the token belongs to this user
return user_info.id == msft_connection.get("externalId")
except Exception as e:
logger.error(f"Error verifying Microsoft token: {str(e)}")
return False return False
def getUserInfoFromToken(self, access_token: str) -> Optional[Dict[str, Any]]: def getUserInfoFromToken(self, token: str) -> Optional[MsftUserInfo]:
"""Get user information using the access token""" """Get user info from Microsoft Graph"""
try: try:
headers = { # Call Microsoft Graph API
'Authorization': f'Bearer {access_token}', response = requests.get(
'Content-Type': 'application/json' "https://graph.microsoft.com/v1.0/me",
} headers={"Authorization": f"Bearer {token}"}
response = requests.get('https://graph.microsoft.com/v1.0/me', headers=headers) )
if response.status_code == 200:
user_data = response.json() if response.status_code != 200:
return { logger.error(f"Failed to get user info: {response.text}")
"name": user_data.get("displayName", ""), return None
"email": user_data.get("userPrincipalName", ""),
"id": user_data.get("id", "") data = response.json()
}
return None # Create user info model
return MsftUserInfo(
id=data["id"],
email=data.get("mail") or data.get("userPrincipalName"),
name=data.get("displayName", ""),
picture=None # Microsoft Graph doesn't provide profile picture by default
)
except Exception as e: except Exception as e:
logger.error(f"Error getting user info: {str(e)}") logger.error(f"Error getting user info: {str(e)}")
return None return None
def refreshToken(self, refresh_token: str) -> Optional[MsftToken]:
"""Refresh Microsoft token"""
try:
# Refresh token
token_response = self.msal_app.acquire_token_by_refresh_token(
refresh_token,
scopes=self.scopes
)
if "error" in token_response:
logger.error(f"Token refresh failed: {token_response['error']}")
return None
# Get user info
user_info = self.getUserInfoFromToken(token_response["access_token"])
if not user_info:
return None
# Create token model
token = MsftToken(
access_token=token_response["access_token"],
refresh_token=token_response.get("refresh_token", refresh_token),
expires_in=token_response.get("expires_in", 0),
token_type=token_response.get("token_type", "bearer"),
expires_at=datetime.now().timestamp() + token_response.get("expires_in", 0),
user_info=user_info.model_dump(),
mandateId=self.mandateId,
userId=self.userId
)
return token
except Exception as e:
logger.error(f"Error refreshing token: {str(e)}")
return None
def _generateState(self) -> str:
"""Generate secure state token"""
return secrets.token_urlsafe(32)
def createDraftEmail(self, recipient: str, subject: str, body: str, attachments: List[Dict[str, Any]] = None) -> bool: def createDraftEmail(self, recipient: str, subject: str, body: str, attachments: List[Dict[str, Any]] = None) -> bool:
"""Create a draft email using Microsoft Graph API""" """Create a draft email using Microsoft Graph API"""
try: try:
@ -340,70 +335,186 @@ class MsftInterface:
logger.error(f"Error creating draft email: {str(e)}") logger.error(f"Error creating draft email: {str(e)}")
return False return False
def initiateLogin(self) -> str: def saveMsftToken(self, token_data: Dict[str, Any]) -> bool:
"""Initiate Microsoft login flow""" """
Save Microsoft token data to the database.
Args:
token_data: Token data to save
Returns:
bool: True if successful, False otherwise
"""
try: try:
state = secrets.token_urlsafe(32) # Get existing token if any
auth_url = self.msal_app.get_authorization_request_url( existing_tokens = self.db.getRecordset(
self.scopes, "msftTokens",
state=state, recordFilter={
redirect_uri=self.redirect_uri "mandateId": self.mandateId,
) "userId": self.userId
return auth_url }
except Exception as e:
logger.error(f"Error initiating login: {str(e)}")
return None
def handleAuthCallback(self, code: str) -> Optional[Dict[str, Any]]:
"""Handle Microsoft OAuth callback"""
try:
token_response = self.msal_app.acquire_token_by_authorization_code(
code,
self.scopes,
redirect_uri=self.redirect_uri
) )
if "error" in token_response: if existing_tokens:
logger.error(f"Token acquisition failed: {token_response['error']}") # Update existing token
return None token_id = existing_tokens[0]["id"]
success = self.db.updateRecord(
"msftTokens",
token_id,
token_data
)
else:
# Create new token record
success = self.db.createRecord(
"msftTokens",
token_data
)
user_info = self.getUserInfoFromToken(token_response["access_token"]) return success
if not user_info:
return None
# Create MsftToken instance
token_data = MsftToken(
access_token=token_response["access_token"],
refresh_token=token_response.get("refresh_token", ""),
expires_in=token_response.get("expires_in", 0),
token_type=token_response.get("token_type", "bearer"),
expires_at=datetime.now().timestamp() + token_response.get("expires_in", 0),
user_info=user_info,
_mandateId=self._mandateId,
_userId=self._userId
)
return token_data.model_dump()
except Exception as e: except Exception as e:
logger.error(f"Error handling auth callback: {str(e)}") logger.error(f"Error saving Microsoft token: {str(e)}")
return False
def getMsftToken(self) -> Optional[Dict[str, Any]]:
"""
Get Microsoft token data for current user.
Returns:
Optional[Dict[str, Any]]: Token data if found, None otherwise
"""
try:
tokens = self.db.getRecordset(
"msftTokens",
recordFilter={
"mandateId": self.mandateId,
"userId": self.userId
}
)
if not tokens:
return None
return tokens[0]
except Exception as e:
logger.error(f"Error getting Microsoft token: {str(e)}")
return None return None
def getInterface(currentUser: Dict[str, Any]) -> MsftInterface: def getCurrentUserToken(self) -> Tuple[Optional[Dict[str, Any]], Optional[str]]:
""" """
Returns a MsftInterface instance for the current user. Get current user's Microsoft token and user info.
Handles initialization of database and records.
"""
mandateId = currentUser.get("_mandateId")
userId = currentUser.get("id")
if not mandateId or not userId:
raise ValueError("Invalid user context: _mandateId and id are required")
# Create context key Returns:
contextKey = f"{mandateId}_{userId}" Tuple[Optional[Dict[str, Any]], Optional[str]]: User info and access token
"""
try:
token_data = self.getMsftToken()
if not token_data:
return None, None
# Check if token needs refresh
if datetime.now().timestamp() >= token_data["expires_at"]:
if not token_data.get("refresh_token"):
return None, None
# Refresh token
new_token = self.refreshToken(token_data["refresh_token"])
if not new_token:
return None, None
# Save new token
self.saveMsftToken(new_token.model_dump())
token_data = new_token.model_dump()
return token_data["user_info"], token_data["access_token"]
except Exception as e:
logger.error(f"Error getting current user token: {str(e)}")
return None, None
def deleteMsftToken(self) -> bool:
"""
Delete Microsoft token for current user.
Returns:
bool: True if successful, False otherwise
"""
try:
# Get existing token
existing_tokens = self.db.getRecordset(
"msftTokens",
recordFilter={
"mandateId": self.mandateId,
"userId": self.userId
}
)
if not existing_tokens:
return True # No token to delete
# Delete token
success = self.db.deleteRecord(
"msftTokens",
existing_tokens[0]["id"]
)
return success
except Exception as e:
logger.error(f"Error deleting Microsoft token: {str(e)}")
return False
def setUserContext(self, currentUser: Dict[str, Any]):
"""Set user context for the interface"""
if not currentUser:
logger.info("Initializing interface without user context")
return
self.currentUser = currentUser
self.mandateId = currentUser.get("mandateId")
self.userId = currentUser.get("id")
if not self.mandateId or not self.userId:
raise ValueError("Invalid user context: mandateId and id are required")
# Initialize access control with user context
self.access = MsftAccess(self.currentUser, self.db)
# Update database context
self.db.updateContext(self.mandateId, self.userId)
logger.debug(f"User context set: userId={self.userId}")
def getRootInterface() -> MsftInterface:
"""
Returns a MsftInterface instance with root privileges.
This is used for initial setup and user creation.
"""
global _rootMsftInterface
if _rootMsftInterface is None:
# Get root user from gateway
rootUser = getRootUser()
_rootMsftInterface = MsftInterface(rootUser)
return _rootMsftInterface
def getInterface(currentUser: Dict[str, Any] = None) -> MsftInterface:
"""
Returns a MsftInterface instance.
If currentUser is provided, initializes with user context.
Otherwise, returns an instance with only database access.
"""
# Create new instance if not exists # Create new instance if not exists
if contextKey not in _msftInterfaces: if "default" not in _msftInterfaces:
_msftInterfaces[contextKey] = MsftInterface(currentUser) _msftInterfaces["default"] = MsftInterface(currentUser or {})
return _msftInterfaces[contextKey] interface = _msftInterfaces["default"]
if currentUser:
interface.setUserContext(currentUser)
else:
logger.info("Returning interface without user context")
return interface

View file

@ -1,10 +1,38 @@
""" """
Data models for Microsoft integration. Models for Microsoft authentication and Graph API operations.
""" """
from pydantic import BaseModel, Field from pydantic import BaseModel, Field
from typing import List, Dict, Any, Optional from typing import Optional, Dict, Any
from datetime import datetime from datetime import datetime
import uuid
class MsftToken(BaseModel):
"""Model for Microsoft OAuth tokens"""
access_token: str
refresh_token: Optional[str] = None
expires_in: int
token_type: str = "bearer"
expires_at: float
user_info: Dict[str, Any]
mandateId: str
userId: str
class MsftUserInfo(BaseModel):
"""Model for Microsoft user information"""
id: str
email: str
name: str
picture: Optional[str] = None # Microsoft Graph doesn't provide profile picture by default
class MsftConfig(BaseModel):
"""Configuration for Microsoft authentication service"""
client_id: str
client_secret: str
redirect_uri: str
scopes: list[str]
authority_url: str = "https://login.microsoftonline.com/common"
token_url: str = "https://login.microsoftonline.com/common/oauth2/v2.0/token"
user_info_url: str = "https://graph.microsoft.com/v1.0/me"
# Get all attributes of the model # Get all attributes of the model
def getModelAttributes(modelClass): def getModelAttributes(modelClass):
@ -24,54 +52,6 @@ class Label(BaseModel):
return self.translations[language] return self.translations[language]
return self.default return self.default
class MsftToken(BaseModel):
"""Data model for Microsoft authentication token"""
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Unique ID of the token")
access_token: str = Field(description="Microsoft access token")
refresh_token: str = Field(description="Microsoft refresh token")
expires_in: int = Field(description="Token expiration time in seconds")
token_type: str = Field(description="Type of token (usually 'bearer')")
expires_at: float = Field(description="Timestamp when token expires")
user_info: Optional[Dict[str, Any]] = Field(None, description="User information from Microsoft")
_mandateId: str = Field(description="Mandate ID associated with the token")
_userId: str = Field(description="User ID associated with the token")
label: Label = Field(
default=Label(default="Microsoft Token", translations={"en": "Microsoft Token", "fr": "Jeton Microsoft"}),
description="Label for the class"
)
# Labels for attributes
fieldLabels: Dict[str, Label] = {
"id": Label(default="ID", translations={}),
"access_token": Label(default="Access Token", translations={"en": "Access Token", "fr": "Jeton d'accès"}),
"refresh_token": Label(default="Refresh Token", translations={"en": "Refresh Token", "fr": "Jeton de rafraîchissement"}),
"expires_in": Label(default="Expires In", translations={"en": "Expires In", "fr": "Expire dans"}),
"token_type": Label(default="Token Type", translations={"en": "Token Type", "fr": "Type de jeton"}),
"expires_at": Label(default="Expires At", translations={"en": "Expires At", "fr": "Expire à"}),
"user_info": Label(default="User Info", translations={"en": "User Info", "fr": "Info utilisateur"}),
"_mandateId": Label(default="Mandate ID", translations={"en": "Mandate ID", "fr": "ID de mandat"}),
"_userId": Label(default="User ID", translations={"en": "User ID", "fr": "ID utilisateur"})
}
class MsftUserInfo(BaseModel):
"""Data model for Microsoft user information"""
name: str = Field(description="User's display name")
email: str = Field(description="User's email address")
id: str = Field(description="User's Microsoft ID")
label: Label = Field(
default=Label(default="Microsoft User Info", translations={"en": "Microsoft User Info", "fr": "Info utilisateur Microsoft"}),
description="Label for the class"
)
# Labels for attributes
fieldLabels: Dict[str, Label] = {
"name": Label(default="Name", translations={"en": "Name", "fr": "Nom"}),
"email": Label(default="Email", translations={"en": "Email", "fr": "E-mail"}),
"id": Label(default="ID", translations={})
}
# Response models for Microsoft routes # Response models for Microsoft routes
class MsftAuthStatus(BaseModel): class MsftAuthStatus(BaseModel):
"""Response model for Microsoft authentication status""" """Response model for Microsoft authentication status"""

View file

@ -145,13 +145,13 @@ async def get_file(
async def update_file( async def update_file(
file_id: str, file_id: str,
file_data: FileItem, file_data: FileItem,
current_user: Dict[str, Any] = Depends(auth.getCurrentActiveUser) currentUser: Dict[str, Any] = Depends(auth.getCurrentActiveUser)
): ):
""" """
Update file metadata Update file metadata
""" """
try: try:
interfaceLucydom = lucydomInterface.getInterface(current_user) interfaceLucydom = lucydomInterface.getInterface(currentUser)
# Get the file from the database # Get the file from the database
file = interfaceLucydom.getFile(file_id) file = interfaceLucydom.getFile(file_id)
@ -159,7 +159,7 @@ async def update_file(
raise HTTPException(status_code=404, detail="File not found") raise HTTPException(status_code=404, detail="File not found")
# Check if user has access to the file # Check if user has access to the file
if file.get("userId", 0) != current_user.get("id", 0): if file.get("userId", 0) != currentUser.get("id", 0):
raise HTTPException(status_code=403, detail="Not authorized to update this file") raise HTTPException(status_code=403, detail="Not authorized to update this file")
# Convert FileItem to dict for interface # Convert FileItem to dict for interface
@ -171,8 +171,8 @@ async def update_file(
raise HTTPException(status_code=500, detail="Failed to update file") raise HTTPException(status_code=500, detail="Failed to update file")
# Get updated file and convert to FileItem # Get updated file and convert to FileItem
updated_file = interfaceLucydom.getFile(file_id) updatedFile = interfaceLucydom.getFile(file_id)
return FileItem(**updated_file) return FileItem(**updatedFile)
except HTTPException as he: except HTTPException as he:
raise he raise he

View file

@ -1,8 +1,8 @@
from fastapi import APIRouter, HTTPException, Depends, Body, status, Response from fastapi import APIRouter, HTTPException, Depends, Body, status, Response
from fastapi.responses import FileResponse from fastapi.responses import FileResponse, HTMLResponse, RedirectResponse, JSONResponse
from fastapi.security import OAuth2PasswordRequestForm from fastapi.security import OAuth2PasswordRequestForm
from fastapi.staticfiles import StaticFiles from fastapi.staticfiles import StaticFiles
from typing import Dict, Any from typing import Dict, Any, Optional
from datetime import timedelta from datetime import timedelta
import pathlib import pathlib
import os import os
@ -11,7 +11,6 @@ from pathlib import Path as FilePath
from modules.shared.configuration import APP_CONFIG from modules.shared.configuration import APP_CONFIG
import modules.security.auth as auth import modules.security.auth as auth
import modules.interfaces.gatewayModel as gatewayModel
import modules.interfaces.gatewayInterface as gatewayInterface import modules.interfaces.gatewayInterface as gatewayInterface
router = APIRouter( router = APIRouter(
@ -54,16 +53,23 @@ async def options_route(fullPath: str):
return Response(status_code=200) return Response(status_code=200)
@router.post("/api/token", response_model=gatewayModel.Token, tags=["General"]) @router.post("/api/token", response_model=gatewayModel.Token, tags=["General"])
async def login_for_access_token(formData: OAuth2PasswordRequestForm = Depends()): async def login_for_access_token(
formData: OAuth2PasswordRequestForm = Depends(),
authority: str = "local",
external_token: Optional[str] = None
):
"""Get access token for user authentication"""
# Create a new gateway interface instance with admin context # Create a new gateway interface instance with admin context
interfaceRoot = auth.getRootInterface() interfaceRoot = gatewayInterface.getRootInterface()
try: try:
# Authenticate user # Get token directly
user = interfaceRoot.authenticateUser(formData.username, formData.password) token = interfaceRoot.authenticateAndGetToken(
username=formData.username,
# Authenticate user and get token password=formData.password,
token = interfaceRoot.authenticateAndGetToken(formData.username, formData.password) authority=authority,
external_token=external_token
)
return token return token
except ValueError as e: except ValueError as e:
# Handle authentication errors # Handle authentication errors
@ -91,7 +97,7 @@ async def read_user_me(currentUser: Dict[str, Any] = Depends(auth.getCurrentActi
async def register_user(userData: gatewayModel.User): async def register_user(userData: gatewayModel.User):
"""Register a new user.""" """Register a new user."""
try: try:
interfaceRoot = auth.getRootInterface() interfaceRoot = gatewayInterface.getRootInterface()
return interfaceRoot.registerUser(userData.model_dump()) return interfaceRoot.registerUser(userData.model_dump())
except ValueError as e: except ValueError as e:
raise HTTPException( raise HTTPException(
@ -112,7 +118,7 @@ async def check_username_availability(
): ):
"""Check if a username is available for registration""" """Check if a username is available for registration"""
try: try:
interfaceRoot = auth.getRootInterface() interfaceRoot = gatewayInterface.getRootInterface()
return interfaceRoot.checkUsernameAvailability(username, authenticationAuthority) return interfaceRoot.checkUsernameAvailability(username, authenticationAuthority)
except Exception as e: except Exception as e:
logger.error(f"Error checking username availability: {str(e)}") logger.error(f"Error checking username availability: {str(e)}")

View file

@ -0,0 +1,322 @@
"""
Routes for Google authentication.
"""
from fastapi import APIRouter, HTTPException, Depends, Request, Response, status, Cookie, Body
from fastapi.responses import HTMLResponse, RedirectResponse, JSONResponse
import logging
import json
from typing import Dict, Any, Optional, List
from datetime import datetime, timedelta
# Import auth module
import modules.security.auth as auth
# Import interfaces
import modules.interfaces.googleInterface as googleInterface
import modules.interfaces.gatewayInterface as gatewayInterface
from modules.interfaces.googleModel import (
GoogleToken,
GoogleUserInfo,
GoogleAuthStatus,
GoogleTokenResponse,
GoogleSaveTokenResponse
)
# Configure logger
# Module-scoped logger named after this module so log output can be filtered.
logger = logging.getLogger(__name__)

# Create router for Google Auth endpoints
# All routes below are mounted under /api/google and grouped under the
# "Google" tag in the OpenAPI docs; the responses map documents the error
# statuses these endpoints may return.
router = APIRouter(
    prefix="/api/google",
    tags=["Google"],
    responses={
        404: {"description": "Not found"},
        400: {"description": "Bad request"},
        401: {"description": "Unauthorized"},
        403: {"description": "Forbidden"},
        500: {"description": "Internal server error"}
    }
)
@router.get("/login")
async def login():
    """Redirect the browser to Google's OAuth consent screen.

    Uses the root interface because no user is authenticated yet at this
    point in the flow.

    Returns:
        RedirectResponse: redirect to the Google authorization URL.

    Raises:
        HTTPException: 500 if the authorization URL could not be built.
    """
    try:
        # Root context: login happens before any user token exists.
        google = googleInterface.getRootInterface()

        auth_url = google.initiateLogin()
        if not auth_url:
            raise HTTPException(
                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                detail="Failed to initiate Google login"
            )

        logger.info("Redirecting to Google login")
        return RedirectResponse(auth_url)
    except HTTPException:
        # Re-raise as-is: the generic handler below would otherwise swallow
        # the deliberate detail message and re-wrap it.
        raise
    except Exception as e:
        logger.error(f"Error initiating Google login: {str(e)}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to initiate Google login: {str(e)}"
        )
@router.get("/auth/callback")
async def auth_callback(code: str, state: str, request: Request):
    """Handle Google OAuth callback.

    Exchanges the authorization ``code`` for tokens, auto-provisions a local
    user for first-time Google sign-ins, mints a backend access token, and
    returns an HTML page that posts the result to the opener window.

    NOTE(review): ``state`` is accepted but never validated against a stored
    value — the CSRF protection the state parameter exists for appears to be
    missing; TODO confirm. ``request`` is likewise unused here.
    """
    try:
        # Root context: the user is not authenticated with the backend yet.
        google = googleInterface.getRootInterface()
        # Handle auth callback
        token_response = google.handleAuthCallback(code)
        if not token_response:
            # Token exchange failed — show a self-closing error page for the
            # popup-based login flow.
            return HTMLResponse(
                content="""
                <html>
                    <head>
                        <title>Authentication Failed</title>
                        <style>
                            body { font-family: Arial, sans-serif; text-align: center; margin-top: 50px; }
                            .error { color: red; }
                        </style>
                    </head>
                    <body>
                        <h1 class="error">Authentication Failed</h1>
                        <p>Could not acquire access token.</p>
                        <script>
                            setTimeout(() => window.close(), 3000);
                        </script>
                    </body>
                </html>
                """,
                status_code=400
            )
        # Get gateway interface for user operations
        gateway = gatewayInterface.getRootInterface()
        # Check if user exists — the Google account email doubles as the
        # local username.
        user = gateway.getUserByUsername(token_response.user_info["email"])
        # If user doesn't exist, create a new user in the default mandate
        if not user:
            try:
                # Get the root mandate ID — new Google users are provisioned
                # into the root mandate.
                rootMandateId = gateway.getInitialId("mandates")
                if not rootMandateId:
                    raise ValueError("Root mandate not found")
                # Create new user with Google authentication
                user = gateway.createUser(
                    username=token_response.user_info["email"],
                    email=token_response.user_info["email"],
                    fullName=token_response.user_info.get("name", token_response.user_info["email"]),
                    mandateId=rootMandateId,
                    authenticationAuthority="google"
                )
                logger.info(f"Created new user for Google account: {token_response.user_info['email']}")
                # Verify user was created by retrieving it — re-read so the
                # token below is built from the persisted record.
                user = gateway.getUserByUsername(token_response.user_info["email"])
                if not user:
                    raise ValueError("Failed to retrieve created user")
            except Exception as e:
                logger.error(f"Failed to create user for Google account: {str(e)}")
                return HTMLResponse(
                    content="""
                    <html>
                        <head>
                            <title>Registration Failed</title>
                            <style>
                                body { font-family: Arial, sans-serif; text-align: center; margin-top: 50px; }
                                .error { color: red; }
                            </style>
                        </head>
                        <body>
                            <h1 class="error">Registration Failed</h1>
                            <p>Could not create user account.</p>
                            <script>
                                setTimeout(() => window.close(), 3000);
                            </script>
                        </body>
                    </html>
                    """,
                    status_code=400
                )
        # Create backend token carrying mandate/user identity; the frontend
        # uses this for subsequent API calls.
        access_token_expires = timedelta(minutes=auth.ACCESS_TOKEN_EXPIRE_MINUTES)
        access_token = auth.createAccessToken(
            data={
                "sub": user["username"],
                "mandateId": str(user["mandateId"]),
                "userId": str(user["id"]),
                "authenticationAuthority": "google"
            },
            expiresDelta=access_token_expires
        )
        # Store tokens in session storage for the frontend to pick up.
        # NOTE(review): token data is interpolated into an inline <script>
        # via json.dumps without HTML/script escaping — a value containing
        # "</script>" could break out of the tag; TODO confirm inputs are safe.
        response = HTMLResponse(
            content=f"""
            <html>
                <head>
                    <title>Authentication Successful</title>
                    <style>
                        body {{ font-family: Arial, sans-serif; text-align: center; margin-top: 50px; }}
                        .success {{ color: green; }}
                    </style>
                </head>
                <body>
                    <h1 class="success">Authentication Successful</h1>
                    <p>Welcome, {token_response.user_info.get('name', 'User')}!</p>
                    <p>This window will close automatically.</p>
                    <script>
                        // Store token data in session storage
                        sessionStorage.setItem('google_token_data', JSON.stringify({json.dumps(token_response.model_dump())}));
                        // Notify parent window of success
                        if (window.opener) {{
                            window.opener.postMessage({{
                                type: 'google_auth_success',
                                user: {json.dumps(token_response.user_info)},
                                token_data: {json.dumps(token_response.model_dump())},
                                access_token: "{access_token}"
                            }}, '*');
                        }}
                        // Close window after 3 seconds
                        setTimeout(() => window.close(), 3000);
                    </script>
                </body>
            </html>
            """
        )
        return response
    except Exception as e:
        logger.error(f"Error in auth callback: {str(e)}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Authentication failed: {str(e)}"
        )
@router.get("/status", response_model=GoogleAuthStatus)
async def auth_status(currentUser: Dict[str, Any] = Depends(auth.getCurrentActiveUser)):
    """Report whether the current user has a valid Google session."""
    try:
        # Operate in the authenticated caller's mandate/user context.
        interface = googleInterface.getInterface(currentUser)

        info, token = interface.getCurrentUserToken()
        if info and token:
            # Both pieces present: shape the raw info through the model.
            return GoogleAuthStatus(
                authenticated=True,
                user=GoogleUserInfo(**info)
            )
        return GoogleAuthStatus(
            authenticated=False,
            message="Not authenticated with Google"
        )
    except Exception as e:
        # Status checks never surface errors as HTTP failures — report
        # "not authenticated" with the error text instead.
        logger.error(f"Error checking authentication status: {str(e)}")
        return GoogleAuthStatus(
            authenticated=False,
            message=f"Error checking authentication status: {str(e)}"
        )
@router.get("/token", response_model=GoogleTokenResponse)
async def get_token(currentUser: Dict[str, Any] = Depends(auth.getCurrentActiveUser)):
    """Return the stored Google OAuth token for the current user.

    Raises:
        HTTPException: 404 when no token is stored; 500 on unexpected errors.
    """
    try:
        # Operate in the authenticated caller's mandate/user context.
        google = googleInterface.getInterface(currentUser)

        token_data = google.getGoogleToken()
        if not token_data:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail="No token found"
            )

        # Validate/shape the raw record through the pydantic model.
        token = GoogleToken(**token_data)
        return GoogleTokenResponse(token=token)
    except HTTPException:
        # Re-raise unchanged: without this, the 404 above was caught by the
        # generic handler below and converted into a 500.
        raise
    except Exception as e:
        logger.error(f"Error getting token: {str(e)}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=str(e)
        )
@router.post("/save-token", response_model=GoogleSaveTokenResponse)
async def save_token(
    token_data: GoogleToken,
    currentUser: Dict[str, Any] = Depends(auth.getCurrentActiveUser)
):
    """Persist Google token data posted by the frontend.

    Args:
        token_data: Validated token payload from the client.
        currentUser: Injected authenticated user context.

    Raises:
        HTTPException: 500 when the save fails or an unexpected error occurs.
    """
    try:
        # Operate in the authenticated caller's mandate/user context.
        google = googleInterface.getInterface(currentUser)

        success = google.saveGoogleToken(token_data.model_dump())
        if success:
            return GoogleSaveTokenResponse(
                success=True,
                message="Token saved successfully",
                token=token_data
            )
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to save token"
        )
    except HTTPException:
        # Re-raise unchanged: without this, the deliberate "Failed to save
        # token" detail was swallowed and re-wrapped by the handler below.
        raise
    except Exception as e:
        logger.error(f"Error saving token: {str(e)}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Error saving token: {str(e)}"
        )
@router.post("/logout")
async def logout(currentUser: Dict[str, Any] = Depends(auth.getCurrentActiveUser)):
    """Delete the stored Google token for the current user."""
    try:
        # Operate in the authenticated caller's mandate/user context.
        interface = googleInterface.getInterface(currentUser)

        deleted = interface.deleteGoogleToken()
        # Either way the endpoint answers 200; the message tells the
        # frontend whether the token was actually removed.
        message = (
            "Successfully logged out from Google"
            if deleted
            else "Failed to logout from Google"
        )
        return JSONResponse({"message": message})
    except Exception as e:
        logger.error(f"Error during logout: {str(e)}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Logout failed: {str(e)}"
        )

View file

@ -1,3 +1,7 @@
"""
Routes for Microsoft authentication.
"""
from fastapi import APIRouter, HTTPException, Depends, Request, Response, status, Cookie, Body from fastapi import APIRouter, HTTPException, Depends, Request, Response, status, Cookie, Body
from fastapi.responses import HTMLResponse, RedirectResponse, JSONResponse from fastapi.responses import HTMLResponse, RedirectResponse, JSONResponse
import logging import logging
@ -10,6 +14,7 @@ import modules.security.auth as auth
# Import interfaces # Import interfaces
import modules.interfaces.msftInterface as msftInterface import modules.interfaces.msftInterface as msftInterface
import modules.interfaces.gatewayInterface as gatewayInterface
from modules.interfaces.msftModel import ( from modules.interfaces.msftModel import (
MsftToken, MsftToken,
MsftUserInfo, MsftUserInfo,
@ -38,8 +43,8 @@ router = APIRouter(
async def login(): async def login():
"""Initiate Microsoft login for the current user""" """Initiate Microsoft login for the current user"""
try: try:
# Get Microsoft interface # Get Microsoft interface with root context for initial setup
msft = msftInterface.getInterface({"_mandateId": "root", "id": "root"}) msft = msftInterface.getRootInterface()
# Get login URL # Get login URL
auth_url = msft.initiateLogin() auth_url = msft.initiateLogin()
@ -63,8 +68,8 @@ async def login():
async def auth_callback(code: str, state: str, request: Request): async def auth_callback(code: str, state: str, request: Request):
"""Handle Microsoft OAuth callback""" """Handle Microsoft OAuth callback"""
try: try:
# Get Microsoft interface # Get Microsoft interface with root context for initial setup
msft = msftInterface.getInterface({"_mandateId": "root", "id": "root"}) msft = msftInterface.getRootInterface()
# Handle auth callback # Handle auth callback
token_response = msft.handleAuthCallback(code) token_response = msft.handleAuthCallback(code)
@ -92,10 +97,10 @@ async def auth_callback(code: str, state: str, request: Request):
) )
# Get gateway interface for user operations # Get gateway interface for user operations
gateway = auth.getRootInterface() gateway = gatewayInterface.getRootInterface()
# Check if user exists # Check if user exists
user = gateway.getUserByUsername(token_response["user_info"]["email"]) user = gateway.getUserByUsername(token_response.user_info["email"])
# If user doesn't exist, create a new user in the default mandate # If user doesn't exist, create a new user in the default mandate
if not user: if not user:
@ -107,16 +112,16 @@ async def auth_callback(code: str, state: str, request: Request):
# Create new user with Microsoft authentication # Create new user with Microsoft authentication
user = gateway.createUser( user = gateway.createUser(
username=token_response["user_info"]["email"], username=token_response.user_info["email"],
email=token_response["user_info"]["email"], email=token_response.user_info["email"],
fullName=token_response["user_info"].get("name", token_response["user_info"]["email"]), fullName=token_response.user_info.get("name", token_response.user_info["email"]),
_mandateId=rootMandateId, mandateId=rootMandateId,
authenticationAuthority="microsoft" authenticationAuthority="microsoft"
) )
logger.info(f"Created new user for Microsoft account: {token_response['user_info']['email']}") logger.info(f"Created new user for Microsoft account: {token_response.user_info['email']}")
# Verify user was created by retrieving it # Verify user was created by retrieving it
user = gateway.getUserByUsername(token_response["user_info"]["email"]) user = gateway.getUserByUsername(token_response.user_info["email"])
if not user: if not user:
raise ValueError("Failed to retrieve created user") raise ValueError("Failed to retrieve created user")
@ -149,8 +154,8 @@ async def auth_callback(code: str, state: str, request: Request):
access_token = auth.createAccessToken( access_token = auth.createAccessToken(
data={ data={
"sub": user["username"], "sub": user["username"],
"_mandateId": str(user["_mandateId"]), "mandateId": str(user["mandateId"]),
"_userId": str(user["id"]), "userId": str(user["id"]),
"authenticationAuthority": "microsoft" "authenticationAuthority": "microsoft"
}, },
expiresDelta=access_token_expires expiresDelta=access_token_expires
@ -169,18 +174,18 @@ async def auth_callback(code: str, state: str, request: Request):
</head> </head>
<body> <body>
<h1 class="success">Authentication Successful</h1> <h1 class="success">Authentication Successful</h1>
<p>Welcome, {token_response['user_info'].get('name', 'User')}!</p> <p>Welcome, {token_response.user_info.get('name', 'User')}!</p>
<p>This window will close automatically.</p> <p>This window will close automatically.</p>
<script> <script>
// Store token data in session storage // Store token data in session storage
sessionStorage.setItem('msft_token_data', JSON.stringify({json.dumps(token_response)})); sessionStorage.setItem('msft_token_data', JSON.stringify({json.dumps(token_response.model_dump())}));
// Notify parent window of success // Notify parent window of success
if (window.opener) {{ if (window.opener) {{
window.opener.postMessage({{ window.opener.postMessage({{
type: 'msft_auth_success', type: 'msft_auth_success',
user: {json.dumps(token_response['user_info'])}, user: {json.dumps(token_response.user_info)},
token_data: {json.dumps(token_response)}, token_data: {json.dumps(token_response.model_dump())},
access_token: "{access_token}" access_token: "{access_token}"
}}, '*'); }}, '*');
}} }}
@ -205,7 +210,7 @@ async def auth_callback(code: str, state: str, request: Request):
async def auth_status(currentUser: Dict[str, Any] = Depends(auth.getCurrentActiveUser)): async def auth_status(currentUser: Dict[str, Any] = Depends(auth.getCurrentActiveUser)):
"""Check Microsoft authentication status""" """Check Microsoft authentication status"""
try: try:
# Get Microsoft interface # For authenticated endpoints, use the current user's context
msft = msftInterface.getInterface(currentUser) msft = msftInterface.getInterface(currentUser)
# Get current user token and info # Get current user token and info
@ -236,17 +241,21 @@ async def auth_status(currentUser: Dict[str, Any] = Depends(auth.getCurrentActiv
async def get_token(currentUser: Dict[str, Any] = Depends(auth.getCurrentActiveUser)): async def get_token(currentUser: Dict[str, Any] = Depends(auth.getCurrentActiveUser)):
"""Get Microsoft token for current user.""" """Get Microsoft token for current user."""
try: try:
# Get Microsoft interface # For authenticated endpoints, use the current user's context
msft = msftInterface.getInterface(currentUser) msft = msftInterface.getInterface(currentUser)
# Get token # Get token
token_data = msft.getMsftToken() token_data = msft.getMsftToken()
if token_data: if not token_data:
return MsftTokenResponse(token=token_data) raise HTTPException(
raise HTTPException( status_code=status.HTTP_404_NOT_FOUND,
status_code=status.HTTP_404_NOT_FOUND, detail="No token found"
detail="No token found" )
)
# Convert to MsftToken model
token = MsftToken(**token_data)
return MsftTokenResponse(token=token)
except Exception as e: except Exception as e:
logger.error(f"Error getting token: {str(e)}") logger.error(f"Error getting token: {str(e)}")
raise HTTPException( raise HTTPException(
@ -261,7 +270,7 @@ async def save_token(
): ):
"""Save Microsoft token data from frontend""" """Save Microsoft token data from frontend"""
try: try:
# Get Microsoft interface # For authenticated endpoints, use the current user's context
msft = msftInterface.getInterface(currentUser) msft = msftInterface.getInterface(currentUser)
# Save token # Save token
@ -290,11 +299,11 @@ async def save_token(
async def logout(currentUser: Dict[str, Any] = Depends(auth.getCurrentActiveUser)): async def logout(currentUser: Dict[str, Any] = Depends(auth.getCurrentActiveUser)):
"""Logout from Microsoft""" """Logout from Microsoft"""
try: try:
# Get Microsoft interface # For authenticated endpoints, use the current user's context
msft = msftInterface.getInterface(currentUser) msft = msftInterface.getInterface(currentUser)
# Delete token # Delete token
success = msft.db.deleteToken(currentUser["id"]) success = msft.deleteMsftToken()
if success: if success:
return JSONResponse({ return JSONResponse({

View file

@ -70,7 +70,7 @@ async def create_user(
"""Create a new user""" """Create a new user"""
try: try:
# Get admin user for user creation # Get admin user for user creation
interfaceRoot = auth.getRootInterface() interfaceRoot = gatewayInterface.getRootInterface()
try: try:
# Convert User model to dict and pass to createUser # Convert User model to dict and pass to createUser
@ -114,10 +114,10 @@ async def update_user(
"""Update an existing user""" """Update an existing user"""
try: try:
# Get admin user for user updates # Get admin user for user updates
interfaceRoot = auth.getRootInterface() interfaceGateway = gatewayInterface.getInterface(currentUser)
# Check if user exists # Check if user exists
existingUser = interfaceRoot.getUserById(userId) existingUser = interfaceGateway.getUserById(userId)
if not existingUser: if not existingUser:
raise HTTPException( raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND, status_code=status.HTTP_404_NOT_FOUND,
@ -126,7 +126,7 @@ async def update_user(
# Update user data # Update user data
try: try:
updatedUser = interfaceRoot.updateUser(userId, userData) updatedUser = interfaceGateway.updateUser(userId, userData)
except ValueError as e: except ValueError as e:
raise HTTPException( raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST, status_code=status.HTTP_400_BAD_REQUEST,

View file

@ -16,6 +16,7 @@ import modules.security.auth as auth
# Import interfaces # Import interfaces
import modules.interfaces.lucydomInterface as lucydomInterface import modules.interfaces.lucydomInterface as lucydomInterface
import modules.interfaces.msftInterface as msftInterface import modules.interfaces.msftInterface as msftInterface
import modules.interfaces.googleInterface as googleInterface
# Import workflow manager # Import workflow manager
from modules.workflow.workflowManager import getWorkflowManager from modules.workflow.workflowManager import getWorkflowManager
@ -44,6 +45,21 @@ router = APIRouter(
responses={404: {"description": "Not found"}} responses={404: {"description": "Not found"}}
) )
def createServiceContainer(currentUser: Dict[str, Any]):
"""Create a service container with all required interfaces."""
# Get all interfaces
interfaceBase = lucydomInterface.getInterface(currentUser)
interfaceMsft = msftInterface.getInterface(currentUser)
interfaceGoogle = googleInterface.getInterface(currentUser)
# Create service container
service = type('ServiceContainer', (), {
'base': interfaceBase,
'msft': interfaceMsft,
'google': interfaceGoogle
})
return service
# API Endpoint for getting all workflows # API Endpoint for getting all workflows
@router.get("", response_model=List[ChatWorkflow]) @router.get("", response_model=List[ChatWorkflow])
@ -52,11 +68,11 @@ async def list_workflows(
): ):
"""List all workflows for the current user.""" """List all workflows for the current user."""
try: try:
# Get interface with current user context # Get service container
interfaceLucydom = lucydomInterface.getInterface(currentUser) service = createServiceContainer(currentUser)
# Retrieve workflows for the user # Retrieve workflows for the user
workflows = interfaceLucydom.getWorkflowsByUser(currentUser["id"]) workflows = service.base.getWorkflowsByUser(currentUser["id"])
return [ChatWorkflow(**workflow) for workflow in workflows] return [ChatWorkflow(**workflow) for workflow in workflows]
except Exception as e: except Exception as e:
logger.error(f"Error listing workflows: {str(e)}", exc_info=True) logger.error(f"Error listing workflows: {str(e)}", exc_info=True)
@ -77,24 +93,20 @@ async def start_workflow(
Corresponds to State 1 in the state machine documentation. Corresponds to State 1 in the state machine documentation.
""" """
try: try:
# Get interface with current user context # Get service container
interfaceLucydom = lucydomInterface.getInterface(currentUser) service = createServiceContainer(currentUser)
interfaceMsft = msftInterface.getInterface(currentUser)
# Get workflow manager with interface # Get workflow manager
workflowManager = await getWorkflowManager(interfaceLucydom, interfaceMsft) workflowManager = await getWorkflowManager(service)
# Start or continue workflow using the workflow manager # Start or continue workflow
workflow = await workflowManager.workflowStart(userInput.dict(), workflowId) workflow = await workflowManager.workflowStart(userInput, workflowId)
logger.info("User Input received. Answer:", workflow)
return workflow
return ChatWorkflow(**workflow)
except Exception as e: except Exception as e:
logger.error(f"Error starting workflow: {str(e)}", exc_info=True) logger.error(f"Error in start_workflow: {str(e)}")
raise HTTPException( raise HTTPException(status_code=500, detail=str(e))
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"Error starting workflow: {str(e)}"
)
# State 8: Workflow Stopped endpoint # State 8: Workflow Stopped endpoint
@router.post("/{workflowId}/stop", response_model=ChatWorkflow) @router.post("/{workflowId}/stop", response_model=ChatWorkflow)
@ -104,37 +116,20 @@ async def stop_workflow(
): ):
"""Stops a running workflow.""" """Stops a running workflow."""
try: try:
# Get interface with current user context # Get service container
interfaceLucydom = lucydomInterface.getInterface(currentUser) service = createServiceContainer(currentUser)
interfaceMsft = msftInterface.getInterface(currentUser)
# Verify workflow exists and belongs to user # Get workflow manager
workflow = interfaceLucydom.getWorkflow(workflowId) workflowManager = await getWorkflowManager(service)
if not workflow:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail=f"Workflow with ID {workflowId} not found"
)
if workflow.get("_userId") != currentUser["id"]: # Stop workflow
raise HTTPException( workflow = await workflowManager.workflowStop(workflowId)
status_code=status.HTTP_403_FORBIDDEN,
detail="You don't have permission to stop this workflow"
)
# Stop the workflow return workflow
workflowManager = await getWorkflowManager(interfaceLucydom, interfaceMsft)
stoppedWorkflow = await workflowManager.workflowStop(workflowId)
return ChatWorkflow(**stoppedWorkflow)
except HTTPException:
raise
except Exception as e: except Exception as e:
logger.error(f"Error stopping workflow: {str(e)}", exc_info=True) logger.error(f"Error in stop_workflow: {str(e)}")
raise HTTPException( raise HTTPException(status_code=500, detail=str(e))
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"Error stopping workflow: {str(e)}"
)
# State 11: Workflow Reset/Deletion endpoint # State 11: Workflow Reset/Deletion endpoint
@router.delete("/{workflowId}", response_model=Dict[str, Any]) @router.delete("/{workflowId}", response_model=Dict[str, Any])
@ -144,11 +139,11 @@ async def delete_workflow(
): ):
"""Deletes a workflow and its associated data.""" """Deletes a workflow and its associated data."""
try: try:
# Get interface with current user context # Get service container
interfaceLucydom = lucydomInterface.getInterface(currentUser) service = createServiceContainer(currentUser)
# Verify workflow exists # Verify workflow exists
workflow = interfaceLucydom.getWorkflow(workflowId) workflow = service.base.getWorkflow(workflowId)
if not workflow: if not workflow:
raise HTTPException( raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND, status_code=status.HTTP_404_NOT_FOUND,
@ -163,7 +158,7 @@ async def delete_workflow(
) )
# Delete workflow # Delete workflow
success = interfaceLucydom.deleteWorkflow(workflowId) success = service.base.deleteWorkflow(workflowId)
if not success: if not success:
raise HTTPException( raise HTTPException(
@ -192,11 +187,11 @@ async def get_workflow_status(
): ):
"""Get the current status of a workflow.""" """Get the current status of a workflow."""
try: try:
# Get interface with current user context # Get service container
interfaceLucydom = lucydomInterface.getInterface(currentUser) service = createServiceContainer(currentUser)
# Retrieve workflow # Retrieve workflow
workflow = interfaceLucydom.getWorkflow(workflowId) workflow = service.base.getWorkflow(workflowId)
if not workflow: if not workflow:
raise HTTPException( raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND, status_code=status.HTTP_404_NOT_FOUND,
@ -222,11 +217,11 @@ async def get_workflow_logs(
): ):
"""Get logs for a workflow with support for selective data transfer.""" """Get logs for a workflow with support for selective data transfer."""
try: try:
# Get interface with current user context # Get service container
interfaceLucydom = lucydomInterface.getInterface(currentUser) service = createServiceContainer(currentUser)
# Verify workflow exists # Verify workflow exists
workflow = interfaceLucydom.getWorkflow(workflowId) workflow = service.base.getWorkflow(workflowId)
if not workflow: if not workflow:
raise HTTPException( raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND, status_code=status.HTTP_404_NOT_FOUND,
@ -234,7 +229,7 @@ async def get_workflow_logs(
) )
# Get all logs # Get all logs
allLogs = interfaceLucydom.getWorkflowLogs(workflowId) allLogs = service.base.getWorkflowLogs(workflowId)
# Apply selective data transfer if logId is provided # Apply selective data transfer if logId is provided
if logId: if logId:
@ -263,11 +258,11 @@ async def get_workflow_messages(
): ):
"""Get messages for a workflow with support for selective data transfer.""" """Get messages for a workflow with support for selective data transfer."""
try: try:
# Get admin user for workflow operations # Get service container
interfaceLucydom = lucydomInterface.getInterface(currentUser) service = createServiceContainer(currentUser)
# Verify workflow exists # Verify workflow exists
workflow = interfaceLucydom.getWorkflow(workflowId) workflow = service.base.getWorkflow(workflowId)
if not workflow: if not workflow:
raise HTTPException( raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND, status_code=status.HTTP_404_NOT_FOUND,
@ -275,7 +270,7 @@ async def get_workflow_messages(
) )
# Get all messages # Get all messages
allMessages = interfaceLucydom.getWorkflowMessages(workflowId) allMessages = service.base.getWorkflowMessages(workflowId)
# Apply selective data transfer if messageId is provided # Apply selective data transfer if messageId is provided
if messageId: if messageId:
@ -313,11 +308,11 @@ async def delete_workflow_message(
): ):
"""Delete a message from a workflow.""" """Delete a message from a workflow."""
try: try:
# Get admin user for workflow operations # Get service container
interfaceLucydom = lucydomInterface.getInterface(currentUser) service = createServiceContainer(currentUser)
# Verify workflow exists and belongs to user # Verify workflow exists and belongs to user
workflow = interfaceLucydom.getWorkflow(workflowId) workflow = service.base.getWorkflow(workflowId)
if not workflow: if not workflow:
raise HTTPException( raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND, status_code=status.HTTP_404_NOT_FOUND,
@ -325,7 +320,7 @@ async def delete_workflow_message(
) )
# Delete the message # Delete the message
success = interfaceLucydom.deleteWorkflowMessage(workflowId, messageId) success = service.base.deleteWorkflowMessage(workflowId, messageId)
if not success: if not success:
raise HTTPException( raise HTTPException(
@ -337,7 +332,7 @@ async def delete_workflow_message(
messageIds = workflow.get("messageIds", []) messageIds = workflow.get("messageIds", [])
if messageId in messageIds: if messageId in messageIds:
messageIds.remove(messageId) messageIds.remove(messageId)
interfaceLucydom.updateWorkflow(workflowId, {"messageIds": messageIds}) service.base.updateWorkflow(workflowId, {"messageIds": messageIds})
return { return {
"workflowId": workflowId, "workflowId": workflowId,
@ -362,11 +357,11 @@ async def delete_file_from_message(
): ):
"""Delete a file reference from a message in a workflow.""" """Delete a file reference from a message in a workflow."""
try: try:
# Get admin user for workflow operations # Get service container
interfaceLucydom = lucydomInterface.getInterface(currentUser) service = createServiceContainer(currentUser)
# Verify workflow exists and belongs to user # Verify workflow exists and belongs to user
workflow = interfaceLucydom.getWorkflow(workflowId) workflow = service.base.getWorkflow(workflowId)
if not workflow: if not workflow:
raise HTTPException( raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND, status_code=status.HTTP_404_NOT_FOUND,
@ -374,7 +369,7 @@ async def delete_file_from_message(
) )
# Delete file reference from message # Delete file reference from message
success = interfaceLucydom.deleteFileFromMessage(workflowId, messageId, fileId) success = service.base.deleteFileFromMessage(workflowId, messageId, fileId)
if not success: if not success:
raise HTTPException( raise HTTPException(
@ -406,11 +401,11 @@ async def preview_file(
): ):
"""Get file metadata and a preview of the file content.""" """Get file metadata and a preview of the file content."""
try: try:
# Get admin user for workflow operations # Get service container
interfaceLucydom = lucydomInterface.getInterface(currentUser) service = createServiceContainer(currentUser)
# Get file metadata # Get file metadata
file = interfaceLucydom.getFile(fileId) file = service.base.getFile(fileId)
if not file: if not file:
raise HTTPException( raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND, status_code=status.HTTP_404_NOT_FOUND,
@ -418,7 +413,7 @@ async def preview_file(
) )
# Get file data (limited for preview) # Get file data (limited for preview)
fileData = interfaceLucydom.getFileData(fileId) fileData = service.base.getFileData(fileId)
if fileData is None: if fileData is None:
raise HTTPException( raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND, status_code=status.HTTP_404_NOT_FOUND,
@ -436,7 +431,7 @@ async def preview_file(
previewData = None previewData = None
# Get base64Encoded flag from database # Get base64Encoded flag from database
fileDataEntries = interfaceLucydom.db.getRecordset("fileData", recordFilter={"id": fileId}) fileDataEntries = service.base.db.getRecordset("fileData", recordFilter={"id": fileId})
if fileDataEntries and "base64Encoded" in fileDataEntries[0]: if fileDataEntries and "base64Encoded" in fileDataEntries[0]:
# Use the flag from the database # Use the flag from the database
base64Encoded = fileDataEntries[0]["base64Encoded"] base64Encoded = fileDataEntries[0]["base64Encoded"]
@ -500,11 +495,11 @@ async def download_file(
): ):
"""Download a file.""" """Download a file."""
try: try:
# Get admin user for workflow operations # Get service container
interfaceLucydom = lucydomInterface.getInterface(currentUser) service = createServiceContainer(currentUser)
# Get file data # Get file data
fileInfo = interfaceLucydom.downloadFile(fileId) fileInfo = service.base.downloadFile(fileId)
if not fileInfo: if not fileInfo:
raise HTTPException( raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND, status_code=status.HTTP_404_NOT_FOUND,

View file

@ -10,7 +10,6 @@ from fastapi.security import OAuth2PasswordBearer
from jose import JWTError, jwt from jose import JWTError, jwt
import logging import logging
from modules.interfaces.gatewayInterface import getInterface
from modules.shared.configuration import APP_CONFIG from modules.shared.configuration import APP_CONFIG
# Get Config Data # Get Config Data
@ -126,27 +125,3 @@ def getCurrentActiveUser(currentUser: Dict[str, Any] = Depends(_getCurrentUser))
) )
return currentUser return currentUser
def getRootInterface() -> Dict[str, Any]:
try:
# Get the initial user ID from the database
gateway = getInterface() # Initialize without user context
initialUserId = gateway.getInitialId("users")
if not initialUserId:
raise ValueError("No initial user ID found in database")
# Get the actual user record
gateway.setUserContext(initialUserId)
rootUser = gateway.getUser(initialUserId)
if not rootUser:
raise ValueError(f"Root user with ID {initialUserId} not found in database")
return getInterface(currentUser=rootUser)
except Exception as e:
logger.error(f"Error getting root access: {str(e)}")
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"Failed to get root access: {str(e)}"
)

View file

@ -10,6 +10,7 @@ import uuid
from datetime import datetime from datetime import datetime
from typing import Dict, Any, List, Optional from typing import Dict, Any, List, Optional
from modules.shared.mimeUtils import isTextMimeType, determineContentEncoding from modules.shared.mimeUtils import isTextMimeType, determineContentEncoding
from modules.interfaces.lucydomModel import ChatContent
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -29,15 +30,47 @@ class AgentBase:
self.service = None self.service = None
def setWorkflowManager(self, workflowManager): def setWorkflowManager(self, workflowManager):
"""Set the workflow manager reference.""" """
Set the workflow manager reference and validate service container.
Args:
workflowManager: The workflow manager instance
"""
if not workflowManager:
logger.warning("Attempted to set null workflow manager")
return False
self.workflowManager = workflowManager self.workflowManager = workflowManager
# Also set service reference from workflow manager
if workflowManager and hasattr(workflowManager, 'service'): # Set service reference from workflow manager if available
self.service = workflowManager.service if hasattr(workflowManager, 'service'):
return self.setService(workflowManager.service)
return False
def setService(self, service): def setService(self, service):
"""Set the service container reference.""" """
Set the service container reference and validate required interfaces.
Args:
service: The service container with required interfaces
"""
if not service:
logger.warning("Attempted to set null service container")
return False
# Validate required interfaces
required_interfaces = ['base', 'msft', 'google']
missing_interfaces = []
for interface in required_interfaces:
if not hasattr(service, interface):
missing_interfaces.append(interface)
if missing_interfaces:
logger.warning(f"Service container missing required interfaces: {', '.join(missing_interfaces)}")
return False
self.service = service self.service = service
return True
def getAgentInfo(self) -> Dict[str, Any]: def getAgentInfo(self) -> Dict[str, Any]:
""" """
@ -48,6 +81,7 @@ class AgentBase:
""" """
return { return {
"name": self.name, "name": self.name,
"label": self.label,
"description": self.description, "description": self.description,
"capabilities": self.capabilities "capabilities": self.capabilities
} }
@ -77,6 +111,21 @@ class AgentBase:
- documents: List of document objects created by the agent, - documents: List of document objects created by the agent,
each containing a "base64Encoded" flag in addition to "label" and "content" each containing a "base64Encoded" flag in addition to "label" and "content"
""" """
# Validate service and workflow manager
if not self.service:
logger.error("Service container not initialized")
return {
"feedback": "Error: Service container not initialized",
"documents": []
}
if not self.workflowManager:
logger.error("Workflow manager not initialized")
return {
"feedback": "Error: Workflow manager not initialized",
"documents": []
}
# Base implementation - should be overridden by specialized agents # Base implementation - should be overridden by specialized agents
logger.warning(f"Agent {self.name} is using the default implementation of processTask") logger.warning(f"Agent {self.name} is using the default implementation of processTask")
return { return {
@ -85,51 +134,53 @@ class AgentBase:
} }
def determineBase64EncodingFlag(self, filename: str, content: Any, mimeType: str = None) -> bool: def determineBase64EncodingFlag(self, filename: str, content: Any, mimeType: str = None) -> bool:
"""Wrapper for the utility function""" """
Determine if content should be base64 encoded.
Args:
filename: Name of the file
content: Content to check
mimeType: Optional MIME type
Returns:
Boolean indicating if content should be base64 encoded
"""
return determineContentEncoding(filename, content, mimeType) return determineContentEncoding(filename, content, mimeType)
def isTextMimeType(self, mimeType: str) -> bool: def isTextMimeType(self, mimeType: str) -> bool:
"""Wrapper for the utility function"""
return isTextMimeType(mimeType)
def formatAgentDocumentOutput(self, label: str, content: Any, mimeType: str = None) -> Dict[str, Any]:
""" """
Format agent output as a document. Check if MIME type is text-based.
Args: Args:
label: Label for the document mimeType: MIME type to check
content: Content of the document
mimeType: Optional MIME type for the document Returns:
Boolean indicating if MIME type is text-based
""" """
# Create document structure return isTextMimeType(mimeType)
doc = {
"id": str(uuid.uuid4()),
"name": label,
"ext": "txt", # Default extension
"data": content,
"base64Encoded": False,
"metadata": {
"isText": True
}
}
# Set MIME type if provided def formatAgentDocumentOutput(self, label: str, content: str, contentType: str, base64Encoded: bool = False) -> ChatContent:
if mimeType: """
doc["mimeType"] = mimeType Format agent document output using ChatContent model.
# Update extension based on MIME type
if mimeType == "text/markdown":
doc["ext"] = "md"
elif mimeType == "text/html":
doc["ext"] = "html"
elif mimeType == "text/csv":
doc["ext"] = "csv"
elif mimeType == "application/json":
doc["ext"] = "json"
elif mimeType.startswith("image/"):
doc["ext"] = mimeType.split("/")[1]
doc["metadata"]["isText"] = False
elif mimeType == "application/pdf":
doc["ext"] = "pdf"
doc["metadata"]["isText"] = False
return doc Args:
label: Document label/filename
content: Document content
contentType: MIME type of content
base64Encoded: Whether content is base64 encoded
Returns:
ChatContent object with the following attributes:
- sequenceNr: Sequence number (defaults to 1)
- name: Document label/filename
- mimeType: MIME type of content
- data: Actual content
- metadata: Additional metadata including base64Encoded flag
"""
return ChatContent(
sequenceNr=1,
name=label,
mimeType=contentType,
data=content,
metadata={"base64Encoded": base64Encoded}
)

View file

@ -1,7 +1,5 @@
""" """
Agent Registry Module. Agent Registry Module for managing and initializing agents.
Provides a central registry system for all available agents.
Optimized for the standardized task processing pattern.
""" """
import os import os
@ -34,12 +32,27 @@ class AgentRegistry:
def initialize(self, service=None, workflowManager=None): def initialize(self, service=None, workflowManager=None):
"""Initialize or update the registry with workflow manager and service references.""" """Initialize or update the registry with workflow manager and service references."""
if service:
# Validate required interfaces
required_interfaces = ['base', 'msft', 'google']
missing_interfaces = []
for interface in required_interfaces:
if not hasattr(service, interface):
missing_interfaces.append(interface)
if missing_interfaces:
logger.warning(f"Service container missing required interfaces: {', '.join(missing_interfaces)}")
return False
# Initialize agents with service and workflow manager
for agent in self.agents.values(): for agent in self.agents.values():
if workflowManager and hasattr(agent, 'setWorkflowManager'): if workflowManager and hasattr(agent, 'setWorkflowManager'):
agent.setWorkflowManager(workflowManager) agent.setWorkflowManager(workflowManager)
if service and hasattr(agent, 'setService'): if service and hasattr(agent, 'setService'):
agent.setService(service) agent.setService(service)
return True
def _loadAgents(self): def _loadAgents(self):
"""Load all available agents from modules.""" """Load all available agents from modules."""
logger.info("Loading agent modules...") logger.info("Loading agent modules...")

View file

@ -8,6 +8,7 @@ import os
import io import io
from typing import Dict, Any, List, Optional, Union, Tuple from typing import Dict, Any, List, Optional, Union, Tuple
import base64 import base64
from modules.interfaces.lucydomModel import ChatContent
# Configure logger # Configure logger
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -21,7 +22,7 @@ class FileProcessingError(Exception):
"""Custom exception for file processing errors.""" """Custom exception for file processing errors."""
pass pass
def getDocumentContents(fileMetadata: Dict[str, Any], fileContent: bytes) -> List[Dict[str, Any]]: def getDocumentContents(fileMetadata: Dict[str, Any], fileContent: bytes) -> List[ChatContent]:
""" """
Main function for extracting content from a file based on its MIME type. Main function for extracting content from a file based on its MIME type.
Delegates to specialized extraction functions. Delegates to specialized extraction functions.
@ -31,7 +32,7 @@ def getDocumentContents(fileMetadata: Dict[str, Any], fileContent: bytes) -> Lis
fileContent: Binary data of the file fileContent: Binary data of the file
Returns: Returns:
List of Document-Content objects with metadata and base64Encoded flag List of ChatContent objects with metadata and base64Encoded flag
""" """
try: try:
mimeType = fileMetadata.get("mimeType", "application/octet-stream") mimeType = fileMetadata.get("mimeType", "application/octet-stream")
@ -142,36 +143,35 @@ def getDocumentContents(fileMetadata: Dict[str, Any], fileContent: bytes) -> Lis
# Convert binary content to base64 # Convert binary content to base64
encoded_data = base64.b64encode(fileContent).decode('utf-8') encoded_data = base64.b64encode(fileContent).decode('utf-8')
contents.append({ contents.append(ChatContent(
"sequenceNr": 1, sequenceNr=1,
"name": '1_undefined', name='1_undefined',
"ext": os.path.splitext(fileName)[1][1:] if os.path.splitext(fileName)[1] else "bin", mimeType=mimeType,
"mimeType": mimeType, data=encoded_data,
"data": encoded_data, metadata={
"base64Encoded": True, "isText": False,
"metadata": { "base64Encoded": True
"isText": False
} }
}) ))
# Add generic attributes for all documents # Add generic attributes for all documents
for content in contents: for content in contents:
# Make sure all content items have the base64Encoded flag # Make sure all content items have the base64Encoded flag
if "base64Encoded" not in content: if not hasattr(content, "base64Encoded"):
if isinstance(content.get("data"), bytes): if isinstance(content.data, bytes):
# Convert bytes to base64 # Convert bytes to base64
content["data"] = base64.b64encode(content["data"]).decode('utf-8') content.data = base64.b64encode(content.data).decode('utf-8')
content["base64Encoded"] = True content.base64Encoded = True
else: else:
# Assume text content if not explicitly marked # Assume text content if not explicitly marked
content["base64Encoded"] = False content.base64Encoded = False
# Maintain backward compatibility with old "base64Encoded" flag in metadata # Maintain backward compatibility with old "base64Encoded" flag in metadata
if "metadata" not in content: if not content.metadata:
content["metadata"] = {} content.metadata = {}
# Set base64Encoded in metadata for backward compatibility # Set base64Encoded in metadata for backward compatibility
content["metadata"]["base64Encoded"] = content["base64Encoded"] content.metadata["base64Encoded"] = content.base64Encoded
logger.info(f"Successfully extracted {len(contents)} content items from file '{fileName}'") logger.info(f"Successfully extracted {len(contents)} content items from file '{fileName}'")
return contents return contents
@ -179,18 +179,16 @@ def getDocumentContents(fileMetadata: Dict[str, Any], fileContent: bytes) -> Lis
except Exception as e: except Exception as e:
logger.error(f"Error during content extraction for file {fileMetadata.get('name', 'unknown')}: {str(e)}", exc_info=True) logger.error(f"Error during content extraction for file {fileMetadata.get('name', 'unknown')}: {str(e)}", exc_info=True)
# Fallback on error - return original data # Fallback on error - return original data
return [{ return [ChatContent(
"sequenceNr": 1, sequenceNr=1,
"name": fileMetadata.get("name", "unknown"), name=fileMetadata.get("name", "unknown"),
"ext": os.path.splitext(fileMetadata.get("name", ""))[1][1:] if os.path.splitext(fileMetadata.get("name", ""))[1] else "bin", mimeType=fileMetadata.get("mimeType", "application/octet-stream"),
"mimeType": fileMetadata.get("mimeType", "application/octet-stream"), data=base64.b64encode(fileContent).decode('utf-8'),
"data": base64.b64encode(fileContent).decode('utf-8'), metadata={
"base64Encoded": True,
"metadata": {
"isText": False, "isText": False,
"base64Encoded": True # For backward compatibility "base64Encoded": True
} }
}] )]
def _loadPdfExtractor(): def _loadPdfExtractor():
@ -979,156 +977,32 @@ def extractBinaryContent(fileName: str, fileContent: bytes, mimeType: str) -> Li
} }
}] }]
def processFile(self, fileContent: bytes, fileName: str, fileMetadata: Dict[str, Any] = None) -> List[Dict[str, Any]]:
"""
Process a file and return its contents as a list of documents.
Args: """Process a text file."""
fileContent: Binary content of the file
fileName: Name of the file
fileMetadata: Optional metadata about the file
Returns:
List of document dictionaries
"""
try: try:
# Get file extension and MIME type # Try to decode as text
fileExtension = os.path.splitext(fileName)[1].lower()[1:] try:
mimeType = fileMetadata.get("mimeType", self.serviceBase.getMimeType(fileName)) if fileMetadata else self.serviceBase.getMimeType(fileName) textContent = fileContent.decode('utf-8')
base64Encoded = False
except UnicodeDecodeError:
# If not valid UTF-8, encode as base64
textContent = base64.b64encode(fileContent).decode('utf-8')
base64Encoded = True
# Process based on file type # Create text document
if mimeType.startswith("image/"): textDoc = {
return self._processImageFile(fileContent, fileName, fileExtension, mimeType, fileMetadata) "name": fileName,
elif mimeType == "application/pdf": "ext": "txt",
return self._processPdfFile(fileContent, fileName, fileMetadata) "mimeType": "text/plain",
elif mimeType == "text/csv": "data": textContent,
return self._processCsvFile(fileContent, fileName, fileMetadata) "base64Encoded": base64Encoded,
elif mimeType == "text/plain": "metadata": {
return self._processTextFile(fileContent, fileName, fileMetadata) "isText": True
else: }
# Default binary file handling }
return [{
"name": fileName, return [textDoc]
"ext": fileExtension,
"mimeType": mimeType,
"data": base64.b64encode(fileContent).decode('utf-8'),
"base64Encoded": True,
"metadata": {
"isText": False
}
}]
except Exception as e: except Exception as e:
logger.error(f"Error processing file {fileName}: {str(e)}") logger.error(f"Error processing text file {fileName}: {str(e)}")
raise FileProcessingError(f"Error processing file: {str(e)}") raise FileProcessingError(f"Error processing text file: {str(e)}")
def _processImageFile(self, fileContent: bytes, fileName: str, fileExtension: str, mimeType: str, fileMetadata: Dict[str, Any] = None) -> List[Dict[str, Any]]:
"""Process an image file."""
try:
# Create image document
imageDoc = {
"name": fileName,
"ext": fileExtension,
"mimeType": mimeType,
"data": base64.b64encode(fileContent).decode('utf-8'),
"base64Encoded": True,
"metadata": {
"isText": False,
"isImage": True,
"format": fileExtension
}
}
# Add image description if available
if fileMetadata and "description" in fileMetadata:
imageDoc["metadata"]["description"] = fileMetadata["description"]
return [imageDoc]
except Exception as e:
logger.error(f"Error processing image file {fileName}: {str(e)}")
raise FileProcessingError(f"Error processing image file: {str(e)}")
def _processPdfFile(self, fileContent: bytes, fileName: str, fileMetadata: Dict[str, Any] = None) -> List[Dict[str, Any]]:
"""Process a PDF file."""
try:
# Create PDF document
pdfDoc = {
"name": fileName,
"ext": "pdf",
"mimeType": "application/pdf",
"data": base64.b64encode(fileContent).decode('utf-8'),
"base64Encoded": True,
"metadata": {
"isText": False,
"isPdf": True
}
}
return [pdfDoc]
except Exception as e:
logger.error(f"Error processing PDF file {fileName}: {str(e)}")
raise FileProcessingError(f"Error processing PDF file: {str(e)}")
def _processCsvFile(self, fileContent: bytes, fileName: str, fileMetadata: Dict[str, Any] = None) -> List[Dict[str, Any]]:
"""Process a CSV file."""
try:
# Try to decode as text first
try:
csvContent = fileContent.decode('utf-8')
base64Encoded = False
except UnicodeDecodeError:
# If not valid UTF-8, encode as base64
csvContent = base64.b64encode(fileContent).decode('utf-8')
base64Encoded = True
# Create CSV document
csvDoc = {
"name": fileName,
"ext": "csv",
"mimeType": "text/csv",
"data": csvContent,
"base64Encoded": base64Encoded,
"metadata": {
"isText": True,
"isCsv": True,
"format": "csv"
}
}
return [csvDoc]
except Exception as e:
logger.error(f"Error processing CSV file {fileName}: {str(e)}")
raise FileProcessingError(f"Error processing CSV file: {str(e)}")
def _processTextFile(self, fileContent: bytes, fileName: str, fileMetadata: Dict[str, Any] = None) -> List[Dict[str, Any]]:
"""Process a text file."""
try:
# Try to decode as text
try:
textContent = fileContent.decode('utf-8')
base64Encoded = False
except UnicodeDecodeError:
# If not valid UTF-8, encode as base64
textContent = base64.b64encode(fileContent).decode('utf-8')
base64Encoded = True
# Create text document
textDoc = {
"name": fileName,
"ext": "txt",
"mimeType": "text/plain",
"data": textContent,
"base64Encoded": base64Encoded,
"metadata": {
"isText": True
}
}
return [textDoc]
except Exception as e:
logger.error(f"Error processing text file {fileName}: {str(e)}")
raise FileProcessingError(f"Error processing text file: {str(e)}")

View file

@ -17,7 +17,7 @@ from modules.shared.mimeUtils import isTextMimeType
# Required imports # Required imports
from modules.workflow.agentRegistry import getAgentRegistry from modules.workflow.agentRegistry import getAgentRegistry
from modules.workflow.documentProcessor import getDocumentContents from modules.workflow.documentProcessor import getDocumentContents
from modules.interfaces.lucydomInterface import UserInputRequest from modules.interfaces.lucydomModel import UserInputRequest, ChatWorkflow, ChatMessage, ChatLog
# Configure logger # Configure logger
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -41,16 +41,13 @@ class WorkflowStoppedException(Exception):
class WorkflowManager: class WorkflowManager:
"""Manages the execution of workflows and their associated agents.""" """Manages the execution of workflows and their associated agents."""
def __init__(self, interfaceBase, interfaceMsft): def __init__(self, service):
"""Initialize the workflow manager with interface.""" """Initialize the workflow manager with service container."""
# Create service container # Store service container
self.service = type('ServiceContainer', (), { self.service = service
'base': interfaceBase,
'msft': interfaceMsft
})
self._mandateId = interfaceBase._mandateId self._mandateId = service.base._mandateId
self._userId = interfaceBase._userId self._userId = service.base._userId
self.agentRegistry = getAgentRegistry() self.agentRegistry = getAgentRegistry()
self.agentRegistry.initialize(service=self.service, workflowManager=self) self.agentRegistry.initialize(service=self.service, workflowManager=self)
@ -90,7 +87,7 @@ class WorkflowManager:
### Workflow State Machine Implementation ### Workflow State Machine Implementation
async def workflowStart(self, userInput: UserInputRequest, workflowId: Optional[str] = None) -> Dict[str, Any]: async def workflowStart(self, userInput: UserInputRequest, workflowId: Optional[str] = None) -> ChatWorkflow:
"""Starts a new workflow or continues an existing one.""" """Starts a new workflow or continues an existing one."""
try: try:
# Convert UserInputRequest to dict for processing # Convert UserInputRequest to dict for processing
@ -113,59 +110,38 @@ class WorkflowManager:
### Forces exit ### Forces exit
def checkExitCriteria(self, workflow: Dict[str, Any]): def checkExitCriteria(self, workflow: ChatWorkflow) -> None:
current_workflow = self.service.base.loadWorkflowState(workflow["id"])
if current_workflow["status"] in ["stopped", "failed"]:
self.logAdd(workflow, f"Workflow processing terminated due to status: {current_workflow['status']}", level="info")
# Raise an exception to stop execution
raise WorkflowStoppedException(f"Workflow execution stopped due to status: {current_workflow['status']}")
async def workflowStop(self, workflowId: str) -> Dict[str, Any]:
""" """
Stops a running workflow (State 8: Workflow Stopped). Check if the workflow should exit based on the current state.
Sets status to "stopped" and adds a log entry. Raises WorkflowStoppedException if workflow should stop.
Args: Args:
workflowId: ID of the workflow to stop workflow: ChatWorkflow object to check
Returns:
Updated workflow with status="stopped"
""" """
workflow = self.service.base.loadWorkflowState(workflowId) current_workflow = self.service.base.loadWorkflowState(workflow.id)
if not workflow: if current_workflow["state"] in ["stopped", "failed"]:
return {"error": "Workflow not found", "status": "failed"} self.logAdd(workflow, f"Workflow processing terminated due to state: {current_workflow['state']}", level="info")
# Raise an exception to stop execution
raise WorkflowStoppedException(f"Workflow execution stopped due to state: {current_workflow['state']}")
# Update status to stopped async def workflowProcess(self, userInput: Dict[str, Any], workflow: ChatWorkflow) -> ChatWorkflow:
workflow["status"] = "stopped"
workflow["lastActivity"] = datetime.now().isoformat()
# Update in database
self.service.base.updateWorkflow(workflowId, {
"status": workflow["status"],
"lastActivity": workflow["lastActivity"]
})
self.logAdd(workflow, GLOBAL_WORKFLOW_LABELS["workflowStatusMessages"]["stopped"], level="info", progress=100)
return workflow
async def workflowProcess(self, userInput: Dict[str, Any], workflow: Dict[str, Any]) -> Dict[str, Any]:
""" """
Main processing function that implements the workflow state machine. Main processing function that implements the workflow state machine.
Handles the complete workflow process from user input to final response. Handles the complete workflow process from user input to final response.
Args: Args:
userInput: User input with prompt and optional file list userInput: User input with prompt and optional file list
workflow: Current workflow object workflow: Current ChatWorkflow object
Returns: Returns:
Updated workflow with processing results Updated ChatWorkflow object with processing results
""" """
startTime = time.time() startTime = time.time()
try: try:
# State 3: User Message Processing # State 3: User Message Processing
self.checkExitCriteria(workflow) self.checkExitCriteria(workflow)
messageUser = await self.chatMessageToWorkflow("user", None, userInput, workflow) messageUser = await self.chatMessageToWorkflow("user", None, userInput, workflow)
messageUser["status"] = "first" # For first message messageUser.status = "first" # For first message
# State 4: Project Manager Analysis # State 4: Project Manager Analysis
self.checkExitCriteria(workflow) self.checkExitCriteria(workflow)
@ -182,13 +158,13 @@ class WorkflowManager:
# Save the response as a message in the workflow and add log entries # Save the response as a message in the workflow and add log entries
self.checkExitCriteria(workflow) self.checkExitCriteria(workflow)
responseMessage = { responseMessage = ChatMessage(
"role": "assistant", role="assistant",
"agentName": "Project Manager", agentName="Project Manager",
"content": objUserResponse, content=objUserResponse,
"status": "step" # As per state machine specification status="step" # As per state machine specification
} )
self.messageAdd(workflow, responseMessage) self.messageAdd(workflow, responseMessage.model_dump())
# Add detailed log entry about the task plan # Add detailed log entry about the task plan
taskPlanLog = "Input: " taskPlanLog = "Input: "
@ -255,8 +231,8 @@ class WorkflowManager:
self.checkExitCriteria(workflow) self.checkExitCriteria(workflow)
self.logAdd(workflow, "Creating final response", level="info", progress=90) self.logAdd(workflow, "Creating final response", level="info", progress=90)
finalMessage = await self.generateFinalMessage(objUserResponse, objFinalDocuments, objResults) finalMessage = await self.generateFinalMessage(objUserResponse, objFinalDocuments, objResults)
finalMessage["status"] = "last" # As per state machine specification finalMessage.status = "last" # As per state machine specification
self.messageAdd(workflow, finalMessage) self.messageAdd(workflow, finalMessage.model_dump())
# State 7: Workflow Completion # State 7: Workflow Completion
self.checkExitCriteria(workflow) self.checkExitCriteria(workflow)
@ -264,31 +240,31 @@ class WorkflowManager:
# Update processing time # Update processing time
endTime = time.time() endTime = time.time()
workflow["dataStats"]["processingTime"] = endTime - startTime workflow.dataStats["processingTime"] = endTime - startTime
return workflow return workflow
except Exception as e: except Exception as e:
# State 2: Workflow Exception # State 2: Workflow Exception
logger.error(f"Workflow processing error: {str(e)}", exc_info=True) logger.error(f"Workflow processing error: {str(e)}", exc_info=True)
workflow["status"] = "failed" workflow.state = "failed"
workflow["lastActivity"] = datetime.now().isoformat() workflow.lastActivity = datetime.now().isoformat()
# Update processing time even on error # Update processing time even on error
endTime = time.time() endTime = time.time()
workflow["dataStats"]["processingTime"] = endTime - startTime workflow.dataStats["processingTime"] = endTime - startTime
# Update in database # Update in database
self.service.base.updateWorkflow(workflow["id"], { self.service.base.updateWorkflow(workflow.id, {
"status": "failed", "state": "failed",
"lastActivity": workflow["lastActivity"], "lastActivity": workflow.lastActivity,
"dataStats": workflow["dataStats"] "dataStats": workflow.dataStats
}) })
self.logAdd(workflow, f"Workflow failed: {str(e)}", level="error", progress=100) self.logAdd(workflow, f"Workflow failed: {str(e)}", level="error", progress=100)
return workflow return workflow
def workflowInit(self, workflowId: Optional[str] = None) -> Dict[str, Any]: def workflowInit(self, workflowId: Optional[str] = None) -> ChatWorkflow:
""" """
Initializes a workflow or loads an existing one with round counting (State 1: Workflow Initialization). Initializes a workflow or loads an existing one with round counting (State 1: Workflow Initialization).
@ -296,47 +272,36 @@ class WorkflowManager:
workflowId: Optional - ID of the workflow to load workflowId: Optional - ID of the workflow to load
Returns: Returns:
Initialized workflow object Initialized ChatWorkflow object
""" """
currentTime = datetime.now().isoformat() currentTime = datetime.now().isoformat()
workflowExist=self.service.base.getWorkflow(workflowId) workflowExist = self.service.base.getWorkflow(workflowId)
if workflowId is None or not workflowExist: if workflowId is None or not workflowExist:
# Create new workflow # Create new workflow
newWorkflowId = str(uuid.uuid4()) if workflowId is None else workflowId newWorkflowId = str(uuid.uuid4()) if workflowId is None else workflowId
workflow = { workflow = ChatWorkflow(
"id": newWorkflowId, id=newWorkflowId,
"_mandateId": self._mandateId, _mandateId=self._mandateId,
"_userId": self._userId, _userId=self._userId,
"name": f"Workflow {newWorkflowId[:8]}", name=f"Workflow {newWorkflowId[:8]}",
"startedAt": currentTime, startedAt=currentTime,
"messages": [], # Empty list - will be filled with references messages=[], # Empty list - will be filled with references
"messageIds": [], # Initialize empty messageIds list messageIds=[], # Initialize empty messageIds list
"logs": [], logs=[],
"dataStats": { dataStats={
"bytesSent": 0, "bytesSent": 0,
"bytesReceived": 0, "bytesReceived": 0,
"tokensUsed": 0, "tokensUsed": 0,
"processingTime": 0.0 "processingTime": 0.0
}, },
"currentRound": 1, currentRound=1,
"status": "running", state="running",
"lastActivity": currentTime, lastActivity=currentTime,
} )
# Save to database - only the workflow metadata # Save to database - only the workflow metadata
workflowDb = { workflowDb = workflow.model_dump()
"id": workflow["id"],
"_mandateId": workflow["_mandateId"],
"_userId": workflow["_userId"],
"name": workflow["name"],
"startedAt": workflow["startedAt"],
"status": workflow["status"],
"dataStats": workflow["dataStats"],
"currentRound": workflow["currentRound"],
"lastActivity": workflow["lastActivity"],
"messageIds": workflow["messageIds"] # Include messageIds
}
self.service.base.createWorkflow(workflowDb) self.service.base.createWorkflow(workflowDb)
self.logAdd(workflow, GLOBAL_WORKFLOW_LABELS["workflowStatusMessages"]["init"], level="info", progress=0) self.logAdd(workflow, GLOBAL_WORKFLOW_LABELS["workflowStatusMessages"]["init"], level="info", progress=0)
@ -345,43 +310,41 @@ class WorkflowManager:
else: else:
# State 10: Workflow Resumption - Load existing workflow # State 10: Workflow Resumption - Load existing workflow
workflow = self.service.base.loadWorkflowState(workflowId) workflow = self.service.base.loadWorkflowState(workflowId)
workflow = ChatWorkflow(**workflow)
# Ensure messageIds exists # Ensure messageIds exists
if "messageIds" not in workflow: if not workflow.messageIds:
# Initialize from existing messages # Initialize from existing messages
workflow["messageIds"] = [msg["id"] for msg in workflow.get("messages", [])] workflow.messageIds = [msg["id"] for msg in workflow.messages]
# Update in database # Update in database
self.service.base.updateWorkflow(workflowId, {"messageIds": workflow["messageIds"]}) self.service.base.updateWorkflow(workflowId, {"messageIds": workflow.messageIds})
# Update status and increment round counter # Update status and increment round counter
workflow["status"] = "running" workflow.state = "running"
workflow["lastActivity"] = currentTime workflow.lastActivity = currentTime
# Increment currentRound if it exists, otherwise set it to 1 # Increment currentRound if it exists, otherwise set it to 1
if "currentRound" in workflow: workflow.currentRound = (workflow.currentRound or 0) + 1
workflow["currentRound"] += 1
else:
workflow["currentRound"] = 1
# Ensure dataStats exists with correct field names # Ensure dataStats exists with correct field names
if "dataStats" not in workflow: if not workflow.dataStats:
workflow["dataStats"] = { workflow.dataStats = {
"bytesSent": 0, "bytesSent": 0,
"bytesReceived": 0, "bytesReceived": 0,
"tokensUsed": 0, "tokensUsed": 0,
"processingTime": 0.0 "processingTime": 0.0
} }
elif "tokenCount" in workflow["dataStats"]: elif "tokenCount" in workflow.dataStats:
# Convert old tokenCount to tokensUsed if needed # Convert old tokenCount to tokensUsed if needed
workflow["dataStats"]["tokensUsed"] = workflow["dataStats"].pop("tokenCount", 0) workflow.dataStats["tokensUsed"] = workflow.dataStats.pop("tokenCount", 0)
# Update in database - only the relevant workflow fields # Update in database - only the relevant workflow fields
workflowUpdate = { workflowUpdate = {
"status": workflow["status"], "state": workflow.state,
"lastActivity": workflow["lastActivity"], "lastActivity": workflow.lastActivity,
"currentRound": workflow["currentRound"], "currentRound": workflow.currentRound,
"dataStats": workflow["dataStats"] # Include updated dataStats "dataStats": workflow.dataStats # Include updated dataStats
} }
self.service.base.updateWorkflow(workflowId, workflowUpdate) self.service.base.updateWorkflow(workflowId, workflowUpdate)
@ -1128,36 +1091,32 @@ filesDelivered = {self.parseJson2text(matchingDocuments)}
logger.error(f"Error processing content: {str(e)}") logger.error(f"Error processing content: {str(e)}")
return f"Error processing content: {str(e)}" return f"Error processing content: {str(e)}"
def messageAdd(self, workflow: Dict[str, Any], message: Dict[str, Any]) -> Dict[str, Any]: def messageAdd(self, workflow: ChatWorkflow, message: Dict[str, Any]) -> ChatMessage:
""" """
Adds a message to the workflow and updates lastActivity. Adds a message to the workflow and updates lastActivity.
Saves the message in the database and updates the workflow with references. Saves the message in the database and updates the workflow with references.
Also updates statistics for the message. Also updates statistics for the message.
Args: Args:
workflow: Workflow object workflow: ChatWorkflow object
message: Message to be saved message: Message data to be saved
Returns: Returns:
Added message Added ChatMessage object
""" """
currentTime = datetime.now().isoformat() currentTime = datetime.now().isoformat()
# Ensure messages list exists
if "messages" not in workflow:
workflow["messages"] = []
# Generate new message ID if not present # Generate new message ID if not present
if "id" not in message: if "id" not in message:
message["id"] = f"msg_{str(uuid.uuid4())}" message["id"] = f"msg_{str(uuid.uuid4())}"
# Add workflow ID and timestamps # Add workflow ID and timestamps
message["workflowId"] = workflow["id"] message["workflowId"] = workflow.id
message["startedAt"] = currentTime message["startedAt"] = currentTime
message["finishedAt"] = currentTime message["finishedAt"] = currentTime
# Set sequence number # Set sequence number
message["sequenceNo"] = len(workflow["messages"]) + 1 message["sequenceNo"] = len(workflow.messages) + 1
# Ensure required fields are present # Ensure required fields are present
if "role" not in message: if "role" not in message:
@ -1184,8 +1143,8 @@ filesDelivered = {self.parseJson2text(matchingDocuments)}
tokensUsed = bytesSent tokensUsed = bytesSent
# Update workflow statistics # Update workflow statistics
if "dataStats" not in workflow: if not workflow.dataStats:
workflow["dataStats"] = { workflow.dataStats = {
"bytesSent": 0, "bytesSent": 0,
"bytesReceived": 0, "bytesReceived": 0,
"tokensUsed": 0, "tokensUsed": 0,
@ -1194,37 +1153,40 @@ filesDelivered = {self.parseJson2text(matchingDocuments)}
# Update statistics based on message role # Update statistics based on message role
if message["role"] == "user": if message["role"] == "user":
workflow["dataStats"]["bytesSent"] += bytesSent workflow.dataStats["bytesSent"] += bytesSent
workflow["dataStats"]["tokensUsed"] += tokensUsed workflow.dataStats["tokensUsed"] += tokensUsed
else: # assistant messages else: # assistant messages
workflow["dataStats"]["bytesReceived"] += bytesSent workflow.dataStats["bytesReceived"] += bytesSent
workflow["dataStats"]["tokensUsed"] += tokensUsed workflow.dataStats["tokensUsed"] += tokensUsed
# Create ChatMessage object
chatMessage = ChatMessage(**message)
# Add message to workflow # Add message to workflow
workflow["messages"].append(message) workflow.messages.append(chatMessage)
# Ensure messageIds list exists # Ensure messageIds list exists
if "messageIds" not in workflow: if not workflow.messageIds:
workflow["messageIds"] = [] workflow.messageIds = []
# Add message ID to the messageIds list # Add message ID to the messageIds list
workflow["messageIds"].append(message["id"]) workflow.messageIds.append(chatMessage.id)
# Update workflow status # Update workflow status
workflow["lastActivity"] = currentTime workflow.lastActivity = currentTime
# Save to database - first the message itself # Save to database - first the message itself
self.service.base.createWorkflowMessage(message) self.service.base.createWorkflowMessage(chatMessage.model_dump())
# Then save the workflow with updated references and statistics # Then save the workflow with updated references and statistics
workflowUpdate = { workflowUpdate = {
"lastActivity": currentTime, "lastActivity": currentTime,
"messageIds": workflow["messageIds"], "messageIds": workflow.messageIds,
"dataStats": workflow["dataStats"] # Include updated statistics "dataStats": workflow.dataStats # Include updated statistics
} }
self.service.base.updateWorkflow(workflow["id"], workflowUpdate) self.service.base.updateWorkflow(workflow.id, workflowUpdate)
return message return chatMessage
def _trimDataInJson(self, jsonObj: Any) -> Any: def _trimDataInJson(self, jsonObj: Any) -> Any:
""" """
@ -1249,14 +1211,14 @@ filesDelivered = {self.parseJson2text(matchingDocuments)}
return result return result
return jsonObj return jsonObj
def logAdd(self, workflow: Dict[str, Any], message: str, level: str = "info", def logAdd(self, workflow: ChatWorkflow, message: str, level: str = "info",
progress: Optional[int] = None) -> str: progress: Optional[int] = None) -> str:
""" """
Adds a log entry to the workflow and also logs it in the logger. Adds a log entry to the workflow and also logs it in the logger.
Enhanced with standardized formatting and workflow status tracking. Enhanced with standardized formatting and workflow status tracking.
Args: Args:
workflow: Workflow object workflow: ChatWorkflow object
message: Log message message: Log message
level: Log level (info, warning, error) level: Log level (info, warning, error)
progress: Optional - Progress value (0-100) progress: Optional - Progress value (0-100)
@ -1264,15 +1226,11 @@ filesDelivered = {self.parseJson2text(matchingDocuments)}
Returns: Returns:
ID of the created log entry ID of the created log entry
""" """
# Ensure logs list exists
if "logs" not in workflow:
workflow["logs"] = []
# Generate log ID # Generate log ID
logId = f"log_{str(uuid.uuid4())}" logId = f"log_{str(uuid.uuid4())}"
# Get workflow status # Get workflow status
workflowStatus = workflow.get("status", "running") workflowStatus = workflow.state
# Set agentName from global settings # Set agentName from global settings
agentName = GLOBAL_WORKFLOW_LABELS.get("systemName", "unknown") agentName = GLOBAL_WORKFLOW_LABELS.get("systemName", "unknown")
@ -1291,33 +1249,33 @@ filesDelivered = {self.parseJson2text(matchingDocuments)}
pass pass
# Create log entry # Create log entry
logEntry = { logEntry = ChatLog(
"id": logId, id=logId,
"workflowId": workflow["id"], workflowId=workflow.id,
"message": processedMessage, message=processedMessage,
"type": level, type=level,
"timestamp": datetime.now().isoformat(), timestamp=datetime.now().isoformat(),
"agentName": agentName, agentName=agentName,
"status": workflowStatus status=workflowStatus
} )
# Add progress if provided # Add progress if provided
if progress is not None: if progress is not None:
logEntry["progress"] = progress logEntry.progress = progress
# Add log to workflow # Add log to workflow
workflow["logs"].append(logEntry) workflow.logs.append(logEntry)
# Save in database # Save in database
self.service.base.createWorkflowLog(logEntry) self.service.base.createWorkflowLog(logEntry.model_dump())
# Also log in logger # Also log in logger
if level == "info": if level == "info":
logger.info(f"Workflow {workflow['id']}: {processedMessage}") logger.info(f"Workflow {workflow.id}: {processedMessage}")
elif level == "warning": elif level == "warning":
logger.warning(f"Workflow {workflow['id']}: {processedMessage}") logger.warning(f"Workflow {workflow.id}: {processedMessage}")
elif level == "error": elif level == "error":
logger.error(f"Workflow {workflow['id']}: {processedMessage}") logger.error(f"Workflow {workflow.id}: {processedMessage}")
return logId return logId
@ -1571,9 +1529,9 @@ filesDelivered = {self.parseJson2text(matchingDocuments)}
_workflowManagers = {} _workflowManagers = {}
_workflowManagerLastAccess = {} # Track last access time for cleanup _workflowManagerLastAccess = {} # Track last access time for cleanup
async def getWorkflowManager(interfaceBase, interfaceMsft) -> WorkflowManager: async def getWorkflowManager(service) -> WorkflowManager:
"""Get or create a workflow manager instance.""" """Get or create a workflow manager instance."""
contextKey = f"{interfaceBase._mandateId}_{interfaceBase._userId}" contextKey = f"{service.base._mandateId}_{service.base._userId}"
# Check if we have a cached instance # Check if we have a cached instance
if contextKey in _workflowManagers: if contextKey in _workflowManagers:
@ -1581,7 +1539,7 @@ async def getWorkflowManager(interfaceBase, interfaceMsft) -> WorkflowManager:
return _workflowManagers[contextKey] return _workflowManagers[contextKey]
# Create new instance # Create new instance
manager = WorkflowManager(interfaceBase, interfaceMsft) manager = WorkflowManager(service)
# Cache the instance # Cache the instance
_workflowManagers[contextKey] = manager _workflowManagers[contextKey] = manager

View file

@ -1,40 +1,9 @@
....................... TASKS ....................... TASKS
TODO: Frontend to adapt
##################### #####################
CROSS-CHECK Wrkflow set
ERROR --- > when user logs in with "local" managed account and then logs in to msft account with "msft" authority, the userid is switched to the microsoft instance.
TODO: routeGeneral: To add User Model for user creation - or to pass to interface. to check!
TODO: All routes not to use "*interface.py" modules for checks or data handling. Full data handling, access control, uam to be in the "*Interface.py" modules. This to adapt.
TODO: Assign model classes for "create" and "update" functions. not to pass specific attributes to functions or routes
TODO: Interface assignment overall to adapt
TODO: Implement userid,mandateid change overall
TODO: Workflow-sub modules and agents to include Models and adaptions
###################
! function callAI() to ask with userPrompt,systemPrompt optional), not with json ! function callAI() to ask with userPrompt,systemPrompt optional), not with json
! in the taskplan to refer files always in context of user/mandate ! in the taskplan to refer files always in context of user/mandate
! userinput to handle with object AgentQuery --> when received in frontend to enhance for full object ! userinput to handle with object AgentQuery --> when received in frontend to enhance for full object
@ -94,6 +63,36 @@ Tools to transfer incl funds:
----------------------- DONE ----------------------- DONE
We have to correct the following wrong user access management.
Issue is: when user logs in with "local" managed account and then logs in to msft account with "msft" authority, the userid is switched to the microsoft instance in the workflow. this must not happen.
Objective: The correct logic is, that a user logs in with an account (managed by "local" or other authority). Once logged in, his login does not change, also if he connects to microsoft account afterwards.
Problem: We have a mix between user-login (creating currentUser profile) and user-connections (attaching user to a service, like "msft" - and future other services in parallel).
Concept: We need to separate user-login and user-connections:
1. the ui login and register modules produce a user-login, resulting in a currentUser profile in the backend to be used for workflow and other activities. the user gets a token (from "local" or "msft" or furthers). this token has to be checked when user logs in. ALWAYS a check is required by the according registration authority.
those use cases:
- if user registers with a "local" profile, a new user is created, a local token is produced
- if user logs in with a "local" profile for an existing user, a local token is produced
- if user logs in with a "local" profile for a non-existing user, login is denied (no user)
- if user logs in with a "msft" profile (or other foreign profile) for an existing user, a local token AND a token in "msft" database (or other foreign system) is produced
- if user logs in with a "msft" profile (or other foreign profile) for a non-existing user, a local profile is generated based on information from foreign account, then a local token AND a token in "msft" database (or other foreign system) is produced
2. the ui navigation buttons for "Login MSFT" or future other buttons to connect to services (like e.g. google account or github account or microsoft "msft" account, etc.) does NOT generate a user-login, only a user-connection to a service.
Solution:
So there must be a mechanism, which manages user-login and user-connection. Following proposition: User has a user profile to login and a list of profiles for user-connections.
Examples:
- user registers with "local" profile --> he gets local profile with 0 user-connections
- user registers with "msft" profile --> he gets local profile with 1 user-connections to "msft". Then he connects to another "msft" profile. Now he gets local profile 2 user-connections "msft"
- user registers with "google" profile (future) --> he gets local profile and 1 user-connections to "google". Then he connects to another "msft" profile. Now he gets local profile and 1 user-connections "msft" and 1 user-connection "google".
can you tell me, how you would implement this adapted model into the pydantic model and into the code modules in a structured and maintainable way?
i want to refactor the user management in the backend through the user journey. currrently we have two problems: we always pass _userid and _mandate or id with _mandate from function to function, which blocks scaling. this is too complicated and non-logic. i want to refactor the user management in the backend through the user journey. currrently we have two problems: we always pass _userid and _mandate or id with _mandate from function to function, which blocks scaling. this is too complicated and non-logic.
to adapt the following: to adapt the following:

View file

@ -9,6 +9,8 @@ pydantic==1.10.13 # Ältere Version ohne Rust-Abhängigkeit
python-jose==3.3.0 python-jose==3.3.0
passlib==1.7.4 passlib==1.7.4
argon2-cffi>=21.3.0 # Für Passwort-Hashing in gateway_interface.py argon2-cffi>=21.3.0 # Für Passwort-Hashing in gateway_interface.py
google-auth-oauthlib==1.2.0 # Für Google OAuth
google-auth==2.27.0 # Für Google Authentication
## Database ## Database
mysql-connector-python==8.1.0 mysql-connector-python==8.1.0