Cleaned all import lists

This commit is contained in:
ValueOn AG 2025-10-24 23:57:17 +02:00
parent 8523da7fe2
commit daddf417be
75 changed files with 216 additions and 530 deletions

1
app.py
View file

@ -1,6 +1,5 @@
import os
import sys
import asyncio
from urllib.parse import quote_plus
os.environ["NUMEXPR_MAX_THREADS"] = "12"

View file

@ -1,7 +1,7 @@
import logging
import httpx
import os
from typing import Dict, Any, List, Union
from typing import Dict, Any, List
from fastapi import HTTPException
from modules.shared.configuration import APP_CONFIG
from modules.aicore.aicoreBase import BaseConnectorAi

View file

@ -1,5 +1,5 @@
import logging
from typing import Dict, Any, List, Union
from typing import List
from modules.aicore.aicoreBase import BaseConnectorAi
from modules.datamodels.datamodelAi import AiModel, PriorityEnum, ProcessingModeEnum, OperationTypeEnum, AiModelCall, AiModelResponse, createOperationTypeRatings

View file

@ -1,7 +1,6 @@
import logging
import base64
import httpx
from typing import Dict, Any, List, Union
from typing import List
from fastapi import HTTPException
from modules.shared.configuration import APP_CONFIG
from modules.aicore.aicoreBase import BaseConnectorAi

View file

@ -1,7 +1,6 @@
import logging
import httpx
import asyncio
from typing import Dict, Any, List, Union, Optional
from typing import List
from fastapi import HTTPException
from modules.shared.configuration import APP_CONFIG
from modules.aicore.aicoreBase import BaseConnectorAi

View file

@ -8,7 +8,6 @@ from dataclasses import dataclass
from typing import Optional, List
from tavily import AsyncTavilyClient
from modules.shared.configuration import APP_CONFIG
from modules.shared.timezoneUtils import get_utc_timestamp
from modules.aicore.aicoreBase import BaseConnectorAi
from modules.datamodels.datamodelAi import AiModel, PriorityEnum, ProcessingModeEnum, OperationTypeEnum, AiModelResponse, createOperationTypeRatings

View file

@ -1,14 +1,12 @@
import json
import os
from typing import List, Dict, Any, Optional, Union, TypedDict
from typing import List, Dict, Any, Optional, TypedDict
import logging
from datetime import datetime
import uuid
from pydantic import BaseModel
import threading
import time
from modules.shared.attributeUtils import to_dict
from modules.shared.timezoneUtils import get_utc_timestamp
logger = logging.getLogger(__name__)
@ -567,7 +565,7 @@ class DatabaseConnector:
# If record is a Pydantic model, convert to dict
if isinstance(record, BaseModel):
record = to_dict(record)
record = record.model_dump()
# Save record
self._saveRecord(table, record["id"], record)
@ -582,7 +580,7 @@ class DatabaseConnector:
# If record is a Pydantic model, convert to dict
if isinstance(record, BaseModel):
record = to_dict(record)
record = record.model_dump()
# CRITICAL: Ensure we never modify the ID
if "id" in record and str(record["id"]) != recordId:

View file

@ -1,16 +1,11 @@
import psycopg2
import psycopg2.extras
import json
import os
import logging
from typing import List, Dict, Any, Optional, Union, get_origin, get_args
from datetime import datetime
import uuid
from pydantic import BaseModel, Field
import threading
import time
from modules.shared.attributeUtils import to_dict, ModelMixin
from modules.shared.timezoneUtils import get_utc_timestamp
from modules.shared.configuration import APP_CONFIG
@ -19,7 +14,7 @@ logger = logging.getLogger(__name__)
# No mapping needed - table name = Pydantic model name exactly
class SystemTable(BaseModel, ModelMixin):
class SystemTable(BaseModel):
"""Data model for system table entries"""
table_name: str = Field(
@ -39,22 +34,13 @@ class SystemTable(BaseModel, ModelMixin):
def _get_model_fields(model_class) -> Dict[str, str]:
"""Get all fields from Pydantic model and map to SQL types."""
# Pydantic v2 uses model_fields instead of __fields__
if hasattr(model_class, "model_fields"):
# Pydantic v2
model_fields = model_class.model_fields
elif hasattr(model_class, "__fields__"):
model_fields = model_class.__fields__
else:
return {}
fields = {}
for field_name, field_info in model_fields.items():
# Pydantic v2 uses annotation instead of type_
field_type = (
field_info.annotation
if hasattr(field_info, "annotation")
else field_info.type_
)
# Pydantic v2
field_type = field_info.annotation
# Check for JSONB fields (Dict, List, or complex types)
if (
@ -538,11 +524,8 @@ class DatabaseConnector:
# If not valid JSON, convert to JSON string
value = json.dumps(value)
elif hasattr(value, 'model_dump'):
# Handle Pydantic v2 models
# Handle Pydantic models
value = json.dumps(value.model_dump())
elif hasattr(value, 'dict'):
# Handle Pydantic v1 models
value = json.dumps(value.dict())
else:
# Convert other types to JSON
value = json.dumps(value)
@ -912,7 +895,7 @@ class DatabaseConnector:
"""Creates a new record in a table based on Pydantic model class."""
# If record is a Pydantic model, convert to dict
if isinstance(record, BaseModel):
record = to_dict(record)
record = record.model_dump()
elif isinstance(record, dict):
record = record.copy()
else:
@ -947,7 +930,7 @@ class DatabaseConnector:
# If record is a Pydantic model, convert to dict
if isinstance(record, BaseModel):
record = to_dict(record)
record = record.model_dump()
elif isinstance(record, dict):
record = record.copy()
else:

View file

@ -3,12 +3,9 @@ Google Cloud Speech-to-Text and Translation Connector
Replaces Azure Speech Services with Google Cloud APIs
"""
import os
import io
import json
import html
import logging
import asyncio
from typing import Dict, Optional, Any
from google.cloud import speech
from google.cloud import translate_v2 as translate

View file

@ -1,4 +1,4 @@
from typing import Optional, List, Dict, Any, Literal, Callable, TYPE_CHECKING, Tuple
from typing import Optional, List, Dict, Any, Callable, TYPE_CHECKING, Tuple
from pydantic import BaseModel, Field
from enum import Enum

View file

@ -3,12 +3,12 @@
from typing import List, Dict, Any, Optional
from enum import Enum
from pydantic import BaseModel, Field
from modules.shared.attributeUtils import register_model_labels, ModelMixin
from modules.shared.attributeUtils import register_model_labels
from modules.shared.timezoneUtils import get_utc_timestamp
import uuid
class ChatStat(BaseModel, ModelMixin):
class ChatStat(BaseModel):
id: str = Field(
default_factory=lambda: str(uuid.uuid4()), description="Primary key"
)
@ -43,7 +43,7 @@ register_model_labels(
)
class ChatLog(BaseModel, ModelMixin):
class ChatLog(BaseModel):
id: str = Field(
default_factory=lambda: str(uuid.uuid4()), description="Primary key"
)
@ -79,7 +79,7 @@ register_model_labels(
)
class ChatDocument(BaseModel, ModelMixin):
class ChatDocument(BaseModel):
id: str = Field(
default_factory=lambda: str(uuid.uuid4()), description="Primary key"
)
@ -114,7 +114,7 @@ register_model_labels(
)
class ContentMetadata(BaseModel, ModelMixin):
class ContentMetadata(BaseModel):
size: int = Field(description="Content size in bytes")
pages: Optional[int] = Field(
None, description="Number of pages for multi-page content"
@ -151,7 +151,7 @@ register_model_labels(
)
class ContentItem(BaseModel, ModelMixin):
class ContentItem(BaseModel):
label: str = Field(description="Content label")
data: str = Field(description="Extracted text content")
metadata: ContentMetadata = Field(description="Content metadata")
@ -168,7 +168,7 @@ register_model_labels(
)
class ChatContentExtracted(BaseModel, ModelMixin):
class ChatContentExtracted(BaseModel):
id: str = Field(description="Reference to source ChatDocument")
contents: List[ContentItem] = Field(
default_factory=list, description="List of content items"
@ -185,7 +185,7 @@ register_model_labels(
)
class ChatMessage(BaseModel, ModelMixin):
class ChatMessage(BaseModel):
id: str = Field(
default_factory=lambda: str(uuid.uuid4()), description="Primary key"
)
@ -263,7 +263,7 @@ register_model_labels(
)
class ChatWorkflow(BaseModel, ModelMixin):
class ChatWorkflow(BaseModel):
id: str = Field(
default_factory=lambda: str(uuid.uuid4()),
description="Primary key",
@ -420,7 +420,7 @@ register_model_labels(
)
class UserInputRequest(BaseModel, ModelMixin):
class UserInputRequest(BaseModel):
prompt: str = Field(description="Prompt for the user")
listFileId: List[str] = Field(default_factory=list, description="List of file IDs")
userLanguage: str = Field(default="en", description="User's preferred language")
@ -437,7 +437,7 @@ register_model_labels(
)
class ActionDocument(BaseModel, ModelMixin):
class ActionDocument(BaseModel):
"""Clear document structure for action results"""
documentName: str = Field(description="Name of the document")
@ -456,7 +456,7 @@ register_model_labels(
)
class ActionResult(BaseModel, ModelMixin):
class ActionResult(BaseModel):
"""Clean action result with documents as primary output
IMPORTANT: Action methods should NOT set resultLabel in their return value.
@ -497,7 +497,7 @@ register_model_labels(
)
class ActionSelection(BaseModel, ModelMixin):
class ActionSelection(BaseModel):
method: str = Field(description="Method to execute (e.g., web, document, ai)")
name: str = Field(
description="Action name within the method (e.g., search, extract)"
@ -514,7 +514,7 @@ register_model_labels(
)
class ActionParameters(BaseModel, ModelMixin):
class ActionParameters(BaseModel):
parameters: Dict[str, Any] = Field(
default_factory=dict, description="Parameters to execute the selected action"
)
@ -529,7 +529,7 @@ register_model_labels(
)
class ObservationPreview(BaseModel, ModelMixin):
class ObservationPreview(BaseModel):
name: str = Field(description="Document name or URL label")
mime: str = Field(description="MIME type or kind")
snippet: str = Field(description="Short snippet or summary")
@ -546,7 +546,7 @@ register_model_labels(
)
class Observation(BaseModel, ModelMixin):
class Observation(BaseModel):
success: bool = Field(description="Action execution success flag")
resultLabel: str = Field(description="Deterministic label for produced documents")
documentsCount: int = Field(description="Number of produced documents")
@ -592,7 +592,7 @@ register_model_labels(
)
class DocumentExchange(BaseModel, ModelMixin):
class DocumentExchange(BaseModel):
documentsLabel: str = Field(description="Label for the set of documents")
documents: List[str] = Field(
default_factory=list, description="List of document references"
@ -609,7 +609,7 @@ register_model_labels(
)
class ActionItem(BaseModel, ModelMixin):
class ActionItem(BaseModel):
id: str = Field(..., description="Action ID")
execMethod: str = Field(..., description="Method to execute")
execAction: str = Field(..., description="Action to perform")
@ -675,7 +675,7 @@ register_model_labels(
)
class TaskResult(BaseModel, ModelMixin):
class TaskResult(BaseModel):
taskId: str = Field(..., description="Task ID")
status: TaskStatus = Field(default=TaskStatus.PENDING, description="Task status")
success: bool = Field(..., description="Whether the task was successful")
@ -696,7 +696,7 @@ register_model_labels(
)
class TaskItem(BaseModel, ModelMixin):
class TaskItem(BaseModel):
id: str = Field(..., description="Task ID")
workflowId: str = Field(..., description="Workflow ID")
userInput: str = Field(..., description="User input that triggered the task")
@ -747,7 +747,7 @@ register_model_labels(
)
class TaskStep(BaseModel, ModelMixin):
class TaskStep(BaseModel):
id: str
objective: str
dependencies: Optional[list[str]] = Field(default_factory=list)
@ -775,7 +775,7 @@ register_model_labels(
)
class TaskHandover(BaseModel, ModelMixin):
class TaskHandover(BaseModel):
taskId: str = Field(description="Target task ID")
sourceTask: Optional[str] = Field(None, description="Source task ID")
inputDocuments: List[DocumentExchange] = Field(
@ -824,7 +824,7 @@ register_model_labels(
)
class TaskContext(BaseModel, ModelMixin):
class TaskContext(BaseModel):
task_step: TaskStep
workflow: Optional["ChatWorkflow"] = None
workflow_id: Optional[str] = None
@ -856,7 +856,7 @@ class TaskContext(BaseModel, ModelMixin):
self.improvements.append(improvement)
class ReviewContext(BaseModel, ModelMixin):
class ReviewContext(BaseModel):
task_step: TaskStep
task_actions: Optional[list] = Field(default_factory=list)
action_results: Optional[list] = Field(default_factory=list)
@ -865,7 +865,7 @@ class ReviewContext(BaseModel, ModelMixin):
previous_results: Optional[list[str]] = Field(default_factory=list)
class ReviewResult(BaseModel, ModelMixin):
class ReviewResult(BaseModel):
status: str
reason: Optional[str] = None
improvements: Optional[list[str]] = Field(default_factory=list)
@ -896,7 +896,7 @@ register_model_labels(
)
class TaskPlan(BaseModel, ModelMixin):
class TaskPlan(BaseModel):
overview: str
tasks: list[TaskStep]
userMessage: Optional[str] = Field(
@ -918,7 +918,7 @@ register_model_labels(
TaskContext.update_forward_refs()
class PromptPlaceholder(BaseModel, ModelMixin):
class PromptPlaceholder(BaseModel):
label: str
content: str
summaryAllowed: bool = Field(
@ -938,7 +938,7 @@ register_model_labels(
)
class PromptBundle(BaseModel, ModelMixin):
class PromptBundle(BaseModel):
prompt: str
placeholders: List[PromptPlaceholder] = Field(default_factory=list)

View file

@ -2,13 +2,13 @@
from typing import Dict, Any, Optional, Union
from pydantic import BaseModel, Field
from modules.shared.attributeUtils import register_model_labels, ModelMixin
from modules.shared.attributeUtils import register_model_labels
from modules.shared.timezoneUtils import get_utc_timestamp
import uuid
import base64
class FileItem(BaseModel, ModelMixin):
class FileItem(BaseModel):
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key", frontend_type="text", frontend_readonly=True, frontend_required=False)
mandateId: str = Field(description="ID of the mandate this file belongs to", frontend_type="text", frontend_readonly=True, frontend_required=False)
fileName: str = Field(description="Name of the file", frontend_type="text", frontend_readonly=False, frontend_required=True)
@ -17,8 +17,6 @@ class FileItem(BaseModel, ModelMixin):
fileSize: int = Field(description="Size of the file in bytes", frontend_type="integer", frontend_readonly=True, frontend_required=False)
creationDate: float = Field(default_factory=get_utc_timestamp, description="Date when the file was created (UTC timestamp in seconds)", frontend_type="timestamp", frontend_readonly=True, frontend_required=False)
def to_dict(self) -> Dict[str, Any]:
return super().to_dict()
register_model_labels(
"FileItem",
{"en": "File Item", "fr": "Élément de fichier"},
@ -33,7 +31,7 @@ register_model_labels(
},
)
class FilePreview(BaseModel, ModelMixin):
class FilePreview(BaseModel):
content: Union[str, bytes] = Field(description="File content (text or binary)")
mimeType: str = Field(description="MIME type of the file")
fileName: str = Field(description="Original fileName")
@ -41,8 +39,9 @@ class FilePreview(BaseModel, ModelMixin):
encoding: Optional[str] = Field(None, description="Text encoding if content is text")
size: int = Field(description="Size of the content in bytes")
def to_dict(self) -> Dict[str, Any]:
data = super().to_dict()
def toDictWithBase64Encoding(self) -> Dict[str, Any]:
"""Convert to dictionary with base64 encoding for binary content."""
data = self.model_dump()
if isinstance(data.get("content"), bytes):
data["content"] = base64.b64encode(data["content"]).decode("utf-8")
return data
@ -59,7 +58,7 @@ register_model_labels(
},
)
class FileData(BaseModel, ModelMixin):
class FileData(BaseModel):
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key")
data: str = Field(description="File data content")
base64Encoded: bool = Field(description="Whether the data is base64 encoded")

View file

@ -3,10 +3,10 @@
import uuid
from typing import Optional
from pydantic import BaseModel, Field
from modules.shared.attributeUtils import register_model_labels, ModelMixin
from modules.shared.attributeUtils import register_model_labels
class DataNeutraliserConfig(BaseModel, ModelMixin):
class DataNeutraliserConfig(BaseModel):
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Unique ID of the configuration", frontend_type="text", frontend_readonly=True, frontend_required=False)
mandateId: str = Field(description="ID of the mandate this configuration belongs to", frontend_type="text", frontend_readonly=True, frontend_required=True)
userId: str = Field(description="ID of the user who created this configuration", frontend_type="text", frontend_readonly=True, frontend_required=True)
@ -28,7 +28,7 @@ register_model_labels(
},
)
class DataNeutralizerAttributes(BaseModel, ModelMixin):
class DataNeutralizerAttributes(BaseModel):
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Unique ID of the attribute mapping (used as UID in neutralized files)", frontend_type="text", frontend_readonly=True, frontend_required=False)
mandateId: str = Field(description="ID of the mandate this attribute belongs to", frontend_type="text", frontend_readonly=True, frontend_required=True)
userId: str = Field(description="ID of the user who created this attribute", frontend_type="text", frontend_readonly=True, frontend_required=True)

View file

@ -1,8 +1,8 @@
"""Security models: Token and AuthEvent."""
from typing import Optional
from pydantic import BaseModel, Field, ConfigDict
from modules.shared.attributeUtils import register_model_labels, ModelMixin
from pydantic import BaseModel, Field
from modules.shared.attributeUtils import register_model_labels
from modules.shared.timezoneUtils import get_utc_timestamp
from .datamodelUam import AuthAuthority
from enum import Enum
@ -14,7 +14,7 @@ class TokenStatus(str, Enum):
REVOKED = "revoked"
class Token(BaseModel, ModelMixin):
class Token(BaseModel):
id: Optional[str] = None
userId: str
authority: AuthAuthority
@ -74,7 +74,7 @@ register_model_labels(
)
class AuthEvent(BaseModel, ModelMixin):
class AuthEvent(BaseModel):
id: str = Field(
default_factory=lambda: str(uuid.uuid4()),
description="Unique ID of the auth event",

View file

@ -4,7 +4,7 @@ import uuid
from typing import Optional
from enum import Enum
from pydantic import BaseModel, Field, EmailStr
from modules.shared.attributeUtils import register_model_labels, ModelMixin
from modules.shared.attributeUtils import register_model_labels
from modules.shared.timezoneUtils import get_utc_timestamp
@ -24,7 +24,7 @@ class ConnectionStatus(str, Enum):
REVOKED = "revoked"
PENDING = "pending"
class Mandate(BaseModel, ModelMixin):
class Mandate(BaseModel):
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Unique ID of the mandate", frontend_type="text", frontend_readonly=True, frontend_required=False)
name: str = Field(description="Name of the mandate", frontend_type="text", frontend_readonly=False, frontend_required=True)
language: str = Field(default="en", description="Default language of the mandate", frontend_type="select", frontend_readonly=False, frontend_required=True, frontend_options=[
@ -45,7 +45,7 @@ register_model_labels(
},
)
class UserConnection(BaseModel, ModelMixin):
class UserConnection(BaseModel):
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Unique ID of the connection", frontend_type="text", frontend_readonly=True, frontend_required=False)
userId: str = Field(description="ID of the user this connection belongs to", frontend_type="text", frontend_readonly=True, frontend_required=False)
authority: AuthAuthority = Field(description="Authentication authority", frontend_type="select", frontend_readonly=True, frontend_required=False, frontend_options=[
@ -90,7 +90,7 @@ register_model_labels(
},
)
class User(BaseModel, ModelMixin):
class User(BaseModel):
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Unique ID of the user", frontend_type="text", frontend_readonly=True, frontend_required=False)
username: str = Field(description="Username for login", frontend_type="text", frontend_readonly=False, frontend_required=True)
email: Optional[EmailStr] = Field(None, description="Email address of the user", frontend_type="email", frontend_readonly=False, frontend_required=True)

View file

@ -1,11 +1,11 @@
"""Utility datamodels: Prompt."""
from pydantic import BaseModel, Field
from modules.shared.attributeUtils import register_model_labels, ModelMixin
from modules.shared.attributeUtils import register_model_labels
import uuid
class Prompt(BaseModel, ModelMixin):
class Prompt(BaseModel):
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key", frontend_type="text", frontend_readonly=True, frontend_required=False)
mandateId: str = Field(description="ID of the mandate this prompt belongs to", frontend_type="text", frontend_readonly=True, frontend_required=False)
content: str = Field(description="Content of the prompt", frontend_type="textarea", frontend_readonly=False, frontend_required=True)

View file

@ -1,13 +1,12 @@
"""Voice settings datamodel."""
from typing import Dict, Any, Optional
from pydantic import BaseModel, Field
from modules.shared.attributeUtils import register_model_labels, ModelMixin
from modules.shared.attributeUtils import register_model_labels
from modules.shared.timezoneUtils import get_utc_timestamp
import uuid
class VoiceSettings(BaseModel, ModelMixin):
class VoiceSettings(BaseModel):
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key", frontend_type="text", frontend_readonly=True, frontend_required=False)
userId: str = Field(description="ID of the user these settings belong to", frontend_type="text", frontend_readonly=True, frontend_required=True)
mandateId: str = Field(description="ID of the mandate these settings belong to", frontend_type="text", frontend_readonly=True, frontend_required=False)
@ -19,8 +18,6 @@ class VoiceSettings(BaseModel, ModelMixin):
creationDate: float = Field(default_factory=get_utc_timestamp, description="Date when the settings were created (UTC timestamp in seconds)", frontend_type="timestamp", frontend_readonly=True, frontend_required=False)
lastModified: float = Field(default_factory=get_utc_timestamp, description="Date when the settings were last modified (UTC timestamp in seconds)", frontend_type="timestamp", frontend_readonly=True, frontend_required=False)
def to_dict(self) -> Dict[str, Any]:
return super().to_dict()
register_model_labels(
"VoiceSettings",

View file

@ -1,5 +1,4 @@
import logging
import asyncio
from typing import Optional
from modules.datamodels.datamodelUam import User

View file

@ -5,7 +5,6 @@ This module handles the synchronization of tickets to SharePoint using the new
Graph API-based connector architecture.
"""
import asyncio
import logging
import os
import io
@ -13,7 +12,6 @@ import pandas as pd
import csv as csv_module
from io import StringIO, BytesIO
from datetime import datetime, UTC
from typing import Dict, Any, List, Optional
from modules.services import getInterface as getServices
logger = logging.getLogger(__name__)

View file

@ -1,5 +1,6 @@
import logging
import asyncio
import uuid
from typing import Dict, Any, List, Union, Tuple, Optional
from dataclasses import dataclass
import time
@ -17,6 +18,7 @@ from modules.datamodels.datamodelAi import (
AiModelCall,
AiModelResponse,
)
from modules.datamodels.datamodelExtraction import ContentPart
# Dynamic model registry - models are now loaded from connectors via aicore system
@ -299,7 +301,7 @@ class AiObjects:
for i, result in enumerate(partResults):
if result.content:
content_part = ContentPart(
id=makeId(),
id=str(uuid.uuid4()),
parentId=None,
label=f"ai_result_{i}",
typeGroup="text", # Default to text for AI results
@ -339,14 +341,12 @@ class AiObjects:
return ""
# Convert AiCallResponse results to ContentParts for merging
from modules.datamodels.datamodelExtraction import ContentPart
from modules.services.serviceExtraction.subUtils import makeId
content_parts = []
for i, result in enumerate(chunkResults):
if result.content:
content_part = ContentPart(
id=makeId(),
id=str(uuid.uuid4()),
parentId=None,
label=f"chunk_result_{i}",
typeGroup="text", # Default to text for AI results
@ -1075,15 +1075,15 @@ Format your response in a clear, professional manner that would be helpful for s
"""List available models, optionally filtered by connector type."""
models = modelRegistry.getAvailableModels()
if connectorType:
return [model.dict() for model in models if model.connectorType == connectorType]
return [model.dict() for model in models]
return [model.model_dump() for model in models if model.connectorType == connectorType]
return [model.model_dump() for model in models]
async def getModelInfo(self, modelName: str) -> Dict[str, Any]:
"""Get information about a specific model."""
model = modelRegistry.getModel(modelName)
if not model:
raise ValueError(f"Model {modelName} not found")
return model.dict()
return model.model_dump()
async def getModelsByTag(self, tag: str) -> List[str]:

View file

@ -4,10 +4,8 @@ Access control for the Application.
import logging
from typing import Dict, Any, List, Optional
from datetime import datetime
from modules.datamodels.datamodelUam import UserPrivilege, User, UserInDB, Mandate
from modules.datamodels.datamodelSecurity import AuthEvent
from modules.shared.timezoneUtils import get_utc_now
# Configure logger
logger = logging.getLogger(__name__)

View file

@ -3,19 +3,14 @@ Interface to the Gateway system.
Manages users and mandates for authentication.
"""
from datetime import datetime, timedelta, UTC
import os
import logging
from typing import Dict, Any, List, Optional, Union
import importlib
import json
from typing import Dict, Any, List, Optional
from passlib.context import CryptContext
import uuid
import re
from modules.connectors.connectorDbPostgre import DatabaseConnector
from modules.shared.configuration import APP_CONFIG
from modules.shared.timezoneUtils import get_utc_now, get_utc_timestamp
from modules.shared.timezoneUtils import get_utc_timestamp
from modules.interfaces.interfaceDbAppAccess import AppAccess
from modules.datamodels.datamodelUam import (
User,
@ -259,7 +254,7 @@ class AppObjects:
filteredUsers = self._uam(UserInDB, users)
# Convert to User models
return [User.from_dict(user) for user in filteredUsers]
return [User(**user) for user in filteredUsers]
def getUserByUsername(self, username: str) -> Optional[User]:
"""Returns a user by username."""
@ -272,7 +267,7 @@ class AppObjects:
# Find user by username
for user_dict in users:
if user_dict.get("username") == username:
return User.from_dict(user_dict)
return User(**user_dict)
logger.info(f"No user found with username {username}")
return None
@ -295,7 +290,7 @@ class AppObjects:
# Apply access control
filteredUsers = self._uam(UserInDB, [user_dict])
if filteredUsers:
return User.from_dict(filteredUsers[0])
return User(**filteredUsers[0])
return None
return None
@ -398,7 +393,7 @@ class AppObjects:
# Clear cache to ensure fresh data (already done above)
return User.from_dict(createdUser[0])
return User(**createdUser[0])
except ValueError as e:
logger.error(f"Error creating user: {str(e)}")
@ -416,9 +411,9 @@ class AppObjects:
raise ValueError(f"User {userId} not found")
# Update user data using model
updatedData = user.to_dict()
updatedData = user.model_dump()
updatedData.update(updateData)
updatedUser = User.from_dict(updatedData)
updatedUser = User(**updatedData)
# Update user record
self.db.recordModify(UserInDB, userId, updatedUser)
@ -647,7 +642,7 @@ class AppObjects:
"""Returns all mandates based on user access level."""
allMandates = self.db.getRecordset(Mandate)
filteredMandates = self._uam(Mandate, allMandates)
return [Mandate.from_dict(mandate) for mandate in filteredMandates]
return [Mandate(**mandate) for mandate in filteredMandates]
def getMandate(self, mandateId: str) -> Optional[Mandate]:
"""Returns a mandate by ID if user has access."""
@ -659,7 +654,7 @@ class AppObjects:
if not filteredMandates:
return None
return Mandate.from_dict(filteredMandates[0])
return Mandate(**filteredMandates[0])
def createMandate(self, name: str, language: str = "en") -> Mandate:
"""Creates a new mandate if user has permission."""
@ -674,7 +669,7 @@ class AppObjects:
if not createdRecord or not createdRecord.get("id"):
raise ValueError("Failed to create mandate record")
return Mandate.from_dict(createdRecord)
return Mandate(**createdRecord)
def updateMandate(self, mandateId: str, updateData: Dict[str, Any]) -> Mandate:
"""Updates a mandate if user has access."""
@ -689,9 +684,9 @@ class AppObjects:
raise ValueError(f"Mandate {mandateId} not found")
# Update mandate data using model
updatedData = mandate.to_dict()
updatedData = mandate.model_dump()
updatedData.update(updateData)
updatedMandate = Mandate.from_dict(updatedData)
updatedMandate = Mandate(**updatedData)
# Update mandate record
self.db.recordModify(Mandate, mandateId, updatedMandate)
@ -793,10 +788,10 @@ class AppObjects:
# Continue with saving the new token even if deletion fails
# Convert to dict and ensure all fields are properly set
token_dict = token.to_dict()
token_dict = token.model_dump()
# Ensure userId is set to current user
# Convert to dict and ensure all fields are properly set
token_dict = token.to_dict()
token_dict = token.model_dump()
# Ensure userId is set to current user
token_dict["userId"] = self.currentUser.id
@ -830,7 +825,7 @@ class AppObjects:
token.createdAt = get_utc_timestamp()
# Convert to dict and ensure all fields are properly set
token_dict = token.to_dict()
token_dict = token.model_dump()
# Ensure userId is set to current user
token_dict["userId"] = self.currentUser.id
@ -1087,7 +1082,7 @@ class AppObjects:
if not filtered_configs:
return None
return DataNeutraliserConfig.from_dict(filtered_configs[0])
return DataNeutraliserConfig(**filtered_configs[0])
except Exception as e:
logger.error(f"Error getting neutralization config: {str(e)}")
@ -1103,11 +1098,11 @@ class AppObjects:
if existing_config:
# Update existing config
update_data = existing_config.to_dict()
update_data = existing_config.model_dump()
update_data.update(config_data)
update_data["updatedAt"] = get_utc_timestamp()
updated_config = DataNeutraliserConfig.from_dict(update_data)
updated_config = DataNeutraliserConfig(**update_data)
self.db.recordModify(
DataNeutraliserConfig, existing_config.id, updated_config
)
@ -1118,10 +1113,10 @@ class AppObjects:
config_data["mandateId"] = self.mandateId
config_data["userId"] = self.userId
new_config = DataNeutraliserConfig.from_dict(config_data)
new_config = DataNeutraliserConfig(**config_data)
created_record = self.db.recordCreate(DataNeutraliserConfig, new_config)
return DataNeutraliserConfig.from_dict(created_record)
return DataNeutraliserConfig(**created_record)
except Exception as e:
logger.error(f"Error creating/updating neutralization config: {str(e)}")
@ -1142,7 +1137,7 @@ class AppObjects:
filtered_attributes = self._uam(DataNeutralizerAttributes, attributes)
return [
DataNeutralizerAttributes.from_dict(attr)
DataNeutralizerAttributes(**attr)
for attr in filtered_attributes
]
@ -1217,7 +1212,7 @@ def getRootInterface() -> AppObjects:
# Convert to User model (use helper compatible with our models)
user_data = users[0]
rootUser = User.from_dict(user_data)
rootUser = User(**user_data)
# Create root interface with the root user
_rootAppObjects = AppObjects(rootUser)

View file

@ -5,7 +5,7 @@ Handles user access management and permission checks.
from typing import Dict, Any, List, Optional
from modules.datamodels.datamodelUam import User, UserPrivilege
from modules.datamodels.datamodelChat import ChatWorkflow, ChatMessage, ChatLog, ChatStat, ChatDocument
from modules.datamodels.datamodelChat import ChatWorkflow
class ChatAccess:
"""

View file

@ -81,23 +81,14 @@ class ChatObjects:
object_fields = {}
# Get field information from the Pydantic model
model_fields = {}
if hasattr(model_class, '__fields__'):
model_fields = model_class.__fields__
elif hasattr(model_class, 'model_fields'):
model_fields = model_class.model_fields
for field_name, value in data.items():
# Check if this field should be stored as JSONB in the database
if field_name in model_fields:
field_info = model_fields[field_name]
# Handle both Pydantic v1 and v2
if hasattr(field_info, 'type_'):
field_type = field_info.type_ # Pydantic v1
elif hasattr(field_info, 'annotation'):
field_type = field_info.annotation # Pydantic v2
else:
field_type = type(value) # Fallback
# Pydantic v2 only
field_type = field_info.annotation
# Always route relational/object fields to object_fields for separate handling
if field_name in ['documents', 'stats']:
@ -511,12 +502,7 @@ class ChatObjects:
# Create documents in normalized documents table
created_documents = []
for doc_data in documents_to_create:
# Convert to dict if it's a Pydantic object
if hasattr(doc_data, 'model_dump'):
doc_dict = doc_data.model_dump() # Pydantic v2
elif hasattr(doc_data, 'to_dict'):
doc_dict = doc_data.to_dict()
else:
# Use the document data directly
doc_dict = doc_data
doc_dict["messageId"] = createdMessage["id"]
@ -626,13 +612,6 @@ class ChatObjects:
documents_data = object_fields['documents']
try:
for doc_data in documents_data:
if hasattr(doc_data, 'model_dump'):
doc_dict = doc_data.model_dump() # Pydantic v2
elif hasattr(doc_data, 'dict'):
doc_dict = doc_data.dict() # Pydantic v1
elif hasattr(doc_data, 'to_dict'):
doc_dict = doc_data.to_dict()
else:
doc_dict = doc_data
doc_dict["messageId"] = messageId
self.createDocument(doc_dict)

View file

@ -5,11 +5,10 @@ Handles user access management and permission checks.
import logging
from typing import Dict, Any, List, Optional
from modules.datamodels.datamodelUam import User, UserInDB
from modules.datamodels.datamodelUam import User
from modules.datamodels.datamodelUtils import Prompt
from modules.datamodels.datamodelFiles import FileItem, FileData
from modules.datamodels.datamodelVoice import VoiceSettings
from modules.datamodels.datamodelChat import ChatWorkflow, ChatMessage, ChatLog
from modules.datamodels.datamodelFiles import FileItem
from modules.datamodels.datamodelChat import ChatWorkflow
# Configure logger
logger = logging.getLogger(__name__)

View file

@ -7,8 +7,7 @@ import os
import logging
import base64
import hashlib
from datetime import datetime, UTC
from typing import Dict, Any, List, Optional, Union
from typing import Dict, Any, List, Optional
from modules.connectors.connectorDbPostgre import DatabaseConnector
from modules.interfaces.interfaceDbComponentAccess import ComponentAccess
@ -263,7 +262,7 @@ class ComponentObjects:
filteredPrompts = self._uam(Prompt, allPrompts)
# Convert to Prompt objects
return [Prompt.from_dict(prompt) for prompt in filteredPrompts]
return [Prompt(**prompt) for prompt in filteredPrompts]
except Exception as e:
logger.error(f"Error getting prompts: {str(e)}")
@ -276,7 +275,7 @@ class ComponentObjects:
return None
filteredPrompts = self._uam(Prompt, prompts)
return Prompt.from_dict(filteredPrompts[0]) if filteredPrompts else None
return Prompt(**filteredPrompts[0]) if filteredPrompts else None
def createPrompt(self, promptData: Dict[str, Any]) -> Dict[str, Any]:
"""Creates a new prompt if user has permission."""
@ -308,7 +307,7 @@ class ComponentObjects:
if not updatedPrompt:
raise ValueError("Failed to retrieve updated prompt")
return updatedPrompt.to_dict()
return updatedPrompt.model_dump()
except Exception as e:
logger.error(f"Error updating prompt: {str(e)}")
@ -886,7 +885,7 @@ class ComponentObjects:
if not settings_data.get("lastModified"):
settings_data["lastModified"] = get_utc_timestamp()
return VoiceSettings.from_dict(settings_data)
return VoiceSettings(**settings_data)
except Exception as e:
logger.error(f"Error getting voice settings: {str(e)}")
@ -945,7 +944,7 @@ class ComponentObjects:
raise ValueError("Failed to retrieve updated voice settings")
logger.info(f"Updated voice settings for user {userId}")
return updatedSettings.to_dict()
return updatedSettings.model_dump()
except Exception as e:
logger.error(f"Error updating voice settings: {str(e)}")
@ -997,7 +996,7 @@ class ComponentObjects:
}
createdRecord = self.createVoiceSettings(defaultSettings)
return VoiceSettings.from_dict(createdRecord)
return VoiceSettings(**createdRecord)
except Exception as e:
logger.error(f"Error getting or creating voice settings: {str(e)}")

View file

@ -6,7 +6,6 @@ Handles voice operations including speech-to-text, text-to-speech, and translati
import logging
from typing import Dict, Any, Optional, List
from datetime import datetime, UTC
from modules.connectors.connectorVoiceGoogle import ConnectorGoogleSpeech
from modules.datamodels.datamodelVoice import VoiceSettings

View file

@ -6,7 +6,6 @@ import logging
from pathlib import Path as FilePath
from typing import Dict, Any, List
from fastapi import HTTPException, status
from datetime import datetime
from modules.shared.configuration import APP_CONFIG
from modules.security.auth import limiter, getCurrentUser

View file

@ -1,17 +1,11 @@
from fastapi import APIRouter, HTTPException, Depends, Path, Response, Request
from typing import List, Dict, Any
from fastapi import APIRouter, HTTPException, Path, Response, Request
from fastapi import status
import inspect
import importlib
import os
from pydantic import BaseModel
import logging
# Import auth module
from modules.security.auth import limiter, getCurrentUser
from modules.security.auth import limiter
# Import the attribute definition and helper functions
from modules.datamodels.datamodelUam import User
from modules.shared.attributeUtils import getModelClasses, getModelAttributeDefinitions, AttributeResponse, AttributeDefinition
# Configure logger

View file

@ -6,14 +6,12 @@ Implements the endpoints for chat playground workflow management.
import logging
from typing import Optional, Dict, Any
from fastapi import APIRouter, HTTPException, Depends, Body, Path, Query, Request
from datetime import datetime
# Import auth modules
from modules.security.auth import limiter, getCurrentUser
# Import interfaces
import modules.interfaces.interfaceDbChatObjects as interfaceDbChatObjects
from modules.interfaces.interfaceDbChatObjects import getInterface
# Import models
from modules.datamodels.datamodelChat import ChatWorkflow, UserInputRequest

View file

@ -11,14 +11,13 @@ SECURITY NOTE:
from fastapi import APIRouter, HTTPException, Depends, Body, Path, Request, Response
from typing import List, Dict, Any, Optional
from fastapi import status
from datetime import datetime
import logging
import json
from modules.datamodels.datamodelUam import User, UserConnection, AuthAuthority, ConnectionStatus
from modules.datamodels.datamodelSecurity import Token
from modules.security.auth import getCurrentUser, limiter
from modules.interfaces.interfaceDbAppObjects import getInterface, getRootInterface
from modules.interfaces.interfaceDbAppObjects import getInterface
from modules.shared.timezoneUtils import get_utc_timestamp
# Configure logger
@ -197,7 +196,7 @@ async def create_connection(
)
# Save connection record - models now handle timestamp serialization automatically
interface.db.recordModify(UserConnection, connection.id, connection.to_dict())
interface.db.recordModify(UserConnection, connection.id, connection.model_dump())
return connection
@ -251,7 +250,7 @@ async def update_connection(
connection.lastChecked = get_utc_timestamp()
# Update connection - models now handle timestamp serialization automatically
interface.db.recordModify(UserConnection, connectionId, connection.to_dict())
interface.db.recordModify(UserConnection, connectionId, connection.model_dump())
# Get token status for the updated connection
@ -386,7 +385,7 @@ async def disconnect_service(
connection.lastChecked = get_utc_timestamp()
# Update connection record - models now handle timestamp serialization automatically
interface.db.recordModify(UserConnection, connectionId, connection.to_dict())
interface.db.recordModify(UserConnection, connectionId, connection.model_dump())
return {"message": "Service disconnected successfully"}

View file

@ -1,14 +1,7 @@
from fastapi import APIRouter, HTTPException, Depends, File, UploadFile, Form, Path, Request, status, Query, Response, Body
from fastapi.responses import JSONResponse, FileResponse
from typing import List, Dict, Any, Optional, Union
from fastapi.responses import JSONResponse
from typing import List, Dict, Any, Optional
import logging
from datetime import datetime, timezone
from dataclasses import dataclass
import io
import inspect
import importlib
import os
from pydantic import BaseModel
# Import auth module
from modules.security.auth import limiter, getCurrentUser
@ -16,7 +9,7 @@ from modules.security.auth import limiter, getCurrentUser
# Import interfaces
import modules.interfaces.interfaceDbComponentObjects as interfaceDbComponentObjects
from modules.datamodels.datamodelFiles import FileItem, FilePreview
from modules.shared.attributeUtils import getModelAttributeDefinitions, AttributeResponse, AttributeDefinition
from modules.shared.attributeUtils import getModelAttributeDefinitions
from modules.datamodels.datamodelUam import User
# Configure logger
@ -103,7 +96,7 @@ async def upload_file(
fileItem.workflowId = workflowId
# Convert FileItem to dictionary for JSON response
fileMeta = fileItem.to_dict()
fileMeta = fileItem.model_dump()
# Response with duplicate information
return JSONResponse({

View file

@ -4,21 +4,16 @@ Implements the endpoints for mandate management.
"""
from fastapi import APIRouter, HTTPException, Depends, Body, Path, Request, Response
from typing import List, Dict, Any, Optional
from typing import List, Dict, Any
from fastapi import status
from datetime import datetime
import logging
import inspect
import importlib
import os
from pydantic import BaseModel
# Import auth module
from modules.security.auth import limiter, getCurrentUser
# Import interfaces
import modules.interfaces.interfaceDbAppObjects as interfaceDbAppObjects
from modules.shared.attributeUtils import getModelAttributeDefinitions, AttributeResponse, AttributeDefinition
from modules.shared.attributeUtils import getModelAttributeDefinitions
# Import the model classes
from modules.datamodels.datamodelUam import Mandate, User
@ -136,7 +131,7 @@ async def update_mandate(
)
# Update mandate
updatedMandate = appInterface.updateMandate(mandateId, mandateData.to_dict())
updatedMandate = appInterface.updateMandate(mandateId, mandateData.model_dump())
if not updatedMandate:
raise HTTPException(

View file

@ -1,12 +1,7 @@
from fastapi import APIRouter, HTTPException, Depends, Body, Query, Path, Request, Response
from typing import List, Dict, Any, Optional
from fastapi import APIRouter, HTTPException, Depends, Body, Path, Request
from typing import List, Dict, Any
from fastapi import status
from datetime import datetime
import logging
import inspect
import importlib
import os
from pydantic import BaseModel
# Import auth module
from modules.security.auth import limiter, getCurrentUser
@ -14,7 +9,6 @@ from modules.security.auth import limiter, getCurrentUser
# Import interfaces
import modules.interfaces.interfaceDbComponentObjects as interfaceDbComponentObjects
from modules.datamodels.datamodelUtils import Prompt
from modules.shared.attributeUtils import getModelAttributeDefinitions, AttributeResponse, AttributeDefinition
from modules.datamodels.datamodelUam import User
# Configure logger

View file

@ -6,12 +6,7 @@ Implements the endpoints for user management.
from fastapi import APIRouter, HTTPException, Depends, Body, Path, Request, Response
from typing import List, Dict, Any, Optional
from fastapi import status
from datetime import datetime
import logging
import inspect
import importlib
import os
from pydantic import BaseModel
# Import interfaces and models
import modules.interfaces.interfaceDbAppObjects as interfaceDbAppObjects
@ -19,8 +14,6 @@ from modules.security.auth import getCurrentUser, limiter, getCurrentUser
# Import the attribute definition and helper functions
from modules.datamodels.datamodelUam import User, UserPrivilege
from modules.shared.attributeUtils import AttributeDefinition
from modules.shared.attributeUtils import getModelAttributeDefinitions, AttributeResponse
# Configure logger
logger = logging.getLogger(__name__)

View file

@ -7,17 +7,14 @@ from fastapi.responses import HTMLResponse, RedirectResponse, JSONResponse
import logging
import json
from typing import Dict, Any, Optional
from datetime import datetime, timedelta
from requests_oauthlib import OAuth2Session
import httpx
from modules.shared.configuration import APP_CONFIG
from modules.interfaces.interfaceDbAppObjects import getInterface, getRootInterface
from modules.datamodels.datamodelUam import AuthAuthority, User, ConnectionStatus, UserConnection
from modules.datamodels.datamodelSecurity import Token
from modules.security.auth import getCurrentUser, limiter
from modules.shared.attributeUtils import ModelMixin
from modules.shared.timezoneUtils import get_utc_now, create_expiration_timestamp, get_utc_timestamp
from modules.shared.timezoneUtils import create_expiration_timestamp, get_utc_timestamp
# Configure logger
logger = logging.getLogger(__name__)
@ -377,7 +374,7 @@ async def auth_callback(code: str, state: str, request: Request) -> HTMLResponse
window.opener.postMessage({{
type: 'google_auth_success',
access_token: {json.dumps(token_response["access_token"])},
token_data: {json.dumps(token.to_dict())}
token_data: {json.dumps(token.model_dump())}
}}, '*');
}}
setTimeout(() => window.close(), 1000);
@ -471,7 +468,7 @@ async def auth_callback(code: str, state: str, request: Request) -> HTMLResponse
# Update connection record directly
from modules.datamodels.datamodelUam import UserConnection
rootInterface.db.recordModify(UserConnection, connection_id, connection.to_dict())
rootInterface.db.recordModify(UserConnection, connection_id, connection.model_dump())
# Save token
@ -731,7 +728,7 @@ async def refresh_token(
google_connection.expiresAt = float(current_token.expiresAt) if current_token.expiresAt else google_connection.expiresAt
google_connection.lastChecked = get_utc_timestamp()
google_connection.status = ConnectionStatus.ACTIVE
appInterface.db.recordModify(UserConnection, google_connection.id, google_connection.to_dict())
appInterface.db.recordModify(UserConnection, google_connection.id, google_connection.model_dump())
# Calculate time until expiration
current_time = get_utc_timestamp()

View file

@ -5,12 +5,11 @@ Routes for local security and authentication.
from fastapi import APIRouter, HTTPException, status, Depends, Request, Response, Body
from fastapi.security import OAuth2PasswordRequestForm
import logging
from typing import Dict, Any, Optional
from datetime import datetime, timedelta
from typing import Dict, Any
from datetime import datetime
from fastapi.responses import JSONResponse, HTMLResponse, RedirectResponse
import uuid
from jose import jwt
from pydantic import BaseModel
# Import auth modules
from modules.security.auth import getCurrentUser, limiter, SECRET_KEY, ALGORITHM
@ -18,7 +17,6 @@ from modules.security.jwtService import createAccessToken, createRefreshToken, s
from modules.interfaces.interfaceDbAppObjects import getInterface, getRootInterface
from modules.datamodels.datamodelUam import User, UserInDB, AuthAuthority, UserPrivilege
from modules.datamodels.datamodelSecurity import Token
from modules.shared.attributeUtils import ModelMixin
# Configure logger
logger = logging.getLogger(__name__)

View file

@ -7,7 +7,6 @@ from fastapi.responses import HTMLResponse, RedirectResponse, JSONResponse
import logging
import json
from typing import Dict, Any, Optional
from datetime import datetime, timedelta
import msal
import httpx
@ -17,8 +16,7 @@ from modules.datamodels.datamodelUam import AuthAuthority, User, ConnectionStatu
from modules.datamodels.datamodelSecurity import Token
from modules.security.auth import getCurrentUser, limiter
from modules.security.jwtService import createAccessToken
from modules.shared.attributeUtils import ModelMixin
from modules.shared.timezoneUtils import get_utc_now, create_expiration_timestamp, get_utc_timestamp
from modules.shared.timezoneUtils import create_expiration_timestamp, get_utc_timestamp
# Configure logger
logger = logging.getLogger(__name__)
@ -234,7 +232,7 @@ async def auth_callback(code: str, state: str, request: Request) -> HTMLResponse
appInterface.saveAccessToken(jwt_token_obj)
# Convert token to dict and ensure proper timestamp handling
token_dict = jwt_token_obj.to_dict()
token_dict = jwt_token_obj.model_dump()
# Remove datetime conversion logic - models now handle this automatically
# The token model already returns float timestamps
@ -341,7 +339,7 @@ async def auth_callback(code: str, state: str, request: Request) -> HTMLResponse
connection.externalEmail = user_info.get("mail")
# Update connection record directly
rootInterface.db.recordModify(UserConnection, connection_id, connection.to_dict())
rootInterface.db.recordModify(UserConnection, connection_id, connection.model_dump())
# Save token
@ -588,7 +586,7 @@ async def refresh_token(
msft_connection.status = ConnectionStatus.ACTIVE
# Save updated connection
appInterface.db.recordModify(UserConnection, msft_connection.id, msft_connection.to_dict())
appInterface.db.recordModify(UserConnection, msft_connection.id, msft_connection.model_dump())
# Calculate time until expiration
current_time = get_utc_timestamp()

View file

@ -4,17 +4,14 @@ Replaces Azure voice services with Google Cloud Speech-to-Text and Translation
Includes WebSocket support for real-time voice streaming
"""
import os
import logging
import json
import base64
import asyncio
from fastapi import APIRouter, File, Form, UploadFile, Depends, HTTPException, Body, WebSocket, WebSocketDisconnect
from fastapi.responses import Response
from typing import Optional, Dict, Any, List
from modules.security.auth import getCurrentUser
from modules.datamodels.datamodelUam import User
from modules.interfaces.interfaceDbComponentObjects import getInterface
from modules.interfaces.interfaceVoiceObjects import getVoiceInterface, VoiceObjects
logger = logging.getLogger(__name__)
@ -391,7 +388,7 @@ async def get_voice_settings(current_user: User = Depends(getCurrentUser)):
return {
"success": True,
"data": {
"user_settings": voice_settings.to_dict(),
"user_settings": voice_settings.model_dump(),
"default_settings": {
"sttLanguage": "de-DE",
"ttsLanguage": "de-DE",

View file

@ -3,13 +3,9 @@ Workflow routes for the backend API.
Implements the endpoints for workflow management according to the state machine.
"""
import os
import json
import logging
from typing import List, Dict, Any, Optional
from fastapi import APIRouter, HTTPException, Depends, Body, Path, Query, Response, status, Request
from fastapi.responses import HTMLResponse, RedirectResponse, JSONResponse
from datetime import datetime, timedelta
# Import auth modules
from modules.security.auth import limiter, getCurrentUser
@ -28,7 +24,6 @@ from modules.datamodels.datamodelChat import (
)
from modules.shared.attributeUtils import getModelAttributeDefinitions, AttributeResponse
from modules.datamodels.datamodelUam import User
from modules.shared.timezoneUtils import get_utc_timestamp
# Configure logger

View file

@ -3,8 +3,6 @@ Authentication module for backend API.
Handles JWT-based authentication, token generation, and user context.
"""
from datetime import datetime, timedelta, timezone
import uuid
from typing import Optional, Dict, Any, Tuple
from fastapi import Depends, HTTPException, status, Request, Response
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
@ -14,7 +12,6 @@ from slowapi import Limiter
from slowapi.util import get_remote_address
from modules.shared.configuration import APP_CONFIG
from modules.shared.timezoneUtils import get_utc_now, get_utc_timestamp
from modules.interfaces.interfaceDbAppObjects import getRootInterface
from modules.datamodels.datamodelUam import User, AuthAuthority
from modules.datamodels.datamodelSecurity import Token

View file

@ -5,7 +5,6 @@ Handles all token operations including automatic refresh for backend services.
import logging
import httpx
from datetime import datetime
from typing import Optional, Dict, Any, Callable
from modules.datamodels.datamodelSecurity import Token

View file

@ -8,7 +8,6 @@ when API endpoints are accessed, providing seamless user experience.
import logging
from fastapi import Request, Response
from starlette.middleware.base import BaseHTTPMiddleware
from starlette.responses import Response as StarletteResponse
from typing import Callable
import asyncio
from modules.security.tokenRefreshService import token_refresh_service

View file

@ -7,11 +7,8 @@ to ensure users don't experience token expiration issues.
"""
import logging
from typing import Optional, Dict, Any, List
from datetime import datetime, timedelta
from modules.interfaces.interfaceDbAppObjects import getInterface
from modules.datamodels.datamodelUam import User, UserConnection, AuthAuthority
from modules.datamodels.datamodelSecurity import Token
from typing import Dict, Any
from modules.datamodels.datamodelUam import UserConnection, AuthAuthority
from modules.shared.timezoneUtils import get_utc_timestamp
from modules.shared.auditLogger import audit_logger

View file

@ -1,16 +1,14 @@
import logging
from typing import Dict, Any, List, Optional, Tuple, Union
from typing import Dict, Any, List, Optional, Union
from modules.datamodels.datamodelChat import PromptPlaceholder, ChatDocument
from modules.services.serviceExtraction.mainServiceExtraction import ExtractionService
from modules.datamodels.datamodelAi import AiCallRequest, AiCallOptions, OperationTypeEnum, PriorityEnum
from modules.datamodels.datamodelExtraction import ChunkResult, ContentExtracted
from modules.aicore.aicorePluginTavily import WebResearchRequest, WebResearchResult
from modules.interfaces.interfaceAiObjects import AiObjects
from modules.services.serviceAi.subCoreAi import SubCoreAi
from modules.services.serviceAi.subDocumentProcessing import SubDocumentProcessing
from modules.services.serviceAi.subWebResearch import SubWebResearch
from modules.services.serviceAi.subDocumentGeneration import SubDocumentGeneration
from modules.services.serviceAi.subSharedAiUtils import sanitizePromptContent
logger = logging.getLogger(__name__)

View file

@ -3,7 +3,6 @@ import logging
from typing import Dict, Any, List, Optional, Tuple, Union
from modules.datamodels.datamodelChat import PromptPlaceholder, ChatDocument
from modules.datamodels.datamodelAi import AiCallRequest, AiCallOptions, OperationTypeEnum, PriorityEnum, ProcessingModeEnum
from modules.datamodels.datamodelExtraction import ContentPart
from modules.services.serviceAi.subSharedAiUtils import (
buildPromptWithPlaceholders,
extractTextFromContentParts,

View file

@ -3,9 +3,9 @@ import json
import logging
import time
from datetime import datetime, UTC
from typing import Dict, Any, List, Optional, Tuple, Union
from typing import Dict, Any, List, Optional
from modules.datamodels.datamodelChat import ChatDocument
from modules.datamodels.datamodelAi import AiCallOptions, OperationTypeEnum
from modules.datamodels.datamodelAi import AiCallOptions
logger = logging.getLogger(__name__)

View file

@ -2,7 +2,7 @@ import json
import logging
import re
import time
from typing import Dict, Any, List, Optional, Tuple, Union
from typing import Dict, Any, List, Optional
from modules.datamodels.datamodelChat import ChatDocument
from modules.datamodels.datamodelAi import AiCallRequest, AiCallOptions, OperationTypeEnum, PriorityEnum
from modules.datamodels.datamodelExtraction import ChunkResult, ContentExtracted, PartResult, ExtractionOptions, MergeStrategy

View file

@ -8,7 +8,6 @@ to maintain DRY principles and ensure consistency.
import re
import logging
from typing import Dict, Any, List, Optional, Union
from modules.datamodels.datamodelChat import PromptPlaceholder
logger = logging.getLogger(__name__)

View file

@ -1,7 +1,6 @@
import logging
from typing import Dict, Any, List, Optional, Tuple, Union
from typing import Optional
from modules.aicore.aicorePluginTavily import WebResearchRequest, WebResearchResult
from modules.interfaces.interfaceAiObjects import AiObjects
from modules.shared.configuration import APP_CONFIG
logger = logging.getLogger(__name__)

View file

@ -1,7 +1,7 @@
"""
Intelligent Token-Aware Merger for optimizing AI calls based on LLM token limits.
"""
from typing import List, Dict, Any, Tuple
from typing import List, Dict, Any
import logging
from modules.datamodels.datamodelExtraction import ContentPart
from .subUtils import makeId

View file

@ -4,7 +4,6 @@ Renderer registry for automatic discovery and registration of renderers.
import logging
import importlib
import pkgutil
from typing import Dict, Type, List, Optional
from .rendererBaseTemplate import BaseRenderer

View file

@ -4,8 +4,6 @@ CSV renderer for report generation.
from .rendererBaseTemplate import BaseRenderer
from typing import Dict, Any, Tuple, List
import csv
import io
class RendererCsv(BaseRenderer):
"""Renders content to CSV format with format-specific extraction."""

View file

@ -4,7 +4,6 @@ Image renderer for report generation using AI image generation.
from .rendererBaseTemplate import BaseRenderer
from typing import Dict, Any, Tuple, List
import base64
import logging
logger = logging.getLogger(__name__)

View file

@ -6,7 +6,6 @@ from .rendererBaseTemplate import BaseRenderer
from typing import Dict, Any, Tuple, List
import io
import base64
from datetime import datetime, UTC
try:
from reportlab.lib.pagesizes import letter, A4

View file

@ -1,7 +1,6 @@
import json
import os
from typing import Any, Dict, List, Set
from datetime import datetime, UTC
class NormalizationService:

View file

@ -1,11 +1,9 @@
"""Connector for SharePoint operations using Microsoft Graph API."""
import logging
import json
import aiohttp
import asyncio
from typing import Dict, Any, List, Optional
from datetime import datetime, UTC
logger = logging.getLogger(__name__)

View file

@ -4,7 +4,6 @@ Provides centralized access to configuration, events, and other utilities.
"""
import logging
import os
from typing import Any, Optional, Dict, Callable, List
from modules.shared.configuration import APP_CONFIG
from modules.shared.eventManagement import eventManager

View file

@ -1,5 +1,4 @@
import logging
import uuid
from typing import Dict, Any, List, Optional
from modules.datamodels.datamodelUam import User, UserConnection
from modules.datamodels.datamodelChat import ChatDocument, ChatMessage, ChatStat, ChatLog

View file

@ -3,52 +3,14 @@ Shared utilities for model attributes and labels.
"""
from pydantic import BaseModel, Field, ConfigDict
from typing import Dict, Any, List, Type, Optional, Union
from typing import Dict, Any, List, Type, Optional
import inspect
import importlib
import os
from datetime import datetime
class ModelMixin:
"""Mixin class that provides serialization methods for Pydantic models."""
def to_dict(self) -> Dict[str, Any]:
"""
Convert a Pydantic model to a dictionary.
Handles both Pydantic v1 and v2.
All timestamp fields remain as float values.
Returns:
Dict[str, Any]: Dictionary representation of the model
"""
# Get the raw dictionary
if hasattr(self, "model_dump"):
data: Dict[str, Any] = self.model_dump() # Pydantic v2
else:
data: Dict[str, Any] = self.dict() # Pydantic v1
# All fields (including timestamps) remain in their original format
# No conversions needed - timestamps are already float
return data
@classmethod
def from_dict(cls, data: Dict[str, Any]) -> "ModelMixin":
"""
Create a Pydantic model instance from a dictionary.
Args:
data: Dictionary containing the model data
Returns:
ModelMixin: New instance of the model class
"""
return cls(**data)
# Define the AttributeDefinition class here instead of importing it
class AttributeDefinition(BaseModel, ModelMixin):
class AttributeDefinition(BaseModel):
"""Definition of a model attribute with its metadata."""
name: str
@ -72,39 +34,7 @@ class AttributeDefinition(BaseModel, ModelMixin):
MODEL_LABELS: Dict[str, Dict[str, Dict[str, str]]] = {}
def to_dict(model: BaseModel) -> Dict[str, Any]:
"""
Convert a Pydantic model to a dictionary.
Handles both Pydantic v1 and v2.
Args:
model: The Pydantic model instance to convert
Returns:
Dict[str, Any]: Dictionary representation of the model
"""
if hasattr(model, "model_dump"):
return model.model_dump() # Pydantic v2
return model.dict() # Pydantic v1
def from_dict(model_class: Type[BaseModel], data: Dict[str, Any]) -> BaseModel:
"""
Create a Pydantic model instance from a dictionary.
Args:
model_class: The Pydantic model class to instantiate
data: Dictionary containing the model data
Returns:
BaseModel: New instance of the model class
"""
return model_class(**data)
def register_model_labels(
model_name: str, model_label: Dict[str, str], labels: Dict[str, Dict[str, str]]
):
def register_model_labels(model_name: str, model_label: Dict[str, str], labels: Dict[str, Dict[str, str]]):
"""
Register labels for a model's attributes and the model itself.
@ -154,9 +84,7 @@ def get_model_label(model_name: str, language: str = "en") -> str:
return model_label.get(language, model_label.get("en", model_name))
def getModelAttributeDefinitions(
modelClass: Type[BaseModel] = None, userLanguage: str = "en"
) -> Dict[str, Any]:
def getModelAttributeDefinitions(modelClass: Type[BaseModel] = None, userLanguage: str = "en") -> Dict[str, Any]:
"""
Get attribute definitions for a model class.
@ -175,8 +103,7 @@ def getModelAttributeDefinitions(
labels = get_model_labels(model_name, userLanguage)
model_label = get_model_label(model_name, userLanguage)
# Handle both Pydantic v1 and v2
if hasattr(modelClass, "model_fields"): # Pydantic v2
# Pydantic v2 only
fields = modelClass.model_fields
for name, field in fields.items():
# Extract frontend metadata from field info
@ -241,79 +168,6 @@ def getModelAttributeDefinitions(
"options": frontend_options,
}
)
else: # Pydantic v1
fields = modelClass.__fields__
for name, field in fields.items():
# Extract frontend metadata from field info
field_info = field.field_info if hasattr(field, "field_info") else None
# Check both direct attributes and extra field for frontend metadata
frontend_type = None
frontend_readonly = False
frontend_required = field.required
frontend_options = None
if field_info:
# Try direct attributes first
frontend_type = getattr(field_info, "frontend_type", None)
frontend_readonly = getattr(field_info, "frontend_readonly", False)
frontend_required = getattr(
field_info, "frontend_required", frontend_required
)
frontend_options = getattr(field_info, "frontend_options", None)
# If not found, check extra field
if hasattr(field_info, "extra") and field_info.extra:
if frontend_type is None:
frontend_type = field_info.extra.get("frontend_type")
if not frontend_readonly:
frontend_readonly = field_info.extra.get(
"frontend_readonly", False
)
if (
frontend_required == field.required
): # Only override if we didn't get it from direct attribute
frontend_required = field_info.extra.get(
"frontend_required", frontend_required
)
if frontend_options is None:
frontend_options = field_info.extra.get("frontend_options")
# Use frontend type if available, otherwise fall back to Python type
# Handle both Pydantic v1 and v2
if hasattr(field, 'type_'):
field_annotation = field.type_ # Pydantic v1
elif hasattr(field, 'annotation'):
field_annotation = field.annotation # Pydantic v2
else:
field_annotation = type(None) # Fallback
field_type = (
frontend_type
if frontend_type
else (
field_annotation.__name__
if hasattr(field_annotation, "__name__")
else str(field_annotation)
)
)
attributes.append(
{
"name": name,
"type": field_type,
"required": frontend_required,
"description": field.field_info.description
if hasattr(field.field_info, "description")
else "",
"label": labels.get(name, name),
"placeholder": f"Please enter {labels.get(name, name)}",
"editable": not frontend_readonly,
"visible": True,
"order": len(attributes),
"readonly": frontend_readonly,
"options": frontend_options,
}
)
return {"model": model_label, "attributes": attributes}

View file

@ -3,8 +3,7 @@ Timezone utilities for consistent timestamp handling across the gateway.
Ensures all timestamps are properly handled as UTC.
"""
from datetime import datetime, timezone, timedelta
from typing import Union, Optional
from datetime import datetime, timezone
import time
def get_utc_now() -> datetime:
@ -25,21 +24,6 @@ def get_utc_timestamp() -> float:
"""
return time.time()
def to_utc_timestamp(dt: datetime) -> float:
    """Convert a datetime object into a UTC timestamp.

    Naive datetimes (no tzinfo) are interpreted as UTC before conversion.

    Args:
        dt (datetime): Datetime object to convert.

    Returns:
        float: Seconds since the Unix epoch (UTC).
    """
    # Attach UTC to naive values so .timestamp() does not apply local time.
    aware = dt if dt.tzinfo is not None else dt.replace(tzinfo=timezone.utc)
    return aware.timestamp()
def create_expiration_timestamp(expires_in_seconds: int) -> float:
"""
Create a new expiration timestamp from seconds until expiration.

View file

@ -1,7 +1,5 @@
from enum import Enum
from typing import Dict, List, Optional, Any, Literal
from datetime import datetime, UTC
from pydantic import BaseModel, Field
import logging
from functools import wraps

View file

@ -4,11 +4,9 @@ Handles SharePoint document operations using the SharePoint service.
"""
import logging
import json
import re
from typing import Dict, Any, List, Optional
from datetime import datetime, UTC
import base64
from urllib.parse import urlparse
import aiohttp
import asyncio

View file

@ -1,7 +1,6 @@
# adaptiveLearningEngine.py
# Enhanced learning engine that tracks validation patterns and adapts prompts
import json
import logging
from typing import Dict, Any, List, Optional
from datetime import datetime, timezone

View file

@ -3,7 +3,6 @@
import logging
import json
import re
from typing import List, Dict, Any
logger = logging.getLogger(__name__)

View file

@ -1,7 +1,6 @@
# learningEngine.py
# Learning engine for adaptive React mode
import json
import logging
from typing import Dict, Any, List
from datetime import datetime, timezone

View file

@ -12,7 +12,6 @@ from modules.datamodels.datamodelChat import (
ActionResult
)
from modules.datamodels.datamodelChat import ChatWorkflow
from modules.datamodels.datamodelAi import AiCallOptions, OperationTypeEnum, ProcessingModeEnum, PriorityEnum
from modules.workflows.processing.modes.modeBase import BaseMode
from modules.workflows.processing.shared.executionState import TaskExecutionState, shouldContinue
from modules.workflows.processing.shared.promptGenerationActionsReact import (

View file

@ -3,7 +3,6 @@
import logging
from typing import List
from datetime import datetime, UTC
from modules.datamodels.datamodelChat import TaskStep
from modules.datamodels.datamodelChat import ActionResult

View file

@ -1,13 +1,11 @@
# methodDiscovery.py
# Method discovery and management for workflow execution
import json
import logging
import importlib
import pkgutil
import inspect
from typing import Any, Dict, List
from modules.datamodels.datamodelChat import TaskContext, ReviewContext, DocumentExchange
from modules.workflows.methods.methodBase import MethodBase
# Set up logger

View file

@ -32,7 +32,6 @@ Following placeholders are populated directly by prompt builders with according
import json
import logging
from typing import Dict, Any, List
from modules.datamodels.datamodelChat import ChatDocument
logger = logging.getLogger(__name__)
from modules.workflows.processing.shared.methodDiscovery import (methods, discoverMethods)

View file

@ -3,7 +3,6 @@ Actionplan Mode Prompt Generation
Handles prompt templates and extraction functions for actionplan mode action handling.
"""
import json
import logging
from typing import Dict, Any, List
from modules.datamodels.datamodelChat import PromptBundle, PromptPlaceholder

View file

@ -3,7 +3,6 @@ Task Planning Prompt Generation
Handles prompt templates and extraction functions for task planning phase.
"""
import json
import logging
from typing import Dict, Any, List
from modules.datamodels.datamodelChat import PromptBundle, PromptPlaceholder

View file

@ -3,7 +3,7 @@
import logging
from typing import Dict, Any, Optional, List
from modules.datamodels.datamodelChat import TaskStep, TaskContext, TaskPlan, TaskResult, ReviewResult
from modules.datamodels.datamodelChat import TaskStep, TaskContext, TaskPlan, TaskResult
from modules.datamodels.datamodelChat import ChatWorkflow
from modules.workflows.processing.modes.modeBase import BaseMode
from modules.workflows.processing.modes.modeActionplan import ActionplanMode
@ -289,7 +289,7 @@ class WorkflowProcessor:
if hasattr(taskResult, 'met_criteria'):
# This is a ReviewResult object
met = taskResult.met_criteria if taskResult.met_criteria else []
reviewResult = taskResult.to_dict()
reviewResult = taskResult.model_dump()
else:
# This is a TaskResult object
met = []
@ -302,7 +302,7 @@ class WorkflowProcessor:
handoverData = {
'task_id': taskStep.id,
'task_description': taskStep.objective,
'actions': [action.to_dict() for action in taskActions] if taskActions else [],
'actions': [action.model_dump() for action in taskActions] if taskActions else [],
'review_result': reviewResult,
'workflow_id': workflow.id,
'handover_time': self.services.utils.timestampGetUtc()

View file

@ -10,7 +10,7 @@ from modules.datamodels.datamodelChat import (
ChatWorkflow,
ChatDocument
)
from modules.datamodels.datamodelChat import TaskItem, TaskStatus, TaskContext
from modules.datamodels.datamodelChat import TaskContext
from modules.workflows.processing.workflowProcessor import WorkflowProcessor, WorkflowStoppedException

View file

@ -21,7 +21,6 @@ Usage:
"""
import sys
import os
import argparse
import shutil
from pathlib import Path