From b97670d9398d0b2114e95f1a50ba150bc106d1ce Mon Sep 17 00:00:00 2001
From: ValueOn AG
Date: Wed, 15 Oct 2025 18:23:03 +0200
Subject: [PATCH] Adapted chatbot integration
---
app.py | 108 +++-------
modules/connectors/connectorDbPostgre.py | 6 +
.../chatBot/{service.py => mainChatBot.py} | 192 +++++++++++++++++-
.../{database.py => subChatbotDatabase.py} | 0
modules/features/featuresLifecycle.py | 26 +++
modules/features/init.py | 14 --
modules/interfaces/interfaceDbAppObjects.py | 33 ++-
modules/interfaces/interfaceDbChatObjects.py | 40 ++--
modules/routes/routeChatbot.py | 22 +-
modules/routes/routeDataPrompts.py | 7 +-
modules/routes/routeDataUsers.py | 10 +-
.../services/serviceExtraction/subRegistry.py | 2 -
.../serviceWorkflow/mainServiceWorkflow.py | 40 +---
modules/shared/attributeUtils.py | 14 +-
.../workflows/processing/modes/modeReact.py | 10 +-
requirements.txt | 3 +
16 files changed, 336 insertions(+), 191 deletions(-)
rename modules/features/chatBot/{service.py => mainChatBot.py} (81%)
rename modules/features/chatBot/{database.py => subChatbotDatabase.py} (100%)
create mode 100644 modules/features/featuresLifecycle.py
delete mode 100644 modules/features/init.py
diff --git a/app.py b/app.py
index 384d6bfa..fc29fe5b 100644
--- a/app.py
+++ b/app.py
@@ -2,30 +2,21 @@ import os
import sys
import asyncio
from urllib.parse import quote_plus
-from sqlalchemy.ext.asyncio import create_async_engine, async_sessionmaker, AsyncSession
-from modules.features.chatBot.database import init_models as init_chatbot_models
os.environ["NUMEXPR_MAX_THREADS"] = "12"
-# Fix for Windows asyncio compatibility with psycopg
-if sys.platform == "win32":
- asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
-
-from fastapi import FastAPI, HTTPException, Depends, Body, status, Response
+from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
-from fastapi.openapi.models import OAuthFlows as OAuthFlowsModel
from fastapi.security import HTTPBearer
from contextlib import asynccontextmanager
-
import logging
from logging.handlers import RotatingFileHandler
-from datetime import timedelta, datetime
-import pathlib
+from datetime import datetime
from modules.shared.configuration import APP_CONFIG
from modules.shared.eventManagement import eventManager
-
+from modules.features import featuresLifecycle as featuresLifecycle
class DailyRotatingFileHandler(RotatingFileHandler):
"""
@@ -169,6 +160,24 @@ def initLogging():
)
return True
+ # Add filter to normalize problematic unicode (e.g., arrows) to ASCII for terminals like cp1252
+ class UnicodeArrowFilter(logging.Filter):
+ def filter(self, record):
+ if isinstance(record.msg, str):
+ translation_map = {
+ "\u2192": "->", # rightwards arrow
+ "\u2190": "<-", # leftwards arrow
+ "\u2194": "<->", # left right arrow
+ "\u21D2": "=>", # rightwards double arrow
+ "\u21D0": "<=", # leftwards double arrow
+ "\u21D4": "<=>", # left right double arrow
+ "\u00AB": "<<", # left-pointing double angle quotation mark
+ "\u00BB": ">>", # right-pointing double angle quotation mark
+ }
+ for u, ascii_eq in translation_map.items():
+ record.msg = record.msg.replace(u, ascii_eq)
+ return True
+
# Configure handlers based on config
handlers = []
@@ -180,6 +189,7 @@ def initLogging():
consoleHandler.addFilter(HttpcoreStarFilter())
consoleHandler.addFilter(HTTPDebugFilter())
consoleHandler.addFilter(EmojiFilter())
+ consoleHandler.addFilter(UnicodeArrowFilter())
handlers.append(consoleHandler)
# Add file handler if enabled
@@ -195,12 +205,14 @@ def initLogging():
filename_prefix="log_app",
max_bytes=rotationSize,
backup_count=backupCount,
+ encoding="utf-8",
)
fileHandler.setFormatter(fileFormatter)
fileHandler.addFilter(ChromeDevToolsFilter())
fileHandler.addFilter(HttpcoreStarFilter())
fileHandler.addFilter(HTTPDebugFilter())
fileHandler.addFilter(EmojiFilter())
+ fileHandler.addFilter(UnicodeArrowFilter())
handlers.append(fileHandler)
# Configure the root logger
@@ -247,6 +259,9 @@ def make_sqlalchemy_db_url() -> str:
db = APP_CONFIG.get("SQLALCHEMY_DB_DATABASE", "project_gateway")
user = APP_CONFIG.get("SQLALCHEMY_DB_USER", "postgres")
pwd = quote_plus(APP_CONFIG.get("SQLALCHEMY_DB_PASSWORD_SECRET", ""))
+ # On Windows, prefer asyncpg to avoid psycopg + ProactorEventLoop incompatibility
+ if sys.platform == "win32":
+ return f"postgresql+asyncpg://{user}:{pwd}@{host}:{port}/{db}"
return f"postgresql+psycopg://{user}:{pwd}@{host}:{port}/{db}"
@@ -261,56 +276,15 @@ instanceLabel = APP_CONFIG.get("APP_ENV_LABEL")
async def lifespan(app: FastAPI):
logger.info("Application is starting up")
- # --- Init SQLAlchemy ---
-
- engine = create_async_engine(
- make_sqlalchemy_db_url(), pool_pre_ping=True, echo=False
- )
- SessionLocal = async_sessionmaker(
- engine, expire_on_commit=False, class_=AsyncSession
- )
- app.state.checkpoint_engine = engine
- app.state.checkpoint_sessionmaker = SessionLocal
-
- # NOTE: Might need Alembic migrations in the future
- await init_chatbot_models(engine)
-
- # --- Sync tools from registry to database ---
- from modules.features.chatBot.database import sync_tools_from_registry
-
- async with SessionLocal() as session:
- await sync_tools_from_registry(session)
- await session.commit()
- logger.info("Tools synced from registry to database")
-
- # --- Initialize LangGraph checkpointer ---
-
- from modules.features.chatBot.utils.checkpointer import (
- initialize_checkpointer,
- close_checkpointer,
- )
-
- try:
- await initialize_checkpointer()
- logger.info("LangGraph checkpointer initialized successfully")
- except Exception as e:
- logger.error(f"Failed to initialize LangGraph checkpointer: {str(e)}")
- # Continue startup even if checkpointer fails to initialize
-
- # --- Init Event Manager ---
+ # --- Init Managers ---
+ await featuresLifecycle.start()
eventManager.start()
yield
- # --- Cleanup Event Manager ---
+ # --- Stop Managers ---
eventManager.stop()
-
- # --- Cleanup LangGraph checkpointer ---
- await close_checkpointer()
-
- # --- Cleanup SQLAlchemy ---
- await engine.dispose()
-
+ await featuresLifecycle.stop()
logger.info("Application has been shut down")
@@ -401,70 +375,52 @@ app.add_middleware(
ProactiveTokenRefreshMiddleware, enabled=True, check_interval_minutes=5
)
-# Run triggered features
-import modules.features.init
-
# Include all routers
-from modules.routes.routeAdmin import router as generalRouter
+from modules.routes.routeAdmin import router as generalRouter
app.include_router(generalRouter)
from modules.routes.routeAttributes import router as attributesRouter
-
app.include_router(attributesRouter)
from modules.routes.routeDataMandates import router as mandateRouter
-
app.include_router(mandateRouter)
from modules.routes.routeDataUsers import router as userRouter
-
app.include_router(userRouter)
from modules.routes.routeDataFiles import router as fileRouter
-
app.include_router(fileRouter)
from modules.routes.routeDataNeutralization import router as neutralizationRouter
-
app.include_router(neutralizationRouter)
from modules.routes.routeDataPrompts import router as promptRouter
-
app.include_router(promptRouter)
from modules.routes.routeDataConnections import router as connectionsRouter
-
app.include_router(connectionsRouter)
from modules.routes.routeWorkflows import router as workflowRouter
-
app.include_router(workflowRouter)
from modules.routes.routeChatPlayground import router as chatPlaygroundRouter
-
app.include_router(chatPlaygroundRouter)
from modules.routes.routeSecurityLocal import router as localRouter
-
app.include_router(localRouter)
from modules.routes.routeSecurityMsft import router as msftRouter
-
app.include_router(msftRouter)
from modules.routes.routeSecurityGoogle import router as googleRouter
-
app.include_router(googleRouter)
from modules.routes.routeVoiceGoogle import router as voiceGoogleRouter
-
app.include_router(voiceGoogleRouter)
from modules.routes.routeSecurityAdmin import router as adminSecurityRouter
-
app.include_router(adminSecurityRouter)
from modules.routes.routeChatbot import router as chatbotRouter
-
app.include_router(chatbotRouter)
diff --git a/modules/connectors/connectorDbPostgre.py b/modules/connectors/connectorDbPostgre.py
index dd08f4e0..e01e267b 100644
--- a/modules/connectors/connectorDbPostgre.py
+++ b/modules/connectors/connectorDbPostgre.py
@@ -537,6 +537,12 @@ class DatabaseConnector:
except (json.JSONDecodeError, TypeError):
# If not valid JSON, convert to JSON string
value = json.dumps(value)
+ elif hasattr(value, 'model_dump'):
+ # Handle Pydantic v2 models
+ value = json.dumps(value.model_dump())
+ elif hasattr(value, 'dict'):
+ # Handle Pydantic v1 models
+ value = json.dumps(value.dict())
else:
# Convert other types to JSON
value = json.dumps(value)
diff --git a/modules/features/chatBot/service.py b/modules/features/chatBot/mainChatBot.py
similarity index 81%
rename from modules/features/chatBot/service.py
rename to modules/features/chatBot/mainChatBot.py
index 8239ba53..2d44e422 100644
--- a/modules/features/chatBot/service.py
+++ b/modules/features/chatBot/mainChatBot.py
@@ -1,18 +1,21 @@
"""Service layer for chatbot functionality."""
import json
+import asyncio
import logging
from datetime import datetime, timezone
+import sys
from typing import AsyncIterator, List, Optional
from sqlalchemy import select, update, delete
-from sqlalchemy.ext.asyncio import AsyncSession
+from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine, async_sessionmaker
+from sqlalchemy.exc import OperationalError
from modules.features.chatBot.domain.chatbot import Chatbot, get_langchain_model
from modules.features.chatBot.utils.checkpointer import get_checkpointer
from modules.features.chatBot.utils.toolRegistry import get_registry
from modules.features.chatBot.utils import permissions
-from modules.features.chatBot.database import UserThreadMapping
+from modules.features.chatBot.subChatbotDatabase import UserThreadMapping
from modules.datamodels.datamodelChatbot import (
MessageItem,
ChatMessageResponse,
@@ -28,6 +31,179 @@ from modules.shared.configuration import APP_CONFIG
logger = logging.getLogger(__name__)
+_closeCheckpointerCallable = None # set when start() initializes checkpointer
+_engine = None
+_SessionLocal = None
+
+
+def _make_sqlalchemy_db_url() -> str:
+ from urllib.parse import quote_plus
+
+ host = APP_CONFIG.get("SQLALCHEMY_DB_HOST", "localhost")
+ port = APP_CONFIG.get("SQLALCHEMY_DB_PORT", "5432")
+ db = APP_CONFIG.get("SQLALCHEMY_DB_DATABASE", "project_gateway")
+ user = APP_CONFIG.get("SQLALCHEMY_DB_USER", "postgres")
+ pwd = quote_plus(APP_CONFIG.get("SQLALCHEMY_DB_PASSWORD_SECRET", ""))
+ if sys.platform == "win32":
+ return f"postgresql+asyncpg://{user}:{pwd}@{host}:{port}/{db}"
+ return f"postgresql+psycopg://{user}:{pwd}@{host}:{port}/{db}"
+
+
+def _create_engine_with_pool() -> tuple:
+ """Create async SQLAlchemy engine and sessionmaker with resilient pool settings."""
+ db_url = _make_sqlalchemy_db_url()
+
+ # Pool tuning with sensible defaults; overridable via config
+ pool_size = int(APP_CONFIG.get("SQLALCHEMY_POOL_SIZE", 5))
+ max_overflow = int(APP_CONFIG.get("SQLALCHEMY_MAX_OVERFLOW", 10))
+ pool_recycle = int(APP_CONFIG.get("SQLALCHEMY_POOL_RECYCLE_SECONDS", 300))
+ pool_timeout = int(APP_CONFIG.get("SQLALCHEMY_POOL_TIMEOUT_SECONDS", 30))
+ connect_timeout = int(APP_CONFIG.get("SQLALCHEMY_CONNECT_TIMEOUT_SECONDS", 10))
+
+ engine = create_async_engine(
+ db_url,
+ pool_pre_ping=True,
+ pool_size=pool_size,
+ max_overflow=max_overflow,
+ pool_recycle=pool_recycle,
+ pool_timeout=pool_timeout,
+ echo=False,
+ connect_args={
+            # asyncpg accepts "timeout"; NOTE(review): psycopg may reject unknown connect args — verify on non-win32
+ "timeout": connect_timeout,
+ },
+ )
+ session_local = async_sessionmaker(engine, expire_on_commit=False, class_=AsyncSession)
+ return engine, session_local
+
+
+async def start() -> None:
+ """Initialize ChatBot feature at application startup.
+
+ - Creates tables if needed
+ - Syncs tool registry to database
+    - Initializes LangGraph checkpointer (except in dev); no-op on win32 (early return)
+ """
+ global _engine, _SessionLocal
+
+ from modules.features.chatBot.subChatbotDatabase import init_models as _initModels
+ from modules.features.chatBot.subChatbotDatabase import (
+ sync_tools_from_registry as _syncToolsFromRegistry,
+ )
+
+    # NOTE(review): skips ChatBot DB init entirely on win32 (no event-loop policy is set here) — confirm intended
+ if sys.platform == "win32":
+ return
+
+ try:
+ if _engine is None:
+ _engine, _SessionLocal = _create_engine_with_pool()
+
+ # Ensure DB schema exists with retry (handles transient startup issues)
+ await _initModelsWithRetry(_engine, _initModels)
+
+ # Sync tools into DB
+ async with _SessionLocal() as session:
+ await _syncToolsFromRegistry(session)
+ await session.commit()
+ logger.info("ChatBot tools synced from registry to database")
+ except Exception as exc:
+ logger.error(
+ f"ChatBot startup failed: {type(exc).__name__}: {str(exc)}",
+ exc_info=True,
+ )
+ # Intentionally swallow to avoid aborting app startup
+ return
+
+ # Initialize LangGraph checkpointer (skip in dev)
+ global _closeCheckpointerCallable
+ isDev = str(APP_CONFIG.get("APP_ENV_LABEL")).lower() in ("dev", "development")
+ if not isDev:
+ try:
+ from modules.features.chatBot.utils.checkpointer import (
+ initialize_checkpointer as _initializeCheckpointer,
+ close_checkpointer as _closeCheckpointer,
+ )
+
+ await _initializeCheckpointer()
+ _closeCheckpointerCallable = _closeCheckpointer
+ logger.info("LangGraph checkpointer initialized successfully (ChatBot)")
+ except Exception as e:
+ logger.error(
+ f"Failed to initialize LangGraph checkpointer (ChatBot): {str(e)}"
+ )
+ _closeCheckpointerCallable = None
+ else:
+ _closeCheckpointerCallable = None
+ logger.info("LangGraph checkpointer disabled in dev environment (ChatBot)")
+
+
+async def stop() -> None:
+ """Shutdown hook for ChatBot feature (closes checkpointer if initialized)."""
+ global _closeCheckpointerCallable
+ try:
+ if callable(_closeCheckpointerCallable):
+ try:
+ await _closeCheckpointerCallable()
+ finally:
+ _closeCheckpointerCallable = None
+ # Dispose engine if created
+ global _engine
+ if _engine is not None:
+ try:
+ await _engine.dispose()
+ finally:
+ _engine = None
+ except Exception as exc:
+ logger.warning(
+ f"ChatBot shutdown encountered an error: {type(exc).__name__}: {str(exc)}",
+ exc_info=True,
+ )
+
+
+async def _initModelsWithRetry(engine, initModelsCallable, *, maxRetries: int = 5, baseDelaySeconds: float = 0.5) -> None:
+ """Initialize DB models with exponential backoff to avoid failing app startup on transient DB issues."""
+ attempt = 0
+ while True:
+ try:
+ await initModelsCallable(engine)
+ return
+ except Exception as exc:
+ attempt += 1
+ if attempt > maxRetries:
+ logger.error(
+ f"Failed to initialize chatbot DB models after {maxRetries} attempts: {type(exc).__name__}: {str(exc)}",
+ exc_info=True,
+ )
+ # Re-raise to let caller handle (feature init may choose to continue)
+ raise
+
+ # For transient connection issues, dispose and recreate the engine before retrying
+ transient = (
+ isinstance(exc, OperationalError)
+ or "ConnectionDoesNotExistError" in type(exc).__name__
+ or "ConnectionResetError" in type(exc).__name__
+ or "WinError 64" in str(exc)
+ )
+ if transient:
+ try:
+ global _engine, _SessionLocal
+ if _engine is not None:
+ await _engine.dispose()
+ _engine, _SessionLocal = _create_engine_with_pool()
+ engine = _engine
+ logger.warning("Recreated async DB engine after transient connection error during init")
+ except Exception as recreate_exc:
+ logger.warning(
+ f"Failed to recreate engine after transient error: {type(recreate_exc).__name__}: {str(recreate_exc)}",
+ exc_info=True,
+ )
+ delay = baseDelaySeconds * (2 ** (attempt - 1))
+ logger.warning(
+ f"DB init failed (attempt {attempt}/{maxRetries}): {type(exc).__name__}: {str(exc)}; retrying in {delay:.1f}s"
+ )
+ await asyncio.sleep(delay)
+
async def get_all_threads_for_user(
*,
user: User,
@@ -685,7 +861,7 @@ async def get_all_tools(*, session: AsyncSession) -> List[dict]:
Returns:
List of tool dictionaries with all tool information.
"""
- from modules.features.chatBot.database import Tool
+ from modules.features.chatBot.subChatbotDatabase import Tool
logger.info("Fetching all tools from database")
@@ -725,7 +901,7 @@ async def grant_tool_to_user(
Raises:
ValueError: If the tool doesn't exist, is not active, or user already has the tool.
"""
- from modules.features.chatBot.database import Tool, UserToolMapping
+ from modules.features.chatBot.subChatbotDatabase import Tool, UserToolMapping
import uuid
logger.info(f"Granting tool {tool_id} to user {user_id}")
@@ -788,7 +964,7 @@ async def revoke_tool_from_user(
Raises:
ValueError: If the mapping doesn't exist.
"""
- from modules.features.chatBot.database import UserToolMapping
+ from modules.features.chatBot.subChatbotDatabase import UserToolMapping
import uuid
logger.info(f"Revoking tool {tool_id} from user {user_id}")
@@ -836,7 +1012,7 @@ async def update_tool(
Raises:
ValueError: If the tool doesn't exist or no fields provided to update.
"""
- from modules.features.chatBot.database import Tool
+ from modules.features.chatBot.subChatbotDatabase import Tool
import uuid
logger.info(f"Updating tool {tool_id}")
@@ -890,7 +1066,7 @@ async def get_tools_for_user(*, user_id: str, session: AsyncSession) -> List[dic
Returns:
List of tool dictionaries with all tool information.
"""
- from modules.features.chatBot.database import Tool, UserToolMapping
+ from modules.features.chatBot.subChatbotDatabase import Tool, UserToolMapping
logger.info(f"Fetching tools for user {user_id}")
@@ -956,7 +1132,7 @@ async def validate_and_get_tools_for_request(
PermissionError: If the user requests tools they don't have access to.
ValueError: If the user has no tools available when trying to use all tools.
"""
- from modules.features.chatBot.database import Tool, UserToolMapping
+ from modules.features.chatBot.subChatbotDatabase import Tool, UserToolMapping
import uuid
logger.info(f"Validating tools for user {user_id}")
diff --git a/modules/features/chatBot/database.py b/modules/features/chatBot/subChatbotDatabase.py
similarity index 100%
rename from modules/features/chatBot/database.py
rename to modules/features/chatBot/subChatbotDatabase.py
diff --git a/modules/features/featuresLifecycle.py b/modules/features/featuresLifecycle.py
new file mode 100644
index 00000000..7089ef31
--- /dev/null
+++ b/modules/features/featuresLifecycle.py
@@ -0,0 +1,26 @@
+import logging
+from modules.interfaces.interfaceDbAppObjects import getRootInterface
+
+logger = logging.getLogger(__name__)
+
+async def start() -> None:
+ """ Start feature triggers and background managers """
+
+ rootInterface = getRootInterface()
+ eventUser = rootInterface.getUserByUsername("event")
+
+ # Feature SyncDelta
+ from modules.features.syncDelta import mainSyncDelta
+ mainSyncDelta.startSyncManager(eventUser)
+
+ # Feature ChatBot
+ from modules.features.chatBot.mainChatBot import start as startChatBot
+ await startChatBot()
+
+
+async def stop() -> None:
+ """ Stop feature triggers and background managers """
+
+ # Feature ChatBot
+ from modules.features.chatBot.mainChatBot import stop as stopChatBot
+ await stopChatBot()
\ No newline at end of file
diff --git a/modules/features/init.py b/modules/features/init.py
deleted file mode 100644
index 541fd1ac..00000000
--- a/modules/features/init.py
+++ /dev/null
@@ -1,14 +0,0 @@
-# Launch features as events
-
-import asyncio
-import logging
-from modules.interfaces.interfaceDbAppObjects import getRootInterface
-
-logger = logging.getLogger(__name__)
-rootInterface = getRootInterface()
-eventUser = rootInterface.getUserByUsername("event")
-
-# Custom features launch
-
-from modules.features.syncDelta import mainSyncDelta
-mainSyncDelta.startSyncManager(eventUser)
diff --git a/modules/interfaces/interfaceDbAppObjects.py b/modules/interfaces/interfaceDbAppObjects.py
index 36a07484..2151fae9 100644
--- a/modules/interfaces/interfaceDbAppObjects.py
+++ b/modules/interfaces/interfaceDbAppObjects.py
@@ -793,10 +793,10 @@ class AppObjects:
# Continue with saving the new token even if deletion fails
# Convert to dict and ensure all fields are properly set
- token_dict = token.model_dump()
+ token_dict = token.to_dict()
# Ensure userId is set to current user
# Convert to dict and ensure all fields are properly set
- token_dict = token.model_dump()
+ token_dict = token.to_dict()
# Ensure userId is set to current user
token_dict["userId"] = self.currentUser.id
@@ -829,7 +829,15 @@ class AppObjects:
if not token.createdAt:
token.createdAt = get_utc_timestamp()
- # If replace_existing is True, delete old tokens for this connectionId first
+ # Convert to dict and ensure all fields are properly set
+ token_dict = token.to_dict()
+ # Ensure userId is set to current user
+ token_dict["userId"] = self.currentUser.id
+
+ # Save to database
+ self.db.recordCreate(Token, token_dict)
+
+ # After successful save, delete old tokens for this connectionId (if requested)
if replace_existing:
try:
old_tokens = self.db.getRecordset(
@@ -837,9 +845,7 @@ class AppObjects:
)
deleted_count = 0
for old_token in old_tokens:
- if (
- old_token["id"] != token.id
- ): # Don't delete the new token if it already exists
+ if old_token["id"] != token.id:
self.db.recordDelete(Token, old_token["id"])
deleted_count += 1
@@ -847,20 +853,11 @@ class AppObjects:
logger.info(
f"Replaced {deleted_count} old tokens for connectionId {token.connectionId}"
)
-
except Exception as e:
logger.warning(
f"Failed to delete old tokens for connectionId {token.connectionId}: {str(e)}"
)
- # Continue with saving the new token even if deletion fails
-
- # Convert to dict and ensure all fields are properly set
- token_dict = token.model_dump()
- # Ensure userId is set to current user
- token_dict["userId"] = self.currentUser.id
-
- # Save to database
- self.db.recordCreate(Token, token_dict)
+ # Keep the newly saved token; cleanup can be retried later
except Exception as e:
logger.error(f"Error saving connection token: {str(e)}")
@@ -1218,9 +1215,9 @@ def getRootInterface() -> AppObjects:
if not users:
raise ValueError("Initial user not found in database")
- # Convert to User model
+ # Convert to User model (use helper compatible with our models)
user_data = users[0]
- rootUser = User.model_validate(user_data)
+ rootUser = User.from_dict(user_data)
# Create root interface with the root user
_rootAppObjects = AppObjects(rootUser)
diff --git a/modules/interfaces/interfaceDbChatObjects.py b/modules/interfaces/interfaceDbChatObjects.py
index ff18a9a9..f01ca701 100644
--- a/modules/interfaces/interfaceDbChatObjects.py
+++ b/modules/interfaces/interfaceDbChatObjects.py
@@ -84,12 +84,20 @@ class ChatObjects:
model_fields = {}
if hasattr(model_class, '__fields__'):
model_fields = model_class.__fields__
+ elif hasattr(model_class, 'model_fields'):
+ model_fields = model_class.model_fields
for field_name, value in data.items():
# Check if this field should be stored as JSONB in the database
if field_name in model_fields:
field_info = model_fields[field_name]
- field_type = field_info.type_
+ # Handle both Pydantic v1 and v2
+ if hasattr(field_info, 'type_'):
+ field_type = field_info.type_ # Pydantic v1
+ elif hasattr(field_info, 'annotation'):
+ field_type = field_info.annotation # Pydantic v2
+ else:
+ field_type = type(value) # Fallback
# Check if this is a JSONB field (Dict, List, or complex types)
if (field_type == dict or
@@ -312,8 +320,10 @@ class ChatObjects:
logs_data = object_fields['logs']
try:
for log_data in logs_data:
- if hasattr(log_data, 'dict'):
- log_dict = log_data.dict()
+ if hasattr(log_data, 'model_dump'):
+ log_dict = log_data.model_dump() # Pydantic v2
+ elif hasattr(log_data, 'dict'):
+ log_dict = log_data.dict() # Pydantic v1
elif hasattr(log_data, 'to_dict'):
log_dict = log_data.to_dict()
else:
@@ -326,8 +336,10 @@ class ChatObjects:
messages_data = object_fields['messages']
try:
for message_data in messages_data:
- if hasattr(message_data, 'dict'):
- msg_dict = message_data.dict()
+ if hasattr(message_data, 'model_dump'):
+ msg_dict = message_data.model_dump() # Pydantic v2
+ elif hasattr(message_data, 'dict'):
+ msg_dict = message_data.dict() # Pydantic v1
elif hasattr(message_data, 'to_dict'):
msg_dict = message_data.to_dict()
else:
@@ -536,8 +548,10 @@ class ChatObjects:
created_documents = []
for doc_data in documents_to_create:
# Convert to dict if it's a Pydantic object
- if hasattr(doc_data, 'dict'):
- doc_dict = doc_data.dict()
+ if hasattr(doc_data, 'model_dump'):
+ doc_dict = doc_data.model_dump() # Pydantic v2
+ elif hasattr(doc_data, 'dict'):
+ doc_dict = doc_data.dict() # Pydantic v1
elif hasattr(doc_data, 'to_dict'):
doc_dict = doc_data.to_dict()
else:
@@ -651,8 +665,10 @@ class ChatObjects:
documents_data = object_fields['documents']
try:
for doc_data in documents_data:
- if hasattr(doc_data, 'dict'):
- doc_dict = doc_data.dict()
+ if hasattr(doc_data, 'model_dump'):
+ doc_dict = doc_data.model_dump() # Pydantic v2
+ elif hasattr(doc_data, 'dict'):
+ doc_dict = doc_data.dict() # Pydantic v1
elif hasattr(doc_data, 'to_dict'):
doc_dict = doc_data.to_dict()
else:
@@ -1014,7 +1030,7 @@ class ChatObjects:
items.append({
"type": "message",
"createdAt": msg_timestamp,
- "item": chat_message.dict()
+ "item": chat_message.model_dump() if hasattr(chat_message, 'model_dump') else chat_message.dict()
})
# Get logs
@@ -1029,7 +1045,7 @@ class ChatObjects:
items.append({
"type": "log",
"createdAt": log_timestamp,
- "item": chat_log.dict()
+ "item": chat_log.model_dump() if hasattr(chat_log, 'model_dump') else chat_log.dict()
})
# Get stats
@@ -1044,7 +1060,7 @@ class ChatObjects:
items.append({
"type": "stat",
"createdAt": stat_timestamp,
- "item": chat_stat.dict()
+ "item": chat_stat.model_dump() if hasattr(chat_stat, 'model_dump') else chat_stat.dict()
})
# Sort all items by createdAt timestamp for chronological order
diff --git a/modules/routes/routeChatbot.py b/modules/routes/routeChatbot.py
index a4757c84..75a2ffac 100644
--- a/modules/routes/routeChatbot.py
+++ b/modules/routes/routeChatbot.py
@@ -1,18 +1,16 @@
from fastapi import APIRouter, Depends, HTTPException, status
from fastapi.requests import Request
from fastapi.responses import StreamingResponse
-from typing import Any, Dict, List, Optional
-from datetime import datetime
import logging
-import uuid
-from sqlalchemy.ext.asyncio import AsyncSession
-
-from modules.features.chatBot.database import get_async_db_session
-from modules.features.chatBot.service import (
- get_or_create_thread_for_user,
-)
from modules.datamodels.datamodelUam import User, UserPrivilege
+from modules.security.auth import getCurrentUser, limiter
+
+from sqlalchemy.ext.asyncio import AsyncSession
+from modules.features.chatBot.subChatbotDatabase import get_async_db_session
+from modules.features.chatBot.mainChatBot import (
+ get_or_create_thread_for_user,
+ )
from modules.datamodels.datamodelChatbot import (
ChatMessageRequest,
MessageItem,
@@ -30,9 +28,9 @@ from modules.datamodels.datamodelChatbot import (
RevokeToolResponse,
UpdateToolRequest,
UpdateToolResponse,
-)
-from modules.security.auth import getCurrentUser, limiter
-from modules.features.chatBot import service as chat_service
+ )
+from modules.features.chatBot import mainChatBot as chat_service
+
logger = logging.getLogger(__name__)
diff --git a/modules/routes/routeDataPrompts.py b/modules/routes/routeDataPrompts.py
index b3ae6f47..939d2271 100644
--- a/modules/routes/routeDataPrompts.py
+++ b/modules/routes/routeDataPrompts.py
@@ -49,7 +49,7 @@ async def create_prompt(
managementInterface = interfaceDbComponentObjects.getInterface(currentUser)
# Convert Prompt to dict for interface
- prompt_data = prompt.dict()
+ prompt_data = prompt.model_dump() if hasattr(prompt, "model_dump") else prompt.dict()
# Create prompt
newPrompt = managementInterface.createPrompt(prompt_data)
@@ -96,7 +96,10 @@ async def update_prompt(
)
# Convert Prompt to dict for interface, excluding the id field
- update_data = promptData.dict(exclude={'id'})
+ if hasattr(promptData, "model_dump"):
+ update_data = promptData.model_dump(exclude={"id"})
+ else:
+ update_data = promptData.dict(exclude={"id"})
# Update prompt
updatedPrompt = managementInterface.updatePrompt(promptId, update_data)
diff --git a/modules/routes/routeDataUsers.py b/modules/routes/routeDataUsers.py
index 300f6268..e44178a3 100644
--- a/modules/routes/routeDataUsers.py
+++ b/modules/routes/routeDataUsers.py
@@ -18,7 +18,7 @@ import modules.interfaces.interfaceDbAppObjects as interfaceDbAppObjects
from modules.security.auth import getCurrentUser, limiter, getCurrentUser
# Import the attribute definition and helper functions
-from modules.datamodels.datamodelUam import User
+from modules.datamodels.datamodelUam import User, UserPrivilege
from modules.shared.attributeUtils import AttributeDefinition
from modules.shared.attributeUtils import getModelAttributeDefinitions, AttributeResponse
@@ -93,7 +93,7 @@ async def create_user(
appInterface = interfaceDbAppObjects.getInterface(currentUser)
# Convert User to dict for interface
- user_dict = user_data.dict()
+ user_dict = user_data.model_dump() if hasattr(user_data, "model_dump") else user_data.dict()
# Create user
newUser = appInterface.createUser(user_dict)
@@ -120,7 +120,7 @@ async def update_user(
)
# Convert User to dict for interface
- update_data = userData.dict()
+ update_data = userData.model_dump() if hasattr(userData, "model_dump") else userData.dict()
# Update user
updatedUser = appInterface.updateUser(userId, update_data)
@@ -151,7 +151,7 @@ async def reset_user_password(
)
# Get user interface
- appInterface = getInterface(currentUser)
+ appInterface = interfaceDbAppObjects.getInterface(currentUser)
# Get target user
target_user = appInterface.getUserById(userId)
@@ -228,7 +228,7 @@ async def change_password(
"""Change current user's password"""
try:
# Get user interface
- appInterface = getInterface(currentUser)
+ appInterface = interfaceDbAppObjects.getInterface(currentUser)
# Verify current password
if not appInterface.verifyPassword(currentPassword, currentUser.passwordHash):
diff --git a/modules/services/serviceExtraction/subRegistry.py b/modules/services/serviceExtraction/subRegistry.py
index eb2ece4d..e5e84c02 100644
--- a/modules/services/serviceExtraction/subRegistry.py
+++ b/modules/services/serviceExtraction/subRegistry.py
@@ -110,7 +110,6 @@ class ExtractorRegistry:
mime_types = extractor.getSupportedMimeTypes()
for mime_type in mime_types:
self.register(mime_type, extractor)
- logger.debug(f"Registered MIME type: {mime_type} → {extractor.__class__.__name__}")
# Register file extensions
extensions = extractor.getSupportedExtensions()
@@ -118,7 +117,6 @@ class ExtractorRegistry:
# Remove leading dot for registry key
ext_key = ext.lstrip('.')
self.register(ext_key, extractor)
- logger.debug(f"Registered extension: .{ext_key} → {extractor.__class__.__name__}")
except Exception as e:
logger.error(f"Failed to auto-register {extractor.__class__.__name__}: {str(e)}")
diff --git a/modules/services/serviceWorkflow/mainServiceWorkflow.py b/modules/services/serviceWorkflow/mainServiceWorkflow.py
index dba44e80..fe4b4d0f 100644
--- a/modules/services/serviceWorkflow/mainServiceWorkflow.py
+++ b/modules/services/serviceWorkflow/mainServiceWorkflow.py
@@ -94,7 +94,6 @@ class WorkflowService:
for doc in message.documents:
if doc.id == doc_id:
doc_name = getattr(doc, 'fileName', 'unknown')
- logger.debug(f"Found docItem reference {doc_ref}: {doc_name}")
all_documents.append(doc)
break
elif doc_ref.startswith("docList:"):
@@ -104,21 +103,16 @@ class WorkflowService:
# Format: docList::