Adapted chatbot integration

This commit is contained in:
ValueOn AG 2025-10-15 18:23:03 +02:00
parent 82b2fd36dc
commit b97670d939
16 changed files with 336 additions and 191 deletions

108
app.py
View file

@ -2,30 +2,21 @@ import os
import sys import sys
import asyncio import asyncio
from urllib.parse import quote_plus from urllib.parse import quote_plus
from sqlalchemy.ext.asyncio import create_async_engine, async_sessionmaker, AsyncSession
from modules.features.chatBot.database import init_models as init_chatbot_models
os.environ["NUMEXPR_MAX_THREADS"] = "12" os.environ["NUMEXPR_MAX_THREADS"] = "12"
# Fix for Windows asyncio compatibility with psycopg from fastapi import FastAPI
if sys.platform == "win32":
asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
from fastapi import FastAPI, HTTPException, Depends, Body, status, Response
from fastapi.middleware.cors import CORSMiddleware from fastapi.middleware.cors import CORSMiddleware
from fastapi.openapi.models import OAuthFlows as OAuthFlowsModel
from fastapi.security import HTTPBearer from fastapi.security import HTTPBearer
from contextlib import asynccontextmanager from contextlib import asynccontextmanager
import logging import logging
from logging.handlers import RotatingFileHandler from logging.handlers import RotatingFileHandler
from datetime import timedelta, datetime from datetime import datetime
import pathlib
from modules.shared.configuration import APP_CONFIG from modules.shared.configuration import APP_CONFIG
from modules.shared.eventManagement import eventManager from modules.shared.eventManagement import eventManager
from modules.features import featuresLifecycle as featuresLifecycle
class DailyRotatingFileHandler(RotatingFileHandler): class DailyRotatingFileHandler(RotatingFileHandler):
""" """
@ -169,6 +160,24 @@ def initLogging():
) )
return True return True
# Filter that normalizes problematic unicode punctuation (e.g., arrows) to
# ASCII so log output survives narrow terminal encodings such as cp1252.
class UnicodeArrowFilter(logging.Filter):
    """Rewrite common unicode arrow/angle-quote characters in log messages to ASCII."""

    # Build the translation table once at class-definition time; str.translate
    # then replaces every mapped character in a single C-level pass instead of
    # one full string scan per character (the previous .replace() loop).
    _TRANSLATION = str.maketrans(
        {
            "\u2192": "->",   # rightwards arrow
            "\u2190": "<-",   # leftwards arrow
            "\u2194": "<->",  # left right arrow
            "\u21D2": "=>",   # rightwards double arrow
            "\u21D0": "<=",   # leftwards double arrow
            "\u21D4": "<=>",  # left right double arrow
            "\u00AB": "<<",   # left-pointing double angle quotation mark
            "\u00BB": ">>",   # right-pointing double angle quotation mark
        }
    )

    def filter(self, record):
        """Normalize record.msg in place; always keep the record (returns True)."""
        if isinstance(record.msg, str):
            record.msg = record.msg.translate(self._TRANSLATION)
        return True
# Configure handlers based on config # Configure handlers based on config
handlers = [] handlers = []
@ -180,6 +189,7 @@ def initLogging():
consoleHandler.addFilter(HttpcoreStarFilter()) consoleHandler.addFilter(HttpcoreStarFilter())
consoleHandler.addFilter(HTTPDebugFilter()) consoleHandler.addFilter(HTTPDebugFilter())
consoleHandler.addFilter(EmojiFilter()) consoleHandler.addFilter(EmojiFilter())
consoleHandler.addFilter(UnicodeArrowFilter())
handlers.append(consoleHandler) handlers.append(consoleHandler)
# Add file handler if enabled # Add file handler if enabled
@ -195,12 +205,14 @@ def initLogging():
filename_prefix="log_app", filename_prefix="log_app",
max_bytes=rotationSize, max_bytes=rotationSize,
backup_count=backupCount, backup_count=backupCount,
encoding="utf-8",
) )
fileHandler.setFormatter(fileFormatter) fileHandler.setFormatter(fileFormatter)
fileHandler.addFilter(ChromeDevToolsFilter()) fileHandler.addFilter(ChromeDevToolsFilter())
fileHandler.addFilter(HttpcoreStarFilter()) fileHandler.addFilter(HttpcoreStarFilter())
fileHandler.addFilter(HTTPDebugFilter()) fileHandler.addFilter(HTTPDebugFilter())
fileHandler.addFilter(EmojiFilter()) fileHandler.addFilter(EmojiFilter())
fileHandler.addFilter(UnicodeArrowFilter())
handlers.append(fileHandler) handlers.append(fileHandler)
# Configure the root logger # Configure the root logger
@ -247,6 +259,9 @@ def make_sqlalchemy_db_url() -> str:
db = APP_CONFIG.get("SQLALCHEMY_DB_DATABASE", "project_gateway") db = APP_CONFIG.get("SQLALCHEMY_DB_DATABASE", "project_gateway")
user = APP_CONFIG.get("SQLALCHEMY_DB_USER", "postgres") user = APP_CONFIG.get("SQLALCHEMY_DB_USER", "postgres")
pwd = quote_plus(APP_CONFIG.get("SQLALCHEMY_DB_PASSWORD_SECRET", "")) pwd = quote_plus(APP_CONFIG.get("SQLALCHEMY_DB_PASSWORD_SECRET", ""))
# On Windows, prefer asyncpg to avoid psycopg + ProactorEventLoop incompatibility
if sys.platform == "win32":
return f"postgresql+asyncpg://{user}:{pwd}@{host}:{port}/{db}"
return f"postgresql+psycopg://{user}:{pwd}@{host}:{port}/{db}" return f"postgresql+psycopg://{user}:{pwd}@{host}:{port}/{db}"
@ -261,56 +276,15 @@ instanceLabel = APP_CONFIG.get("APP_ENV_LABEL")
async def lifespan(app: FastAPI): async def lifespan(app: FastAPI):
logger.info("Application is starting up") logger.info("Application is starting up")
# --- Init SQLAlchemy --- # --- Init Managers ---
await featuresLifecycle.start()
engine = create_async_engine(
make_sqlalchemy_db_url(), pool_pre_ping=True, echo=False
)
SessionLocal = async_sessionmaker(
engine, expire_on_commit=False, class_=AsyncSession
)
app.state.checkpoint_engine = engine
app.state.checkpoint_sessionmaker = SessionLocal
# NOTE: Might need Alembic migrations in the future
await init_chatbot_models(engine)
# --- Sync tools from registry to database ---
from modules.features.chatBot.database import sync_tools_from_registry
async with SessionLocal() as session:
await sync_tools_from_registry(session)
await session.commit()
logger.info("Tools synced from registry to database")
# --- Initialize LangGraph checkpointer ---
from modules.features.chatBot.utils.checkpointer import (
initialize_checkpointer,
close_checkpointer,
)
try:
await initialize_checkpointer()
logger.info("LangGraph checkpointer initialized successfully")
except Exception as e:
logger.error(f"Failed to initialize LangGraph checkpointer: {str(e)}")
# Continue startup even if checkpointer fails to initialize
# --- Init Event Manager ---
eventManager.start() eventManager.start()
yield yield
# --- Cleanup Event Manager --- # --- Stop Managers ---
eventManager.stop() eventManager.stop()
await featuresLifecycle.stop()
# --- Cleanup LangGraph checkpointer ---
await close_checkpointer()
# --- Cleanup SQLAlchemy ---
await engine.dispose()
logger.info("Application has been shut down") logger.info("Application has been shut down")
@ -401,70 +375,52 @@ app.add_middleware(
ProactiveTokenRefreshMiddleware, enabled=True, check_interval_minutes=5 ProactiveTokenRefreshMiddleware, enabled=True, check_interval_minutes=5
) )
# Run triggered features
import modules.features.init
# Include all routers # Include all routers
from modules.routes.routeAdmin import router as generalRouter
from modules.routes.routeAdmin import router as generalRouter
app.include_router(generalRouter) app.include_router(generalRouter)
from modules.routes.routeAttributes import router as attributesRouter from modules.routes.routeAttributes import router as attributesRouter
app.include_router(attributesRouter) app.include_router(attributesRouter)
from modules.routes.routeDataMandates import router as mandateRouter from modules.routes.routeDataMandates import router as mandateRouter
app.include_router(mandateRouter) app.include_router(mandateRouter)
from modules.routes.routeDataUsers import router as userRouter from modules.routes.routeDataUsers import router as userRouter
app.include_router(userRouter) app.include_router(userRouter)
from modules.routes.routeDataFiles import router as fileRouter from modules.routes.routeDataFiles import router as fileRouter
app.include_router(fileRouter) app.include_router(fileRouter)
from modules.routes.routeDataNeutralization import router as neutralizationRouter from modules.routes.routeDataNeutralization import router as neutralizationRouter
app.include_router(neutralizationRouter) app.include_router(neutralizationRouter)
from modules.routes.routeDataPrompts import router as promptRouter from modules.routes.routeDataPrompts import router as promptRouter
app.include_router(promptRouter) app.include_router(promptRouter)
from modules.routes.routeDataConnections import router as connectionsRouter from modules.routes.routeDataConnections import router as connectionsRouter
app.include_router(connectionsRouter) app.include_router(connectionsRouter)
from modules.routes.routeWorkflows import router as workflowRouter from modules.routes.routeWorkflows import router as workflowRouter
app.include_router(workflowRouter) app.include_router(workflowRouter)
from modules.routes.routeChatPlayground import router as chatPlaygroundRouter from modules.routes.routeChatPlayground import router as chatPlaygroundRouter
app.include_router(chatPlaygroundRouter) app.include_router(chatPlaygroundRouter)
from modules.routes.routeSecurityLocal import router as localRouter from modules.routes.routeSecurityLocal import router as localRouter
app.include_router(localRouter) app.include_router(localRouter)
from modules.routes.routeSecurityMsft import router as msftRouter from modules.routes.routeSecurityMsft import router as msftRouter
app.include_router(msftRouter) app.include_router(msftRouter)
from modules.routes.routeSecurityGoogle import router as googleRouter from modules.routes.routeSecurityGoogle import router as googleRouter
app.include_router(googleRouter) app.include_router(googleRouter)
from modules.routes.routeVoiceGoogle import router as voiceGoogleRouter from modules.routes.routeVoiceGoogle import router as voiceGoogleRouter
app.include_router(voiceGoogleRouter) app.include_router(voiceGoogleRouter)
from modules.routes.routeSecurityAdmin import router as adminSecurityRouter from modules.routes.routeSecurityAdmin import router as adminSecurityRouter
app.include_router(adminSecurityRouter) app.include_router(adminSecurityRouter)
from modules.routes.routeChatbot import router as chatbotRouter from modules.routes.routeChatbot import router as chatbotRouter
app.include_router(chatbotRouter) app.include_router(chatbotRouter)

View file

@ -537,6 +537,12 @@ class DatabaseConnector:
except (json.JSONDecodeError, TypeError): except (json.JSONDecodeError, TypeError):
# If not valid JSON, convert to JSON string # If not valid JSON, convert to JSON string
value = json.dumps(value) value = json.dumps(value)
elif hasattr(value, 'model_dump'):
# Handle Pydantic v2 models
value = json.dumps(value.model_dump())
elif hasattr(value, 'dict'):
# Handle Pydantic v1 models
value = json.dumps(value.dict())
else: else:
# Convert other types to JSON # Convert other types to JSON
value = json.dumps(value) value = json.dumps(value)

View file

@ -1,18 +1,21 @@
"""Service layer for chatbot functionality.""" """Service layer for chatbot functionality."""
import json import json
import asyncio
import logging import logging
from datetime import datetime, timezone from datetime import datetime, timezone
import sys
from typing import AsyncIterator, List, Optional from typing import AsyncIterator, List, Optional
from sqlalchemy import select, update, delete from sqlalchemy import select, update, delete
from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine, async_sessionmaker
from sqlalchemy.exc import OperationalError
from modules.features.chatBot.domain.chatbot import Chatbot, get_langchain_model from modules.features.chatBot.domain.chatbot import Chatbot, get_langchain_model
from modules.features.chatBot.utils.checkpointer import get_checkpointer from modules.features.chatBot.utils.checkpointer import get_checkpointer
from modules.features.chatBot.utils.toolRegistry import get_registry from modules.features.chatBot.utils.toolRegistry import get_registry
from modules.features.chatBot.utils import permissions from modules.features.chatBot.utils import permissions
from modules.features.chatBot.database import UserThreadMapping from modules.features.chatBot.subChatbotDatabase import UserThreadMapping
from modules.datamodels.datamodelChatbot import ( from modules.datamodels.datamodelChatbot import (
MessageItem, MessageItem,
ChatMessageResponse, ChatMessageResponse,
@ -28,6 +31,179 @@ from modules.shared.configuration import APP_CONFIG
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
# Module-level singletons for the ChatBot feature; created lazily by start()
# and torn down by stop().
_closeCheckpointerCallable = None  # set when start() initializes checkpointer
_engine = None  # async SQLAlchemy engine (None until start() runs)
_SessionLocal = None  # async_sessionmaker bound to _engine
def _make_sqlalchemy_db_url() -> str:
    """Build the async SQLAlchemy Postgres URL from APP_CONFIG.

    Selects the asyncpg driver on Windows (psycopg's async support clashes
    with the Proactor event loop there) and psycopg everywhere else.
    """
    from urllib.parse import quote_plus

    host = APP_CONFIG.get("SQLALCHEMY_DB_HOST", "localhost")
    port = APP_CONFIG.get("SQLALCHEMY_DB_PORT", "5432")
    database = APP_CONFIG.get("SQLALCHEMY_DB_DATABASE", "project_gateway")
    username = APP_CONFIG.get("SQLALCHEMY_DB_USER", "postgres")
    password = quote_plus(APP_CONFIG.get("SQLALCHEMY_DB_PASSWORD_SECRET", ""))

    driver = "asyncpg" if sys.platform == "win32" else "psycopg"
    return f"postgresql+{driver}://{username}:{password}@{host}:{port}/{database}"
def _create_engine_with_pool() -> tuple:
    """Create async SQLAlchemy engine and sessionmaker with resilient pool settings.

    Returns:
        (engine, sessionmaker) tuple; sessions are AsyncSession instances with
        expire_on_commit disabled so ORM objects stay usable after commit.
    """
    db_url = _make_sqlalchemy_db_url()
    # Pool tuning with sensible defaults; overridable via config
    pool_size = int(APP_CONFIG.get("SQLALCHEMY_POOL_SIZE", 5))
    max_overflow = int(APP_CONFIG.get("SQLALCHEMY_MAX_OVERFLOW", 10))
    pool_recycle = int(APP_CONFIG.get("SQLALCHEMY_POOL_RECYCLE_SECONDS", 300))
    pool_timeout = int(APP_CONFIG.get("SQLALCHEMY_POOL_TIMEOUT_SECONDS", 30))
    connect_timeout = int(APP_CONFIG.get("SQLALCHEMY_CONNECT_TIMEOUT_SECONDS", 10))
    engine = create_async_engine(
        db_url,
        # pre-ping validates connections on checkout, dropping stale ones
        pool_pre_ping=True,
        pool_size=pool_size,
        max_overflow=max_overflow,
        pool_recycle=pool_recycle,
        pool_timeout=pool_timeout,
        echo=False,
        connect_args={
            # asyncpg accepts "timeout" as a connect kwarg.
            # NOTE(review): psycopg may raise on unknown connect args rather
            # than ignore them — confirm on the non-Windows (psycopg) path.
            "timeout": connect_timeout,
        },
    )
    session_local = async_sessionmaker(engine, expire_on_commit=False, class_=AsyncSession)
    return engine, session_local
async def start() -> None:
    """Initialize ChatBot feature at application startup.

    - Creates tables if needed (with retry for transient DB errors)
    - Syncs tool registry to database
    - Initializes LangGraph checkpointer (except in dev)

    Failures during DB setup are logged and swallowed so the application can
    still boot without the ChatBot feature.
    """
    global _engine, _SessionLocal
    from modules.features.chatBot.subChatbotDatabase import init_models as _initModels
    from modules.features.chatBot.subChatbotDatabase import (
        sync_tools_from_registry as _syncToolsFromRegistry,
    )
    # NOTE(review): this early return disables the ENTIRE ChatBot feature on
    # Windows — it does not switch the event loop policy as the original
    # comment ("Ensure Windows uses SelectorEventLoop") claimed, and it makes
    # the asyncpg driver selection in _make_sqlalchemy_db_url unreachable in
    # this module. Confirm this is deliberate.
    if sys.platform == "win32":
        return
    try:
        if _engine is None:
            _engine, _SessionLocal = _create_engine_with_pool()
        # Ensure DB schema exists with retry (handles transient startup issues)
        await _initModelsWithRetry(_engine, _initModels)
        # Sync tools into DB
        async with _SessionLocal() as session:
            await _syncToolsFromRegistry(session)
            await session.commit()
        logger.info("ChatBot tools synced from registry to database")
    except Exception as exc:
        logger.error(
            f"ChatBot startup failed: {type(exc).__name__}: {str(exc)}",
            exc_info=True,
        )
        # Intentionally swallow to avoid aborting app startup
        return
    # Initialize LangGraph checkpointer (skip in dev)
    global _closeCheckpointerCallable
    isDev = str(APP_CONFIG.get("APP_ENV_LABEL")).lower() in ("dev", "development")
    if not isDev:
        try:
            from modules.features.chatBot.utils.checkpointer import (
                initialize_checkpointer as _initializeCheckpointer,
                close_checkpointer as _closeCheckpointer,
            )
            await _initializeCheckpointer()
            # Remember the close callable so stop() can tear it down
            _closeCheckpointerCallable = _closeCheckpointer
            logger.info("LangGraph checkpointer initialized successfully (ChatBot)")
        except Exception as e:
            logger.error(
                f"Failed to initialize LangGraph checkpointer (ChatBot): {str(e)}"
            )
            _closeCheckpointerCallable = None
    else:
        _closeCheckpointerCallable = None
        logger.info("LangGraph checkpointer disabled in dev environment (ChatBot)")
async def stop() -> None:
    """Shutdown hook for ChatBot feature (closes checkpointer if initialized)."""
    global _closeCheckpointerCallable, _engine
    try:
        # Close the LangGraph checkpointer first, if start() registered one.
        closer = _closeCheckpointerCallable
        if callable(closer):
            try:
                await closer()
            finally:
                _closeCheckpointerCallable = None
        # Then dispose the async engine, if one was created.
        engine = _engine
        if engine is not None:
            try:
                await engine.dispose()
            finally:
                _engine = None
    except Exception as exc:
        logger.warning(
            f"ChatBot shutdown encountered an error: {type(exc).__name__}: {str(exc)}",
            exc_info=True,
        )
async def _initModelsWithRetry(engine, initModelsCallable, *, maxRetries: int = 5, baseDelaySeconds: float = 0.5) -> None:
    """Initialize DB models with exponential backoff to avoid failing app startup on transient DB issues.

    Args:
        engine: Async SQLAlchemy engine to run schema creation against.
        initModelsCallable: Awaitable taking the engine (e.g. init_models).
        maxRetries: Retry attempts allowed after the first failure.
        baseDelaySeconds: First retry delay; doubles on each attempt.

    Raises:
        Exception: re-raises the last error once maxRetries is exhausted.
    """
    attempt = 0
    while True:
        try:
            await initModelsCallable(engine)
            return
        except Exception as exc:
            attempt += 1
            if attempt > maxRetries:
                logger.error(
                    f"Failed to initialize chatbot DB models after {maxRetries} attempts: {type(exc).__name__}: {str(exc)}",
                    exc_info=True,
                )
                # Re-raise to let caller handle (feature init may choose to continue)
                raise
            # For transient connection issues, dispose and recreate the engine before retrying.
            # NOTE(review): matching on class names / message substrings is heuristic;
            # "WinError 64" targets Windows' "network name no longer available" — confirm coverage.
            transient = (
                isinstance(exc, OperationalError)
                or "ConnectionDoesNotExistError" in type(exc).__name__
                or "ConnectionResetError" in type(exc).__name__
                or "WinError 64" in str(exc)
            )
            if transient:
                try:
                    global _engine, _SessionLocal
                    if _engine is not None:
                        await _engine.dispose()
                    _engine, _SessionLocal = _create_engine_with_pool()
                    # Point the next attempt at the freshly created module-level engine
                    engine = _engine
                    logger.warning("Recreated async DB engine after transient connection error during init")
                except Exception as recreate_exc:
                    # Recreation failure is non-fatal; the retry below proceeds with the old engine
                    logger.warning(
                        f"Failed to recreate engine after transient error: {type(recreate_exc).__name__}: {str(recreate_exc)}",
                        exc_info=True,
                    )
            # Exponential backoff: base * 2^(attempt-1)
            delay = baseDelaySeconds * (2 ** (attempt - 1))
            logger.warning(
                f"DB init failed (attempt {attempt}/{maxRetries}): {type(exc).__name__}: {str(exc)}; retrying in {delay:.1f}s"
            )
            await asyncio.sleep(delay)
async def get_all_threads_for_user( async def get_all_threads_for_user(
*, *,
user: User, user: User,
@ -685,7 +861,7 @@ async def get_all_tools(*, session: AsyncSession) -> List[dict]:
Returns: Returns:
List of tool dictionaries with all tool information. List of tool dictionaries with all tool information.
""" """
from modules.features.chatBot.database import Tool from modules.features.chatBot.subChatbotDatabase import Tool
logger.info("Fetching all tools from database") logger.info("Fetching all tools from database")
@ -725,7 +901,7 @@ async def grant_tool_to_user(
Raises: Raises:
ValueError: If the tool doesn't exist, is not active, or user already has the tool. ValueError: If the tool doesn't exist, is not active, or user already has the tool.
""" """
from modules.features.chatBot.database import Tool, UserToolMapping from modules.features.chatBot.subChatbotDatabase import Tool, UserToolMapping
import uuid import uuid
logger.info(f"Granting tool {tool_id} to user {user_id}") logger.info(f"Granting tool {tool_id} to user {user_id}")
@ -788,7 +964,7 @@ async def revoke_tool_from_user(
Raises: Raises:
ValueError: If the mapping doesn't exist. ValueError: If the mapping doesn't exist.
""" """
from modules.features.chatBot.database import UserToolMapping from modules.features.chatBot.subChatbotDatabase import UserToolMapping
import uuid import uuid
logger.info(f"Revoking tool {tool_id} from user {user_id}") logger.info(f"Revoking tool {tool_id} from user {user_id}")
@ -836,7 +1012,7 @@ async def update_tool(
Raises: Raises:
ValueError: If the tool doesn't exist or no fields provided to update. ValueError: If the tool doesn't exist or no fields provided to update.
""" """
from modules.features.chatBot.database import Tool from modules.features.chatBot.subChatbotDatabase import Tool
import uuid import uuid
logger.info(f"Updating tool {tool_id}") logger.info(f"Updating tool {tool_id}")
@ -890,7 +1066,7 @@ async def get_tools_for_user(*, user_id: str, session: AsyncSession) -> List[dic
Returns: Returns:
List of tool dictionaries with all tool information. List of tool dictionaries with all tool information.
""" """
from modules.features.chatBot.database import Tool, UserToolMapping from modules.features.chatBot.subChatbotDatabase import Tool, UserToolMapping
logger.info(f"Fetching tools for user {user_id}") logger.info(f"Fetching tools for user {user_id}")
@ -956,7 +1132,7 @@ async def validate_and_get_tools_for_request(
PermissionError: If the user requests tools they don't have access to. PermissionError: If the user requests tools they don't have access to.
ValueError: If the user has no tools available when trying to use all tools. ValueError: If the user has no tools available when trying to use all tools.
""" """
from modules.features.chatBot.database import Tool, UserToolMapping from modules.features.chatBot.subChatbotDatabase import Tool, UserToolMapping
import uuid import uuid
logger.info(f"Validating tools for user {user_id}") logger.info(f"Validating tools for user {user_id}")

View file

@ -0,0 +1,26 @@
import logging
from modules.interfaces.interfaceDbAppObjects import getRootInterface
logger = logging.getLogger(__name__)
async def start() -> None:
    """Start feature triggers and background managers."""
    root_interface = getRootInterface()
    event_user = root_interface.getUserByUsername("event")

    # Feature SyncDelta
    from modules.features.syncDelta import mainSyncDelta

    mainSyncDelta.startSyncManager(event_user)

    # Feature ChatBot
    from modules.features.chatBot.mainChatBot import start as startChatBot

    await startChatBot()
async def stop() -> None:
    """Stop feature triggers and background managers."""
    # Feature ChatBot
    from modules.features.chatBot.mainChatBot import stop as stopChatBot

    await stopChatBot()

View file

@ -1,14 +0,0 @@
# Launch features as events
import asyncio
import logging
from modules.interfaces.interfaceDbAppObjects import getRootInterface
logger = logging.getLogger(__name__)
rootInterface = getRootInterface()
eventUser = rootInterface.getUserByUsername("event")
# Custom features launch
from modules.features.syncDelta import mainSyncDelta
mainSyncDelta.startSyncManager(eventUser)

View file

@ -793,10 +793,10 @@ class AppObjects:
# Continue with saving the new token even if deletion fails # Continue with saving the new token even if deletion fails
# Convert to dict and ensure all fields are properly set # Convert to dict and ensure all fields are properly set
token_dict = token.model_dump() token_dict = token.to_dict()
# Ensure userId is set to current user # Ensure userId is set to current user
# Convert to dict and ensure all fields are properly set # Convert to dict and ensure all fields are properly set
token_dict = token.model_dump() token_dict = token.to_dict()
# Ensure userId is set to current user # Ensure userId is set to current user
token_dict["userId"] = self.currentUser.id token_dict["userId"] = self.currentUser.id
@ -829,7 +829,15 @@ class AppObjects:
if not token.createdAt: if not token.createdAt:
token.createdAt = get_utc_timestamp() token.createdAt = get_utc_timestamp()
# If replace_existing is True, delete old tokens for this connectionId first # Convert to dict and ensure all fields are properly set
token_dict = token.to_dict()
# Ensure userId is set to current user
token_dict["userId"] = self.currentUser.id
# Save to database
self.db.recordCreate(Token, token_dict)
# After successful save, delete old tokens for this connectionId (if requested)
if replace_existing: if replace_existing:
try: try:
old_tokens = self.db.getRecordset( old_tokens = self.db.getRecordset(
@ -837,9 +845,7 @@ class AppObjects:
) )
deleted_count = 0 deleted_count = 0
for old_token in old_tokens: for old_token in old_tokens:
if ( if old_token["id"] != token.id:
old_token["id"] != token.id
): # Don't delete the new token if it already exists
self.db.recordDelete(Token, old_token["id"]) self.db.recordDelete(Token, old_token["id"])
deleted_count += 1 deleted_count += 1
@ -847,20 +853,11 @@ class AppObjects:
logger.info( logger.info(
f"Replaced {deleted_count} old tokens for connectionId {token.connectionId}" f"Replaced {deleted_count} old tokens for connectionId {token.connectionId}"
) )
except Exception as e: except Exception as e:
logger.warning( logger.warning(
f"Failed to delete old tokens for connectionId {token.connectionId}: {str(e)}" f"Failed to delete old tokens for connectionId {token.connectionId}: {str(e)}"
) )
# Continue with saving the new token even if deletion fails # Keep the newly saved token; cleanup can be retried later
# Convert to dict and ensure all fields are properly set
token_dict = token.model_dump()
# Ensure userId is set to current user
token_dict["userId"] = self.currentUser.id
# Save to database
self.db.recordCreate(Token, token_dict)
except Exception as e: except Exception as e:
logger.error(f"Error saving connection token: {str(e)}") logger.error(f"Error saving connection token: {str(e)}")
@ -1218,9 +1215,9 @@ def getRootInterface() -> AppObjects:
if not users: if not users:
raise ValueError("Initial user not found in database") raise ValueError("Initial user not found in database")
# Convert to User model # Convert to User model (use helper compatible with our models)
user_data = users[0] user_data = users[0]
rootUser = User.model_validate(user_data) rootUser = User.from_dict(user_data)
# Create root interface with the root user # Create root interface with the root user
_rootAppObjects = AppObjects(rootUser) _rootAppObjects = AppObjects(rootUser)

View file

@ -84,12 +84,20 @@ class ChatObjects:
model_fields = {} model_fields = {}
if hasattr(model_class, '__fields__'): if hasattr(model_class, '__fields__'):
model_fields = model_class.__fields__ model_fields = model_class.__fields__
elif hasattr(model_class, 'model_fields'):
model_fields = model_class.model_fields
for field_name, value in data.items(): for field_name, value in data.items():
# Check if this field should be stored as JSONB in the database # Check if this field should be stored as JSONB in the database
if field_name in model_fields: if field_name in model_fields:
field_info = model_fields[field_name] field_info = model_fields[field_name]
field_type = field_info.type_ # Handle both Pydantic v1 and v2
if hasattr(field_info, 'type_'):
field_type = field_info.type_ # Pydantic v1
elif hasattr(field_info, 'annotation'):
field_type = field_info.annotation # Pydantic v2
else:
field_type = type(value) # Fallback
# Check if this is a JSONB field (Dict, List, or complex types) # Check if this is a JSONB field (Dict, List, or complex types)
if (field_type == dict or if (field_type == dict or
@ -312,8 +320,10 @@ class ChatObjects:
logs_data = object_fields['logs'] logs_data = object_fields['logs']
try: try:
for log_data in logs_data: for log_data in logs_data:
if hasattr(log_data, 'dict'): if hasattr(log_data, 'model_dump'):
log_dict = log_data.dict() log_dict = log_data.model_dump() # Pydantic v2
elif hasattr(log_data, 'dict'):
log_dict = log_data.dict() # Pydantic v1
elif hasattr(log_data, 'to_dict'): elif hasattr(log_data, 'to_dict'):
log_dict = log_data.to_dict() log_dict = log_data.to_dict()
else: else:
@ -326,8 +336,10 @@ class ChatObjects:
messages_data = object_fields['messages'] messages_data = object_fields['messages']
try: try:
for message_data in messages_data: for message_data in messages_data:
if hasattr(message_data, 'dict'): if hasattr(message_data, 'model_dump'):
msg_dict = message_data.dict() msg_dict = message_data.model_dump() # Pydantic v2
elif hasattr(message_data, 'dict'):
msg_dict = message_data.dict() # Pydantic v1
elif hasattr(message_data, 'to_dict'): elif hasattr(message_data, 'to_dict'):
msg_dict = message_data.to_dict() msg_dict = message_data.to_dict()
else: else:
@ -536,8 +548,10 @@ class ChatObjects:
created_documents = [] created_documents = []
for doc_data in documents_to_create: for doc_data in documents_to_create:
# Convert to dict if it's a Pydantic object # Convert to dict if it's a Pydantic object
if hasattr(doc_data, 'dict'): if hasattr(doc_data, 'model_dump'):
doc_dict = doc_data.dict() doc_dict = doc_data.model_dump() # Pydantic v2
elif hasattr(doc_data, 'dict'):
doc_dict = doc_data.dict() # Pydantic v1
elif hasattr(doc_data, 'to_dict'): elif hasattr(doc_data, 'to_dict'):
doc_dict = doc_data.to_dict() doc_dict = doc_data.to_dict()
else: else:
@ -651,8 +665,10 @@ class ChatObjects:
documents_data = object_fields['documents'] documents_data = object_fields['documents']
try: try:
for doc_data in documents_data: for doc_data in documents_data:
if hasattr(doc_data, 'dict'): if hasattr(doc_data, 'model_dump'):
doc_dict = doc_data.dict() doc_dict = doc_data.model_dump() # Pydantic v2
elif hasattr(doc_data, 'dict'):
doc_dict = doc_data.dict() # Pydantic v1
elif hasattr(doc_data, 'to_dict'): elif hasattr(doc_data, 'to_dict'):
doc_dict = doc_data.to_dict() doc_dict = doc_data.to_dict()
else: else:
@ -1014,7 +1030,7 @@ class ChatObjects:
items.append({ items.append({
"type": "message", "type": "message",
"createdAt": msg_timestamp, "createdAt": msg_timestamp,
"item": chat_message.dict() "item": chat_message.model_dump() if hasattr(chat_message, 'model_dump') else chat_message.dict()
}) })
# Get logs # Get logs
@ -1029,7 +1045,7 @@ class ChatObjects:
items.append({ items.append({
"type": "log", "type": "log",
"createdAt": log_timestamp, "createdAt": log_timestamp,
"item": chat_log.dict() "item": chat_log.model_dump() if hasattr(chat_log, 'model_dump') else chat_log.dict()
}) })
# Get stats # Get stats
@ -1044,7 +1060,7 @@ class ChatObjects:
items.append({ items.append({
"type": "stat", "type": "stat",
"createdAt": stat_timestamp, "createdAt": stat_timestamp,
"item": chat_stat.dict() "item": chat_stat.model_dump() if hasattr(chat_stat, 'model_dump') else chat_stat.dict()
}) })
# Sort all items by createdAt timestamp for chronological order # Sort all items by createdAt timestamp for chronological order

View file

@ -1,18 +1,16 @@
from fastapi import APIRouter, Depends, HTTPException, status from fastapi import APIRouter, Depends, HTTPException, status
from fastapi.requests import Request from fastapi.requests import Request
from fastapi.responses import StreamingResponse from fastapi.responses import StreamingResponse
from typing import Any, Dict, List, Optional
from datetime import datetime
import logging import logging
import uuid
from sqlalchemy.ext.asyncio import AsyncSession
from modules.features.chatBot.database import get_async_db_session
from modules.features.chatBot.service import (
get_or_create_thread_for_user,
)
from modules.datamodels.datamodelUam import User, UserPrivilege from modules.datamodels.datamodelUam import User, UserPrivilege
from modules.security.auth import getCurrentUser, limiter
from sqlalchemy.ext.asyncio import AsyncSession
from modules.features.chatBot.subChatbotDatabase import get_async_db_session
from modules.features.chatBot.mainChatBot import (
get_or_create_thread_for_user,
)
from modules.datamodels.datamodelChatbot import ( from modules.datamodels.datamodelChatbot import (
ChatMessageRequest, ChatMessageRequest,
MessageItem, MessageItem,
@ -30,9 +28,9 @@ from modules.datamodels.datamodelChatbot import (
RevokeToolResponse, RevokeToolResponse,
UpdateToolRequest, UpdateToolRequest,
UpdateToolResponse, UpdateToolResponse,
) )
from modules.security.auth import getCurrentUser, limiter from modules.features.chatBot import mainChatBot as chat_service
from modules.features.chatBot import service as chat_service
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)

View file

@ -49,7 +49,7 @@ async def create_prompt(
managementInterface = interfaceDbComponentObjects.getInterface(currentUser) managementInterface = interfaceDbComponentObjects.getInterface(currentUser)
# Convert Prompt to dict for interface # Convert Prompt to dict for interface
prompt_data = prompt.dict() prompt_data = prompt.model_dump() if hasattr(prompt, "model_dump") else prompt.dict()
# Create prompt # Create prompt
newPrompt = managementInterface.createPrompt(prompt_data) newPrompt = managementInterface.createPrompt(prompt_data)
@ -96,7 +96,10 @@ async def update_prompt(
) )
# Convert Prompt to dict for interface, excluding the id field # Convert Prompt to dict for interface, excluding the id field
update_data = promptData.dict(exclude={'id'}) if hasattr(promptData, "model_dump"):
update_data = promptData.model_dump(exclude={"id"})
else:
update_data = promptData.dict(exclude={"id"})
# Update prompt # Update prompt
updatedPrompt = managementInterface.updatePrompt(promptId, update_data) updatedPrompt = managementInterface.updatePrompt(promptId, update_data)

View file

@ -18,7 +18,7 @@ import modules.interfaces.interfaceDbAppObjects as interfaceDbAppObjects
from modules.security.auth import getCurrentUser, limiter, getCurrentUser from modules.security.auth import getCurrentUser, limiter, getCurrentUser
# Import the attribute definition and helper functions # Import the attribute definition and helper functions
from modules.datamodels.datamodelUam import User from modules.datamodels.datamodelUam import User, UserPrivilege
from modules.shared.attributeUtils import AttributeDefinition from modules.shared.attributeUtils import AttributeDefinition
from modules.shared.attributeUtils import getModelAttributeDefinitions, AttributeResponse from modules.shared.attributeUtils import getModelAttributeDefinitions, AttributeResponse
@ -93,7 +93,7 @@ async def create_user(
appInterface = interfaceDbAppObjects.getInterface(currentUser) appInterface = interfaceDbAppObjects.getInterface(currentUser)
# Convert User to dict for interface # Convert User to dict for interface
user_dict = user_data.dict() user_dict = user_data.model_dump() if hasattr(user_data, "model_dump") else user_data.dict()
# Create user # Create user
newUser = appInterface.createUser(user_dict) newUser = appInterface.createUser(user_dict)
@ -120,7 +120,7 @@ async def update_user(
) )
# Convert User to dict for interface # Convert User to dict for interface
update_data = userData.dict() update_data = userData.model_dump() if hasattr(userData, "model_dump") else userData.dict()
# Update user # Update user
updatedUser = appInterface.updateUser(userId, update_data) updatedUser = appInterface.updateUser(userId, update_data)
@ -151,7 +151,7 @@ async def reset_user_password(
) )
# Get user interface # Get user interface
appInterface = getInterface(currentUser) appInterface = interfaceDbAppObjects.getInterface(currentUser)
# Get target user # Get target user
target_user = appInterface.getUserById(userId) target_user = appInterface.getUserById(userId)
@ -228,7 +228,7 @@ async def change_password(
"""Change current user's password""" """Change current user's password"""
try: try:
# Get user interface # Get user interface
appInterface = getInterface(currentUser) appInterface = interfaceDbAppObjects.getInterface(currentUser)
# Verify current password # Verify current password
if not appInterface.verifyPassword(currentPassword, currentUser.passwordHash): if not appInterface.verifyPassword(currentPassword, currentUser.passwordHash):

View file

@ -110,7 +110,6 @@ class ExtractorRegistry:
mime_types = extractor.getSupportedMimeTypes() mime_types = extractor.getSupportedMimeTypes()
for mime_type in mime_types: for mime_type in mime_types:
self.register(mime_type, extractor) self.register(mime_type, extractor)
logger.debug(f"Registered MIME type: {mime_type}{extractor.__class__.__name__}")
# Register file extensions # Register file extensions
extensions = extractor.getSupportedExtensions() extensions = extractor.getSupportedExtensions()
@ -118,7 +117,6 @@ class ExtractorRegistry:
# Remove leading dot for registry key # Remove leading dot for registry key
ext_key = ext.lstrip('.') ext_key = ext.lstrip('.')
self.register(ext_key, extractor) self.register(ext_key, extractor)
logger.debug(f"Registered extension: .{ext_key}{extractor.__class__.__name__}")
except Exception as e: except Exception as e:
logger.error(f"Failed to auto-register {extractor.__class__.__name__}: {str(e)}") logger.error(f"Failed to auto-register {extractor.__class__.__name__}: {str(e)}")

View file

@ -94,7 +94,6 @@ class WorkflowService:
for doc in message.documents: for doc in message.documents:
if doc.id == doc_id: if doc.id == doc_id:
doc_name = getattr(doc, 'fileName', 'unknown') doc_name = getattr(doc, 'fileName', 'unknown')
logger.debug(f"Found docItem reference {doc_ref}: {doc_name}")
all_documents.append(doc) all_documents.append(doc)
break break
elif doc_ref.startswith("docList:"): elif doc_ref.startswith("docList:"):
@ -104,21 +103,16 @@ class WorkflowService:
# Format: docList:<messageId>:<label> # Format: docList:<messageId>:<label>
message_id = parts[1] message_id = parts[1]
label = parts[2] label = parts[2]
logger.debug(f"Looking for message with ID: {message_id} and label: {label}")
# Find the message by ID and get all its documents # Find the message by ID and get all its documents
message_found = False message_found = False
for message in workflow.messages: for message in workflow.messages:
logger.debug(f"Checking message ID: {message.id} (looking for: {message_id})")
if str(message.id) == message_id: if str(message.id) == message_id:
message_found = True message_found = True
logger.debug(f"Found message {message.id} with documentsLabel: {getattr(message, 'documentsLabel', 'None')}")
if message.documents: if message.documents:
doc_names = [doc.fileName for doc in message.documents if hasattr(doc, 'fileName')] doc_names = [doc.fileName for doc in message.documents if hasattr(doc, 'fileName')]
logger.debug(f"Found docList reference {doc_ref}: {len(message.documents)} documents - {doc_names}")
all_documents.extend(message.documents) all_documents.extend(message.documents)
else: else:
logger.debug(f"Found docList reference {doc_ref} but message has no documents") pass
break break
if not message_found: if not message_found:
@ -128,7 +122,6 @@ class WorkflowService:
elif len(parts) >= 2: elif len(parts) >= 2:
# Format: docList:<label> - find message by documentsLabel # Format: docList:<label> - find message by documentsLabel
label = parts[1] label = parts[1]
logger.debug(f"Looking for message with documentsLabel: {label}")
# Find messages with matching documentsLabel # Find messages with matching documentsLabel
matching_messages = [] matching_messages = []
for message in workflow.messages: for message in workflow.messages:
@ -136,10 +129,8 @@ class WorkflowService:
msg_label = getattr(message, 'documentsLabel', None) msg_label = getattr(message, 'documentsLabel', None)
if msg_label == label: if msg_label == label:
matching_messages.append(message) matching_messages.append(message)
logger.debug(f"Found message {message.id} with matching documentsLabel: {msg_label}")
else: else:
# Debug: show what labels we're comparing pass
logger.debug(f"Message {message.id} has documentsLabel: '{msg_label}' (looking for: '{label}')")
if matching_messages: if matching_messages:
# Use the newest message (highest publishedAt) # Use the newest message (highest publishedAt)
@ -148,10 +139,9 @@ class WorkflowService:
if newest_message.documents: if newest_message.documents:
doc_names = [doc.fileName for doc in newest_message.documents if hasattr(doc, 'fileName')] doc_names = [doc.fileName for doc in newest_message.documents if hasattr(doc, 'fileName')]
logger.debug(f"Found docList reference {doc_ref}: {len(newest_message.documents)} documents - {doc_names}")
all_documents.extend(newest_message.documents) all_documents.extend(newest_message.documents)
else: else:
logger.debug(f"Found docList reference {doc_ref} but message has no documents") pass
else: else:
logger.error(f"No messages found with documentsLabel: {label}") logger.error(f"No messages found with documentsLabel: {label}")
raise ValueError(f"Document reference not found: docList:{label}") raise ValueError(f"Document reference not found: docList:{label}")
@ -167,9 +157,6 @@ class WorkflowService:
action_num = int(label_parts[2].replace('action', '')) action_num = int(label_parts[2].replace('action', ''))
context_info = label_parts[3] context_info = label_parts[3]
logger.debug(f"Resolving round reference: round{round_num}_task{task_num}_action{action_num}_{context_info}")
logger.debug(f"Looking for messages with documentsLabel matching: {doc_ref}")
# Find messages with matching documentsLabel (this is the correct way!) # Find messages with matching documentsLabel (this is the correct way!)
# In case of retries, we want the NEWEST message (most recent publishedAt) # In case of retries, we want the NEWEST message (most recent publishedAt)
matching_messages = [] matching_messages = []
@ -180,7 +167,6 @@ class WorkflowService:
if msg_documents_label == doc_ref: if msg_documents_label == doc_ref:
# Found a matching message, collect it for comparison # Found a matching message, collect it for comparison
matching_messages.append(message) matching_messages.append(message)
logger.debug(f"Found message {message.id} with matching documentsLabel: {msg_documents_label}")
# If we found matching messages, take the newest one (highest publishedAt) # If we found matching messages, take the newest one (highest publishedAt)
if matching_messages: if matching_messages:
@ -188,9 +174,6 @@ class WorkflowService:
matching_messages.sort(key=lambda msg: getattr(msg, 'publishedAt', 0), reverse=True) matching_messages.sort(key=lambda msg: getattr(msg, 'publishedAt', 0), reverse=True)
newest_message = matching_messages[0] newest_message = matching_messages[0]
logger.debug(f"Found {len(matching_messages)} matching messages, using newest: {newest_message.id} (publishedAt: {getattr(newest_message, 'publishedAt', 'unknown')})")
logger.debug(f"Newest message has {len(newest_message.documents) if newest_message.documents else 0} documents")
if newest_message.documents: if newest_message.documents:
doc_names = [doc.fileName for doc in newest_message.documents if hasattr(doc, 'fileName')] doc_names = [doc.fileName for doc in newest_message.documents if hasattr(doc, 'fileName')]
logger.debug(f"Added {len(newest_message.documents)} documents from newest message {newest_message.id}: {doc_names}") logger.debug(f"Added {len(newest_message.documents)} documents from newest message {newest_message.id}: {doc_names}")
@ -219,8 +202,6 @@ class WorkflowService:
if token: if token:
if hasattr(token, 'expiresAt') and token.expiresAt: if hasattr(token, 'expiresAt') and token.expiresAt:
current_time = get_utc_timestamp() current_time = get_utc_timestamp()
logger.debug(f"getConnectionReferenceFromUserConnection: Current time: {current_time}")
logger.debug(f"getConnectionReferenceFromUserConnection: Token expires at: {token.expiresAt}")
if current_time > token.expiresAt: if current_time > token.expiresAt:
token_status = "expired" token_status = "expired"
else: else:
@ -624,14 +605,6 @@ class WorkflowService:
# Use the provided workflow object directly to avoid database reload issues # Use the provided workflow object directly to avoid database reload issues
# that can cause filename truncation. The workflow object should already be up-to-date. # that can cause filename truncation. The workflow object should already be up-to-date.
logger.debug(f"Using provided workflow object for getAvailableDocuments (ID: {workflow.id if hasattr(workflow, 'id') else 'unknown'})")
# Debug: Check document filenames in the workflow object
if hasattr(workflow, 'messages') and workflow.messages:
for message in workflow.messages:
if hasattr(message, 'documents') and message.documents:
for doc in message.documents:
logger.debug(f"Workflow document {doc.id}: fileName='{doc.fileName}' (length: {len(doc.fileName)})")
# Get document reference list using the exact same logic as old system # Get document reference list using the exact same logic as old system
document_list = self._getDocumentReferenceList(workflow) document_list = self._getDocumentReferenceList(workflow)
@ -797,8 +770,6 @@ class WorkflowService:
for doc in documents: for doc in documents:
try: try:
original_filename = doc.fileName original_filename = doc.fileName
logger.debug(f"Before refresh - Document {doc.id}: fileName='{original_filename}' (length: {len(original_filename)})")
# Skip invalid docs early if essential identifiers are missing # Skip invalid docs early if essential identifiers are missing
if not getattr(doc, 'fileId', None): if not getattr(doc, 'fileId', None):
logger.debug(f"Skipping document {doc.id} due to missing fileId") logger.debug(f"Skipping document {doc.id} due to missing fileId")
@ -809,8 +780,6 @@ class WorkflowService:
file_info = self.getFileInfo(doc.fileId) file_info = self.getFileInfo(doc.fileId)
if file_info: if file_info:
db_filename = file_info.get("fileName", doc.fileName) db_filename = file_info.get("fileName", doc.fileName)
logger.debug(f"Database filename for {doc.id}: '{db_filename}' (length: {len(db_filename)})")
doc.fileName = file_info.get("fileName", doc.fileName) doc.fileName = file_info.get("fileName", doc.fileName)
doc.fileSize = file_info.get("size", doc.fileSize) doc.fileSize = file_info.get("size", doc.fileSize)
doc.mimeType = file_info.get("mimeType", doc.mimeType) doc.mimeType = file_info.get("mimeType", doc.mimeType)
@ -820,7 +789,6 @@ class WorkflowService:
logger.debug(f"Document {doc.id} has missing mimeType; will be filtered from index") logger.debug(f"Document {doc.id} has missing mimeType; will be filtered from index")
setattr(doc, 'fileSize', 0) setattr(doc, 'fileSize', 0)
logger.debug(f"After refresh - Document {doc.id}: fileName='{doc.fileName}' (length: {len(doc.fileName)})")
else: else:
logger.warning(f"File not found for document {doc.id}, fileId: {doc.fileId}") logger.warning(f"File not found for document {doc.id}, fileId: {doc.fileId}")
setattr(doc, 'fileSize', 0) setattr(doc, 'fileSize', 0)
@ -838,8 +806,6 @@ class WorkflowService:
def _getDocumentReferenceFromChatDocument(self, document, message) -> str: def _getDocumentReferenceFromChatDocument(self, document, message) -> str:
"""Get document reference using document ID and filename.""" """Get document reference using document ID and filename."""
try: try:
# Debug logging to track filename truncation
logger.debug(f"Creating document reference for {document.id}: fileName='{document.fileName}' (length: {len(document.fileName)})")
# Use document ID and filename for simple reference # Use document ID and filename for simple reference
return f"docItem:{document.id}:{document.fileName}" return f"docItem:{document.id}:{document.fileName}"
except Exception as e: except Exception as e:

View file

@ -279,13 +279,21 @@ def getModelAttributeDefinitions(
frontend_options = field_info.extra.get("frontend_options") frontend_options = field_info.extra.get("frontend_options")
# Use frontend type if available, otherwise fall back to Python type # Use frontend type if available, otherwise fall back to Python type
# Handle both Pydantic v1 and v2
if hasattr(field, 'type_'):
field_annotation = field.type_ # Pydantic v1
elif hasattr(field, 'annotation'):
field_annotation = field.annotation # Pydantic v2
else:
field_annotation = type(None) # Fallback
field_type = ( field_type = (
frontend_type frontend_type
if frontend_type if frontend_type
else ( else (
field.type_.__name__ field_annotation.__name__
if hasattr(field.type_, "__name__") if hasattr(field_annotation, "__name__")
else str(field.type_) else str(field_annotation)
) )
) )

View file

@ -104,7 +104,10 @@ class ReactMode(BaseMode):
if getattr(self, 'workflowIntent', None) and result.documents: if getattr(self, 'workflowIntent', None) and result.documents:
validationResult = await self.contentValidator.validateContent(result.documents, self.workflowIntent) validationResult = await self.contentValidator.validateContent(result.documents, self.workflowIntent)
observation['contentValidation'] = validationResult observation['contentValidation'] = validationResult
logger.info(f"Content validation: {validationResult['overallSuccess']} (quality: {validationResult['qualityScore']:.2f})") quality_score = validationResult.get('qualityScore', 0.0)
if quality_score is None:
quality_score = 0.0
logger.info(f"Content validation: {validationResult['overallSuccess']} (quality: {quality_score:.2f})")
# NEW: Learn from feedback # NEW: Learn from feedback
feedback = self._collectFeedback(result, validationResult, self.workflowIntent) feedback = self._collectFeedback(result, validationResult, self.workflowIntent)
@ -578,7 +581,10 @@ class ReactMode(BaseMode):
validation = observation['contentValidation'] validation = observation['contentValidation']
enhancedReviewContent += f"\n\nCONTENT VALIDATION:\n" enhancedReviewContent += f"\n\nCONTENT VALIDATION:\n"
enhancedReviewContent += f"Overall Success: {validation['overallSuccess']}\n" enhancedReviewContent += f"Overall Success: {validation['overallSuccess']}\n"
enhancedReviewContent += f"Quality Score: {validation['qualityScore']:.2f}\n" quality_score = validation.get('qualityScore', 0.0)
if quality_score is None:
quality_score = 0.0
enhancedReviewContent += f"Quality Score: {quality_score:.2f}\n"
if validation['improvementSuggestions']: if validation['improvementSuggestions']:
enhancedReviewContent += f"Improvement Suggestions: {', '.join(validation['improvementSuggestions'])}\n" enhancedReviewContent += f"Improvement Suggestions: {', '.join(validation['improvementSuggestions'])}\n"

View file

@ -20,6 +20,7 @@ bcrypt==4.0.1 # For password hashing
## Database ## Database
mysql-connector-python==8.1.0 mysql-connector-python==8.1.0
SQLAlchemy>=2.0.30
## PDF & Document Processing ## PDF & Document Processing
reportlab==4.0.4 reportlab==4.0.4
@ -58,6 +59,7 @@ Pillow>=10.0.0 # Für Bildverarbeitung (als PIL importiert)
python-dateutil==2.8.2 python-dateutil==2.8.2
python-dotenv==1.0.0 python-dotenv==1.0.0
pytz>=2023.3 # For timezone handling and UTC operations pytz>=2023.3 # For timezone handling and UTC operations
anyio>=4.2.0 # Used by chatbot tools and async utilities
## Dependencies for trio (used by httpx) ## Dependencies for trio (used by httpx)
sortedcontainers>=2.4.0 # Required by trio sortedcontainers>=2.4.0 # Required by trio
@ -109,6 +111,7 @@ xyzservices>=2021.09.1
# PostgreSQL connector dependencies # PostgreSQL connector dependencies
psycopg2-binary==2.9.9 psycopg2-binary==2.9.9
asyncpg==0.30.0
## LangChain & LangGraph ## LangChain & LangGraph
langchain==0.3.27 langchain==0.3.27