module testing

parent 7a9b264170
commit 77e1414744

81 changed files with 1022 additions and 922 deletions
@@ -14,7 +14,7 @@ from modules.datamodels.datamodelDocref import DocumentReferenceList
 # Forward references for circular imports (use string annotations)
 if TYPE_CHECKING:
-    from modules.datamodels.datamodelChatbot import ChatDocument, ActionResult
+    from modules.datamodels.datamodelChat import ChatDocument, ActionResult
     from modules.datamodels.datamodelExtraction import ExtractionOptions

@@ -4,7 +4,7 @@
 from typing import Optional, Any, Union, List, Dict, Callable, Awaitable
 from pydantic import BaseModel, Field
-from modules.datamodels.datamodelChatbot import ActionResult
+from modules.datamodels.datamodelChat import ActionResult
 from modules.shared.frontendTypes import FrontendType
 from modules.shared.attributeUtils import registerModelLabels

@@ -13,7 +13,7 @@ import asyncio
 import re
 from typing import Optional, Dict, Any, List

-from modules.datamodels.datamodelChatbot import ChatWorkflow, UserInputRequest, WorkflowModeEnum, ChatLog, ChatDocument
+from modules.datamodels.datamodelChat import ChatWorkflow, UserInputRequest, WorkflowModeEnum, ChatLog, ChatDocument
 from modules.datamodels.datamodelUam import User
 from modules.datamodels.datamodelAi import AiCallRequest, AiCallOptions, OperationTypeEnum, ProcessingModeEnum
 from modules.datamodels.datamodelDocref import DocumentReferenceList, DocumentItemReference

@@ -335,7 +335,7 @@ async def _emit_log_and_event(
     # Emit event directly for streaming (using correct signature)
     if created_log and event_manager:
         try:
-            from modules.datamodels.datamodelChatbot import ChatLog
+            from modules.datamodels.datamodelChat import ChatLog
             # Convert to dict if it's a Pydantic model
             if hasattr(created_log, "model_dump"):
                 log_dict = created_log.model_dump()

@@ -12,7 +12,7 @@ import logging
 import json
 from typing import Dict, Any, Optional

-from modules.datamodels.datamodelChatbot import ChatWorkflow, UserInputRequest, WorkflowModeEnum, AutomationDefinition
+from modules.datamodels.datamodelChat import ChatWorkflow, UserInputRequest, WorkflowModeEnum, AutomationDefinition
 from modules.datamodels.datamodelUam import User
 from modules.shared.timeUtils import getUtcTimestamp
 from modules.shared.eventManagement import eventManager

@@ -16,7 +16,7 @@ from modules.security.rbac import RbacClass
 from modules.datamodels.datamodelRbac import AccessRuleContext
 from modules.datamodels.datamodelUam import AccessLevel

-from modules.datamodels.datamodelChatbot import (
+from modules.datamodels.datamodelChat import (
     ChatDocument,
     ChatStat,
     ChatLog,
@@ -363,7 +363,16 @@ async def listUsersWithRoles(
     interface = getRootInterface()

-    # Get all users (SysAdmin sees all)
-    users = interface.getUsers()
+    # Use db.getRecordset with UserInDB (the actual database model)
+    from modules.datamodels.datamodelUam import User, UserInDB
+    allUsersData = interface.db.getRecordset(UserInDB)
+    # Convert to User objects, filtering out sensitive fields
+    users = []
+    for u in allUsersData:
+        cleanedUser = {k: v for k, v in u.items() if not k.startswith("_") and k != "hashedPassword" and k != "resetToken" and k != "resetTokenExpires"}
+        if cleanedUser.get("roleLabels") is None:
+            cleanedUser["roleLabels"] = []
+        users.append(User(**cleanedUser))

     # Filter by mandate if specified (via UserMandate table)
     if mandateId:
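Note: the record-cleaning step added above also recurs in the get_users hunks further down; isolated, it is a small helper. A minimal sketch (the name cleanUserRecord and the SENSITIVE set are illustrative, not part of the commit):

SENSITIVE = {"hashedPassword", "resetToken", "resetTokenExpires"}

def cleanUserRecord(record: dict) -> dict:
    """Drop DB-internal keys and credential material before building a User."""
    cleaned = {k: v for k, v in record.items()
               if not k.startswith("_") and k not in SENSITIVE}
    if cleaned.get("roleLabels") is None:
        cleaned["roleLabels"] = []  # ensure roleLabels is always a list
    return cleaned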
@@ -15,7 +15,7 @@ import json
 # Import interfaces and models
 from modules.interfaces.interfaceDbChatbot import getInterface as getChatInterface
 from modules.auth import getCurrentUser, limiter
-from modules.datamodels.datamodelChatbot import AutomationDefinition, ChatWorkflow
+from modules.datamodels.datamodelChat import AutomationDefinition, ChatWorkflow
 from modules.datamodels.datamodelPagination import PaginationParams, PaginatedResponse, PaginationMetadata, normalize_pagination_dict
 from modules.shared.attributeUtils import getModelAttributeDefinitions
 from modules.features.workflow import executeAutomation

@@ -321,11 +321,11 @@ async def delete_mandate(
 # User Management within Mandates (Mandate-Admin)
 # =============================================================================

-@router.get("/{mandateId}/users", response_model=List[MandateUserInfo])
+@router.get("/{targetMandateId}/users", response_model=List[MandateUserInfo])
 @limiter.limit("60/minute")
 async def listMandateUsers(
     request: Request,
-    mandateId: str = Path(..., description="ID of the mandate"),
+    targetMandateId: str = Path(..., description="ID of the mandate"),
     context: RequestContext = Depends(getRequestContext)
 ) -> List[MandateUserInfo]:
     """
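Note on the rename: in FastAPI the placeholder in the route string and the Path parameter in the signature must carry the same name, which is why the decorator and the function signature change together in every hunk below. A minimal standalone sketch (hypothetical app, not part of the commit):

from fastapi import FastAPI, Path

app = FastAPI()

@app.get("/{targetMandateId}/users")
async def listUsers(
    targetMandateId: str = Path(..., description="ID of the mandate")
) -> dict:
    # placeholder name and parameter name must match
    return {"mandateId": targetMandateId}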
@@ -334,7 +334,7 @@ async def listMandateUsers(
     Requires Mandate-Admin role or SysAdmin.
     """
     # Check permission
-    if not _hasMandateAdminRole(context, mandateId) and not context.isSysAdmin:
+    if not _hasMandateAdminRole(context, targetMandateId) and not context.isSysAdmin:
         raise HTTPException(
             status_code=status.HTTP_403_FORBIDDEN,
             detail="Mandate-Admin role required"

@@ -344,17 +344,17 @@ async def listMandateUsers(
     rootInterface = interfaceDbAppObjects.getRootInterface()

     # Verify mandate exists
-    mandate = rootInterface.getMandate(mandateId)
+    mandate = rootInterface.getMandate(targetMandateId)
     if not mandate:
         raise HTTPException(
             status_code=status.HTTP_404_NOT_FOUND,
-            detail=f"Mandate {mandateId} not found"
+            detail=f"Mandate {targetMandateId} not found"
         )

     # Get all UserMandate entries for this mandate
     userMandates = rootInterface.db.getRecordset(
         UserMandate,
-        recordFilter={"mandateId": mandateId}
+        recordFilter={"mandateId": targetMandateId}
     )

     result = []

@@ -383,18 +383,18 @@ async def listMandateUsers(
     except HTTPException:
         raise
     except Exception as e:
-        logger.error(f"Error listing users for mandate {mandateId}: {e}")
+        logger.error(f"Error listing users for mandate {targetMandateId}: {e}")
         raise HTTPException(
             status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
             detail=f"Failed to list users: {str(e)}"
         )


-@router.post("/{mandateId}/users", response_model=UserMandateResponse)
+@router.post("/{targetMandateId}/users", response_model=UserMandateResponse)
 @limiter.limit("30/minute")
 async def addUserToMandate(
     request: Request,
-    mandateId: str = Path(..., description="ID of the mandate"),
+    targetMandateId: str = Path(..., description="ID of the mandate"),
     data: UserMandateCreate = Body(...),
     context: RequestContext = Depends(getRequestContext)
 ) -> UserMandateResponse:
@@ -405,7 +405,7 @@ async def addUserToMandate(
     SysAdmin cannot add themselves (Self-Escalation Prevention).

     Args:
-        mandateId: Target mandate ID
+        targetMandateId: Target mandate ID
         data: User ID and role IDs to assign
     """
     # 1. SysAdmin Self-Escalation Prevention

@@ -416,7 +416,7 @@ async def addUserToMandate(
         )

     # 2. Check Mandate-Admin permission
-    if not _hasMandateAdminRole(context, mandateId):
+    if not _hasMandateAdminRole(context, targetMandateId):
         raise HTTPException(
             status_code=status.HTTP_403_FORBIDDEN,
             detail="Mandate-Admin role required to add users"

@@ -426,11 +426,11 @@ async def addUserToMandate(
     rootInterface = interfaceDbAppObjects.getRootInterface()

     # 3. Verify mandate exists
-    mandate = rootInterface.getMandate(mandateId)
+    mandate = rootInterface.getMandate(targetMandateId)
     if not mandate:
         raise HTTPException(
             status_code=status.HTTP_404_NOT_FOUND,
-            detail=f"Mandate {mandateId} not found"
+            detail=f"Mandate {targetMandateId} not found"
         )

     # 4. Verify target user exists

@@ -442,7 +442,7 @@ async def addUserToMandate(
         )

     # 5. Check if user is already a member
-    existingMembership = rootInterface.getUserMandate(data.targetUserId, mandateId)
+    existingMembership = rootInterface.getUserMandate(data.targetUserId, targetMandateId)
     if existingMembership:
         raise HTTPException(
             status_code=status.HTTP_409_CONFLICT,

@@ -459,7 +459,7 @@ async def addUserToMandate(
             )
         role = roleRecords[0]
         roleMandateId = role.get("mandateId")
-        if roleMandateId and str(roleMandateId) != str(mandateId):
+        if roleMandateId and str(roleMandateId) != str(targetMandateId):
             raise HTTPException(
                 status_code=status.HTTP_400_BAD_REQUEST,
                 detail=f"Role {roleId} belongs to a different mandate"
@@ -468,14 +468,14 @@ async def addUserToMandate(
     # 7. Create UserMandate
     userMandate = rootInterface.createUserMandate(
         userId=data.targetUserId,
-        mandateId=mandateId,
+        mandateId=targetMandateId,
         roleIds=data.roleIds
     )

     # 8. Audit - Log permission change with IP address
     audit_logger.logPermissionChange(
         userId=str(context.user.id),
-        mandateId=mandateId,
+        mandateId=targetMandateId,
         action="user_added_to_mandate",
         targetUserId=data.targetUserId,
         details=f"Roles assigned: {data.roleIds}",

@@ -484,14 +484,14 @@ async def addUserToMandate(
     )

     logger.info(
-        f"User {context.user.id} added user {data.targetUserId} to mandate {mandateId} "
+        f"User {context.user.id} added user {data.targetUserId} to mandate {targetMandateId} "
         f"with roles {data.roleIds}"
     )

     return UserMandateResponse(
         userMandateId=str(userMandate.id),
         userId=data.targetUserId,
-        mandateId=mandateId,
+        mandateId=targetMandateId,
         roleIds=data.roleIds,
         enabled=True
     )

@@ -506,11 +506,11 @@ async def addUserToMandate(
     )


-@router.delete("/{mandateId}/users/{targetUserId}", response_model=Dict[str, str])
+@router.delete("/{targetMandateId}/users/{targetUserId}", response_model=Dict[str, str])
 @limiter.limit("30/minute")
 async def removeUserFromMandate(
     request: Request,
-    mandateId: str = Path(..., description="ID of the mandate"),
+    targetMandateId: str = Path(..., description="ID of the mandate"),
     targetUserId: str = Path(..., description="ID of the user to remove"),
     context: RequestContext = Depends(getRequestContext)
 ) -> Dict[str, str]:
@@ -521,11 +521,11 @@ async def removeUserFromMandate(
     Cannot remove the last admin from a mandate (orphan prevention).

     Args:
-        mandateId: Target mandate ID
+        targetMandateId: Target mandate ID
         targetUserId: User ID to remove
     """
     # Check Mandate-Admin permission
-    if not _hasMandateAdminRole(context, mandateId):
+    if not _hasMandateAdminRole(context, targetMandateId):
         raise HTTPException(
             status_code=status.HTTP_403_FORBIDDEN,
             detail="Mandate-Admin role required"

@@ -535,15 +535,15 @@ async def removeUserFromMandate(
     rootInterface = interfaceDbAppObjects.getRootInterface()

     # Verify mandate exists
-    mandate = rootInterface.getMandate(mandateId)
+    mandate = rootInterface.getMandate(targetMandateId)
     if not mandate:
         raise HTTPException(
             status_code=status.HTTP_404_NOT_FOUND,
-            detail=f"Mandate {mandateId} not found"
+            detail=f"Mandate {targetMandateId} not found"
         )

     # Get user's membership
-    membership = rootInterface.getUserMandate(targetUserId, mandateId)
+    membership = rootInterface.getUserMandate(targetUserId, targetMandateId)
     if not membership:
         raise HTTPException(
             status_code=status.HTTP_404_NOT_FOUND,

@@ -551,26 +551,26 @@ async def removeUserFromMandate(
         )

     # Check if this is the last admin (orphan prevention)
-    if _isLastMandateAdmin(rootInterface, mandateId, targetUserId):
+    if _isLastMandateAdmin(rootInterface, targetMandateId, targetUserId):
         raise HTTPException(
             status_code=status.HTTP_400_BAD_REQUEST,
             detail="Cannot remove the last admin from a mandate. Assign another admin first."
         )

     # Delete UserMandate (CASCADE will delete UserMandateRole entries)
-    rootInterface.deleteUserMandate(targetUserId, mandateId)
+    rootInterface.deleteUserMandate(targetUserId, targetMandateId)

     # Audit - Log permission change
     audit_logger.logPermissionChange(
         userId=str(context.user.id),
-        mandateId=mandateId,
+        mandateId=targetMandateId,
         action="user_removed_from_mandate",
         targetUserId=targetUserId,
         details="User removed from mandate",
         resourceType="UserMandate"
     )

-    logger.info(f"User {context.user.id} removed user {targetUserId} from mandate {mandateId}")
+    logger.info(f"User {context.user.id} removed user {targetUserId} from mandate {targetMandateId}")

     return {"message": "User removed from mandate", "userId": targetUserId}
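_isLastMandateAdmin itself is not part of this diff; a hypothetical sketch of the orphan-prevention check, built only from helpers that do appear above (db.getRecordset, getRoleIdsForUserMandate, _hasAdminRoleInList):

def _isLastMandateAdmin(rootInterface, mandateId: str, userId: str) -> bool:
    """Hypothetical: True if userId is the only admin of the mandate."""
    memberships = rootInterface.db.getRecordset(
        UserMandate, recordFilter={"mandateId": mandateId}
    )
    admins = [
        m for m in memberships
        if _hasAdminRoleInList(
            rootInterface,
            rootInterface.getRoleIdsForUserMandate(str(m["id"])),
            mandateId,
        )
    ]
    return len(admins) == 1 and str(admins[0]["userId"]) == str(userId)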
@@ -584,11 +584,11 @@ async def removeUserFromMandate(
     )


-@router.put("/{mandateId}/users/{targetUserId}/roles", response_model=UserMandateResponse)
+@router.put("/{targetMandateId}/users/{targetUserId}/roles", response_model=UserMandateResponse)
 @limiter.limit("30/minute")
 async def updateUserRolesInMandate(
     request: Request,
-    mandateId: str = Path(..., description="ID of the mandate"),
+    targetMandateId: str = Path(..., description="ID of the mandate"),
     targetUserId: str = Path(..., description="ID of the user"),
     roleIds: List[str] = Body(..., description="New role IDs to assign"),
     context: RequestContext = Depends(getRequestContext)

@@ -600,12 +600,12 @@ async def updateUserRolesInMandate(
     Requires Mandate-Admin role.

     Args:
-        mandateId: Target mandate ID
+        targetMandateId: Target mandate ID
         targetUserId: User ID to update
         roleIds: New set of role IDs
     """
     # Check Mandate-Admin permission
-    if not _hasMandateAdminRole(context, mandateId):
+    if not _hasMandateAdminRole(context, targetMandateId):
         raise HTTPException(
             status_code=status.HTTP_403_FORBIDDEN,
             detail="Mandate-Admin role required"

@@ -615,7 +615,7 @@ async def updateUserRolesInMandate(
     rootInterface = interfaceDbAppObjects.getRootInterface()

     # Get user's membership
-    membership = rootInterface.getUserMandate(targetUserId, mandateId)
+    membership = rootInterface.getUserMandate(targetUserId, targetMandateId)
     if not membership:
         raise HTTPException(
             status_code=status.HTTP_404_NOT_FOUND,
@@ -632,7 +632,7 @@ async def updateUserRolesInMandate(
         )
         role = roleRecords[0]
         roleMandateId = role.get("mandateId")
-        if roleMandateId and str(roleMandateId) != str(mandateId):
+        if roleMandateId and str(roleMandateId) != str(targetMandateId):
             raise HTTPException(
                 status_code=status.HTTP_400_BAD_REQUEST,
                 detail=f"Role {roleId} belongs to a different mandate"

@@ -640,11 +640,11 @@ async def updateUserRolesInMandate(

     # Check if removing admin role would leave mandate without admins
     currentRoleIds = rootInterface.getRoleIdsForUserMandate(str(membership.id))
-    isCurrentlyAdmin = _hasAdminRoleInList(rootInterface, currentRoleIds, mandateId)
-    willBeAdmin = _hasAdminRoleInList(rootInterface, roleIds, mandateId)
+    isCurrentlyAdmin = _hasAdminRoleInList(rootInterface, currentRoleIds, targetMandateId)
+    willBeAdmin = _hasAdminRoleInList(rootInterface, roleIds, targetMandateId)

     if isCurrentlyAdmin and not willBeAdmin:
-        if _isLastMandateAdmin(rootInterface, mandateId, targetUserId):
+        if _isLastMandateAdmin(rootInterface, targetMandateId, targetUserId):
             raise HTTPException(
                 status_code=status.HTTP_400_BAD_REQUEST,
                 detail="Cannot remove admin role from the last admin. Assign another admin first."

@@ -665,7 +665,7 @@ async def updateUserRolesInMandate(
     # Audit - Log role assignment change
     audit_logger.logPermissionChange(
         userId=str(context.user.id),
-        mandateId=mandateId,
+        mandateId=targetMandateId,
         action="role_assigned",
         targetUserId=targetUserId,
         details=f"New roles: {roleIds}",

@@ -675,13 +675,13 @@ async def updateUserRolesInMandate(

     logger.info(
         f"User {context.user.id} updated roles for user {targetUserId} "
-        f"in mandate {mandateId} to {roleIds}"
+        f"in mandate {targetMandateId} to {roleIds}"
     )

     return UserMandateResponse(
         userMandateId=str(membership.id),
         userId=targetUserId,
-        mandateId=mandateId,
+        mandateId=targetMandateId,
         roleIds=roleIds,
         enabled=membership.enabled
     )
@@ -71,16 +71,43 @@ async def get_users(
     # MULTI-TENANT: Use mandateId from context (header)
     # SysAdmin without mandateId can see all users
     if context.mandateId:
-        # Get users for specific mandate via UserMandate table
-        from modules.datamodels.datamodelMembership import UserMandate
-        userMandates = appInterface.db.getRecordset(UserMandate, recordFilter={"mandateId": str(context.mandateId)})
-        userIds = [str(um["userId"]) for um in userMandates]
-
-        # Get all users and filter by mandate membership
-        allUsers = appInterface.getUsers()
-        users = [u for u in allUsers if str(u.id) in userIds]
+        # Get users for specific mandate using getUsersByMandate
+        result = appInterface.getUsersByMandate(str(context.mandateId), paginationParams)
+
+        # getUsersByMandate returns PaginatedResult if pagination was provided
+        if paginationParams and hasattr(result, 'items'):
+            return PaginatedResponse(
+                items=result.items,
+                pagination=PaginationMetadata(
+                    currentPage=result.currentPage,
+                    pageSize=result.pageSize,
+                    totalItems=result.totalItems,
+                    totalPages=result.totalPages,
+                    sort=paginationParams.sort,
+                    filters=paginationParams.filters
+                )
+            )
+        else:
+            # No pagination - result is a list
+            users = result if isinstance(result, list) else result.items if hasattr(result, 'items') else []
+            return PaginatedResponse(
+                items=users,
+                pagination=None
+            )
+    elif context.isSysAdmin:
+        # SysAdmin without mandateId sees all users
+        # Get all users directly from database using UserInDB (the actual database model)
+        from modules.datamodels.datamodelUam import UserInDB
+        allUsers = appInterface.db.getRecordset(UserInDB)
+        # Convert to User objects, filtering out password hash and database-specific fields
+        users = []
+        for u in allUsers:
+            cleanedUser = {k: v for k, v in u.items() if not k.startswith("_") and k != "hashedPassword" and k != "resetToken" and k != "resetTokenExpires"}
+            # Ensure roleLabels is always a list
+            if cleanedUser.get("roleLabels") is None:
+                cleanedUser["roleLabels"] = []
+            users.append(User(**cleanedUser))

         # Apply pagination manually if needed
         if paginationParams:
             totalItems = len(users)
             import math
@@ -105,33 +132,6 @@ async def get_users(
                 items=users,
                 pagination=None
             )
-    elif context.isSysAdmin:
-        # SysAdmin without mandateId sees all users
-        result = appInterface.getUsers()
-        if paginationParams:
-            totalItems = len(result)
-            import math
-            totalPages = math.ceil(totalItems / paginationParams.pageSize) if totalItems > 0 else 0
-            startIdx = (paginationParams.page - 1) * paginationParams.pageSize
-            endIdx = startIdx + paginationParams.pageSize
-            paginatedUsers = result[startIdx:endIdx]
-
-            return PaginatedResponse(
-                items=paginatedUsers,
-                pagination=PaginationMetadata(
-                    currentPage=paginationParams.page,
-                    pageSize=paginationParams.pageSize,
-                    totalItems=totalItems,
-                    totalPages=totalPages,
-                    sort=paginationParams.sort,
-                    filters=paginationParams.filters
-                )
-            )
-        else:
-            return PaginatedResponse(
-                items=result,
-                pagination=None
-            )
     else:
         # Non-SysAdmin without mandateId - should not happen (getRequestContext enforces)
         raise HTTPException(
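The manual pagination kept as context above reduces to a simple slice over the full list; a minimal sketch (the function name paginate is illustrative):

import math
from typing import List, Tuple

def paginate(items: List, page: int, pageSize: int) -> Tuple[List, int]:
    """Return one 1-based page of items plus the total page count."""
    totalItems = len(items)
    totalPages = math.ceil(totalItems / pageSize) if totalItems > 0 else 0
    startIdx = (page - 1) * pageSize
    return items[startIdx:startIdx + pageSize], totalPages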
@@ -16,7 +16,7 @@ from modules.auth import limiter, getRequestContext, RequestContext
 import modules.interfaces.interfaceDbChatbot as interfaceDbChatbot

 # Import models
-from modules.datamodels.datamodelChatbot import ChatWorkflow, UserInputRequest, WorkflowModeEnum
+from modules.datamodels.datamodelChat import ChatWorkflow, UserInputRequest, WorkflowModeEnum

 # Import workflow control functions
 from modules.features.workflow import chatStart, chatStop

@@ -22,7 +22,7 @@ import modules.interfaces.interfaceDbChatbot as interfaceDbChatbot
 from modules.interfaces.interfaceRbac import getRecordsetWithRBAC

 # Import models
-from modules.datamodels.datamodelChatbot import ChatWorkflow, UserInputRequest, WorkflowModeEnum
+from modules.datamodels.datamodelChat import ChatWorkflow, UserInputRequest, WorkflowModeEnum
 from modules.datamodels.datamodelPagination import PaginationParams, PaginatedResponse

 # Import chatbot feature
@@ -89,6 +89,212 @@ async def listFeatures(
     )


+# =============================================================================
+# My Feature Instances (No mandate context needed)
+# IMPORTANT: Must be before /{featureCode} to avoid route matching conflict
+# =============================================================================
+
+class FeaturesMyResponse(BaseModel):
+    """Hierarchical response for GET /features/my"""
+    mandates: List[Dict[str, Any]]
+
+
+@router.get("/my", response_model=FeaturesMyResponse)
+@limiter.limit("60/minute")
+async def getMyFeatureInstances(
+    request: Request,
+    context: RequestContext = Depends(getRequestContext)
+) -> FeaturesMyResponse:
+    """
+    Get all feature instances the current user has access to.
+
+    Returns hierarchical structure: mandates -> features -> instances -> permissions
+    This endpoint does not require X-Mandate-Id header.
+    """
+    try:
+        rootInterface = getRootInterface()
+        featureInterface = getFeatureInterface(rootInterface.db)
+
+        # Get all feature accesses for this user
+        featureAccesses = rootInterface.getFeatureAccessesForUser(str(context.user.id))
+
+        if not featureAccesses:
+            return FeaturesMyResponse(mandates=[])
+
+        # Build hierarchical structure: mandate -> feature -> instances
+        mandatesMap: Dict[str, Dict[str, Any]] = {}
+        featuresMap: Dict[str, Dict[str, Any]] = {}  # key: mandateId_featureCode
+
+        for access in featureAccesses:
+            if not access.enabled:
+                continue
+
+            instance = featureInterface.getFeatureInstance(str(access.featureInstanceId))
+            if not instance or not instance.enabled:
+                continue
+
+            # Get mandate info
+            mandateId = str(instance.mandateId)
+            if mandateId not in mandatesMap:
+                mandate = rootInterface.getMandate(mandateId)
+                if mandate:
+                    mandatesMap[mandateId] = {
+                        "id": mandateId,
+                        "name": mandate.name if hasattr(mandate, 'name') else mandateId,
+                        "code": mandate.code if hasattr(mandate, 'code') else None,
+                        "features": []
+                    }
+                else:
+                    mandatesMap[mandateId] = {
+                        "id": mandateId,
+                        "name": mandateId,
+                        "code": None,
+                        "features": []
+                    }
+
+            # Get feature info
+            featureKey = f"{mandateId}_{instance.featureCode}"
+            if featureKey not in featuresMap:
+                feature = featureInterface.getFeature(instance.featureCode)
+                featuresMap[featureKey] = {
+                    "code": instance.featureCode,
+                    "label": feature.label if feature and hasattr(feature, 'label') else {"de": instance.featureCode, "en": instance.featureCode},
+                    "icon": feature.icon if feature and hasattr(feature, 'icon') else "folder",
+                    "instances": [],
+                    "_mandateId": mandateId  # Temporary for grouping
+                }
+
+            # Get user's role in this instance
+            userRole = _getUserRoleInInstance(rootInterface, str(context.user.id), str(instance.id))
+
+            # Get permissions for this instance
+            permissions = _getInstancePermissions(rootInterface, str(context.user.id), str(instance.id))
+
+            # Add instance to feature
+            featuresMap[featureKey]["instances"].append({
+                "id": str(instance.id),
+                "featureCode": instance.featureCode,
+                "mandateId": mandateId,
+                "mandateName": mandatesMap[mandateId]["name"],
+                "instanceLabel": instance.label,
+                "userRole": userRole,
+                "permissions": permissions
+            })
+
+        # Build final structure
+        for featureKey, featureData in featuresMap.items():
+            mandateId = featureData.pop("_mandateId")
+            mandatesMap[mandateId]["features"].append(featureData)
+
+        return FeaturesMyResponse(mandates=list(mandatesMap.values()))
+
+    except Exception as e:
+        logger.error(f"Error getting user's feature instances: {e}")
+        raise HTTPException(
+            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+            detail=f"Failed to get feature instances: {str(e)}"
+        )
+
+
+def _getUserRoleInInstance(rootInterface, userId: str, instanceId: str) -> str:
+    """Get the user's primary role label in a feature instance."""
+    try:
+        from modules.datamodels.datamodelRbac import UserRole, Role
+
+        # Get user-role assignments for this instance
+        userRoles = rootInterface.db.getRecordset(
+            UserRole,
+            recordFilter={"userId": userId}
+        )
+
+        for ur in userRoles:
+            roleId = ur.get("roleId")
+            if roleId:
+                roles = rootInterface.db.getRecordset(Role, recordFilter={"id": roleId})
+                if roles and str(roles[0].get("featureInstanceId")) == instanceId:
+                    return roles[0].get("roleLabel", "user")
+
+        return "user"  # Default
+    except Exception as e:
+        logger.debug(f"Error getting user role: {e}")
+        return "user"
+
+
+def _getInstancePermissions(rootInterface, userId: str, instanceId: str) -> Dict[str, Any]:
+    """Get summarized permissions for a user in an instance."""
+    # Default permissions structure
+    permissions = {
+        "tables": {},
+        "views": {},
+        "fields": {}
+    }
+
+    try:
+        from modules.datamodels.datamodelRbac import UserRole, Role, RolePermission
+
+        # Get user's roles for this instance
+        userRoles = rootInterface.db.getRecordset(UserRole, recordFilter={"userId": userId})
+        roleIds = []
+
+        for ur in userRoles:
+            roleId = ur.get("roleId")
+            if roleId:
+                roles = rootInterface.db.getRecordset(Role, recordFilter={"id": roleId})
+                if roles and str(roles[0].get("featureInstanceId")) == instanceId:
+                    roleIds.append(roleId)
+
+        if not roleIds:
+            return permissions
+
+        # Get permissions for all roles
+        for roleId in roleIds:
+            rolePerms = rootInterface.db.getRecordset(
+                RolePermission,
+                recordFilter={"roleId": roleId}
+            )
+
+            for perm in rolePerms:
+                tableName = perm.get("tableName", "")
+                if tableName:
+                    if tableName not in permissions["tables"]:
+                        permissions["tables"][tableName] = {
+                            "view": False,
+                            "read": "n",
+                            "create": "n",
+                            "update": "n",
+                            "delete": "n"
+                        }
+
+                    # Merge permissions (highest wins)
+                    current = permissions["tables"][tableName]
+                    current["view"] = current["view"] or perm.get("canView", False)
+                    current["read"] = _mergeAccessLevel(current["read"], perm.get("readLevel", "n"))
+                    current["create"] = _mergeAccessLevel(current["create"], perm.get("createLevel", "n"))
+                    current["update"] = _mergeAccessLevel(current["update"], perm.get("updateLevel", "n"))
+                    current["delete"] = _mergeAccessLevel(current["delete"], perm.get("deleteLevel", "n"))
+
+                viewName = perm.get("viewName", "")
+                if viewName:
+                    permissions["views"][viewName] = permissions["views"].get(viewName, False) or perm.get("canAccess", False)
+
+        return permissions
+
+    except Exception as e:
+        logger.debug(f"Error getting instance permissions: {e}")
+        return permissions
+
+
+def _mergeAccessLevel(current: str, new: str) -> str:
+    """Merge two access levels, returning the highest."""
+    levels = {"n": 0, "m": 1, "g": 2, "a": 3}
+    currentLevel = levels.get(current, 0)
+    newLevel = levels.get(new, 0)
+
+    if newLevel > currentLevel:
+        return new
+    return current
+
+
 @router.get("/{featureCode}", response_model=Dict[str, Any])
 @limiter.limit("60/minute")
 async def getFeature(
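_mergeAccessLevel is a maximum over the ordered level codes n < m < g < a (what the single-letter codes stand for is not spelled out in this commit). Its behavior in isolation:

assert _mergeAccessLevel("n", "g") == "g"  # higher incoming level wins
assert _mergeAccessLevel("a", "m") == "a"  # existing higher level is kept
assert _mergeAccessLevel("x", "m") == "m"  # unknown codes rank lowest (0)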
@@ -539,55 +745,6 @@ async def createTemplateRole(
         )


-# =============================================================================
-# My Feature Instances (No mandate context needed)
-# =============================================================================
-
-@router.get("/my", response_model=List[Dict[str, Any]])
-@limiter.limit("60/minute")
-async def getMyFeatureInstances(
-    request: Request,
-    context: RequestContext = Depends(getRequestContext)
-) -> List[Dict[str, Any]]:
-    """
-    Get all feature instances the current user has access to.
-
-    Returns instances across all mandates the user is member of.
-    This endpoint does not require X-Mandate-Id header.
-    """
-    try:
-        rootInterface = getRootInterface()
-
-        # Get all feature accesses for this user
-        featureAccesses = rootInterface.getFeatureAccessesForUser(str(context.user.id))
-
-        if not featureAccesses:
-            return []
-
-        featureInterface = getFeatureInterface(rootInterface.db)
-        result = []
-
-        for access in featureAccesses:
-            if not access.enabled:
-                continue
-
-            instance = featureInterface.getFeatureInstance(str(access.featureInstanceId))
-            if instance and instance.enabled:
-                result.append({
-                    **instance.model_dump(),
-                    "accessId": str(access.id)
-                })
-
-        return result
-
-    except Exception as e:
-        logger.error(f"Error getting user's feature instances: {e}")
-        raise HTTPException(
-            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
-            detail=f"Failed to get feature instances: {str(e)}"
-        )
-
-
 # =============================================================================
 # Helper Functions
 # =============================================================================
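The IMPORTANT comment in the relocated block is about Starlette's route matching, which tries routes in registration order: had /my stayed below /{featureCode}, GET /features/my would have matched the parameterized route with featureCode == "my". A minimal reproduction (hypothetical router, not part of the commit):

from fastapi import APIRouter

router = APIRouter(prefix="/features")

@router.get("/my")             # must be registered first
async def getMyFeatureInstances() -> dict:
    return {"mandates": []}

@router.get("/{featureCode}")  # otherwise this would capture "my"
async def getFeature(featureCode: str) -> dict:
    return {"code": featureCode}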
@@ -19,7 +19,7 @@ from modules.interfaces.interfaceDbChatbot import getInterface
 from modules.interfaces.interfaceRbac import getRecordsetWithRBAC

 # Import models
-from modules.datamodels.datamodelChatbot import (
+from modules.datamodels.datamodelChat import (
     ChatWorkflow,
     ChatMessage,
     ChatLog,

@@ -3,7 +3,7 @@
 from typing import Any, Optional

 from modules.datamodels.datamodelUam import User
-from modules.datamodels.datamodelChatbot import ChatWorkflow
+from modules.datamodels.datamodelChat import ChatWorkflow

 class PublicService:
     """Lightweight proxy exposing only public callable attributes of a target.

@@ -6,7 +6,7 @@ import re
 import time
 import base64
 from typing import Dict, Any, List, Optional, Tuple
-from modules.datamodels.datamodelChatbot import PromptPlaceholder, ChatDocument
+from modules.datamodels.datamodelChat import PromptPlaceholder, ChatDocument
 from modules.services.serviceExtraction.mainServiceExtraction import ExtractionService
 from modules.datamodels.datamodelAi import AiCallRequest, AiCallOptions, OperationTypeEnum, PriorityEnum, ProcessingModeEnum
 from modules.datamodels.datamodelExtraction import ContentPart, DocumentIntent

@@ -14,7 +14,7 @@ import logging
 import base64
 from typing import Dict, Any, List, Optional

-from modules.datamodels.datamodelChatbot import ChatDocument
+from modules.datamodels.datamodelChat import ChatDocument
 from modules.datamodels.datamodelExtraction import ContentPart, DocumentIntent
 from modules.workflows.processing.shared.stateTools import checkWorkflowStopped

@@ -12,7 +12,7 @@ import json
 import logging
 from typing import Dict, Any, List, Optional

-from modules.datamodels.datamodelChatbot import ChatDocument
+from modules.datamodels.datamodelChat import ChatDocument
 from modules.datamodels.datamodelExtraction import DocumentIntent
 from modules.workflows.processing.shared.stateTools import checkWorkflowStopped

@@ -3,7 +3,7 @@
 import logging
 from typing import Dict, Any, List, Optional
 from modules.datamodels.datamodelUam import User, UserConnection
-from modules.datamodels.datamodelChatbot import ChatDocument, ChatMessage, ChatStat, ChatLog
+from modules.datamodels.datamodelChat import ChatDocument, ChatMessage, ChatStat, ChatLog
 from modules.datamodels.datamodelAi import AiCallOptions, OperationTypeEnum, PriorityEnum, ProcessingModeEnum
 from modules.shared.progressLogger import ProgressLogger

@@ -11,7 +11,7 @@ import json
 from .subRegistry import ExtractorRegistry, ChunkerRegistry
 from .subPipeline import runExtraction
 from modules.datamodels.datamodelExtraction import ContentExtracted, ContentPart, MergeStrategy, ExtractionOptions, PartResult, DocumentIntent
-from modules.datamodels.datamodelChatbot import ChatDocument
+from modules.datamodels.datamodelChat import ChatDocument
 from modules.datamodels.datamodelAi import AiCallResponse, AiCallRequest, AiCallOptions, OperationTypeEnum, AiModelCall
 from modules.aicore.aicoreModelRegistry import modelRegistry
 from modules.aicore.aicoreModelSelector import modelSelector

@@ -6,7 +6,7 @@ import base64
 import traceback
 from typing import Any, Dict, List, Optional, Callable
 from modules.datamodels.datamodelDocument import RenderedDocument
-from modules.datamodels.datamodelChatbot import ChatDocument
+from modules.datamodels.datamodelChat import ChatDocument
 from modules.services.serviceGeneration.subDocumentUtility import (
     getFileExtension,
     getMimeTypeFromExtension,
@@ -21,6 +21,22 @@ from typing import Optional, List
 logger = logging.getLogger(__name__)


+def _getConnection(dbConnector):
+    """Get a connection from the DatabaseConnector.
+
+    Ensures the connection is alive and returns it.
+    Commits any pending transaction first to avoid blocking.
+    """
+    dbConnector._ensure_connection()
+    conn = dbConnector.connection
+    # Commit any pending transaction to avoid blocking
+    try:
+        conn.commit()
+    except Exception:
+        pass  # Ignore if nothing to commit
+    return conn
+
+
 # =============================================================================
 # Index Definitions
 # =============================================================================
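A usage sketch for the new helper: callers below swap dbConnector._get_connection() for _getConnection(dbConnector), which commits any transaction left open on the shared connection first, so the DDL issued afterwards is not blocked by it.

conn = _getConnection(dbConnector)   # pending work committed first
with conn.cursor() as cursor:
    cursor.execute("SELECT 1")       # placeholder for the real index/trigger DDL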
@@ -144,28 +160,45 @@ def applyMultiTenantOptimizations(dbConnector, tables: Optional[List[str]] = Non

     try:
-        # Get a connection from the connector
-        conn = dbConnector._get_connection()
-        conn.autocommit = True
-
-        with conn.cursor() as cursor:
-            # Apply indexes
-            results["indexesCreated"] = _applyIndexes(cursor, tables)
-
-            # Apply foreign keys
-            results["foreignKeysCreated"] = _applyForeignKeys(cursor, tables)
-
-            # Apply immutable triggers
-            results["triggersCreated"] = _applyImmutableTriggers(cursor, tables)
-
-        logger.info(
-            f"Multi-tenant optimizations applied: "
-            f"{results['indexesCreated']} indexes, "
-            f"{results['triggersCreated']} triggers, "
-            f"{results['foreignKeysCreated']} foreign keys"
-        )
+        conn = _getConnection(dbConnector)
+
+        # Save and set autocommit state
+        try:
+            originalAutocommit = conn.autocommit
+        except Exception:
+            originalAutocommit = False
+
+        try:
+            conn.autocommit = True
+        except Exception as autoErr:
+            logger.debug(f"Could not set autocommit: {autoErr}")
+
+        try:
+            with conn.cursor() as cursor:
+                # Apply indexes
+                results["indexesCreated"] = _applyIndexes(cursor, tables)
+
+                # Apply foreign keys
+                results["foreignKeysCreated"] = _applyForeignKeys(cursor, tables)
+
+                # Apply immutable triggers
+                results["triggersCreated"] = _applyImmutableTriggers(cursor, tables)
+
+            logger.info(
+                f"Multi-tenant optimizations applied: "
+                f"{results['indexesCreated']} indexes, "
+                f"{results['triggersCreated']} triggers, "
+                f"{results['foreignKeysCreated']} foreign keys"
+            )
+        finally:
+            # Restore original autocommit state
+            try:
+                conn.autocommit = originalAutocommit
+            except Exception:
+                pass

     except Exception as e:
-        logger.error(f"Error applying multi-tenant optimizations: {e}")
+        logger.error(f"Error applying multi-tenant optimizations: {type(e).__name__}: {e}")
         results["errors"].append(str(e))

     return results
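The core of this rewrite is a save/set/restore idiom around autocommit, so the DDL runs outside an explicit transaction while later users of the shared connection see the flag unchanged. Reduced to its essentials (do_ddl_work is a stand-in, not part of the commit):

originalAutocommit = getattr(conn, "autocommit", False)  # save
conn.autocommit = True                                   # set for DDL
try:
    do_ddl_work(conn)   # stand-in for the index/FK/trigger application
finally:
    conn.autocommit = originalAutocommit                 # restore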
@@ -174,11 +207,15 @@ def applyMultiTenantOptimizations(dbConnector, tables: Optional[List[str]] = Non

 def applyIndexesOnly(dbConnector, tables: Optional[List[str]] = None) -> int:
     """Apply only indexes (lighter operation, safe for frequent calls)."""
     try:
-        conn = dbConnector._get_connection()
+        conn = _getConnection(dbConnector)
+        originalAutocommit = conn.autocommit
         conn.autocommit = True

-        with conn.cursor() as cursor:
-            return _applyIndexes(cursor, tables)
+        try:
+            with conn.cursor() as cursor:
+                return _applyIndexes(cursor, tables)
+        finally:
+            conn.autocommit = originalAutocommit
     except Exception as e:
         logger.error(f"Error applying indexes: {e}")
         return 0
@@ -194,9 +231,13 @@ def _tableExists(cursor, tableName: str) -> bool:
         SELECT EXISTS (
             SELECT FROM information_schema.tables
             WHERE table_name = %s
-        )
+        ) AS exists
     """, (tableName,))
-    return cursor.fetchone()[0]
+    row = cursor.fetchone()
+    # Handle both dict (RealDictCursor) and tuple results
+    if isinstance(row, dict):
+        return row.get('exists', False)
+    return row[0] if row else False


 def _indexExists(cursor, indexName: str) -> bool:
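The new row handling covers both psycopg2 cursor flavors: a default cursor returns tuples, while RealDictCursor returns dicts keyed by column label, hence the AS exists alias. In isolation:

def _readExists(row):
    # mirrors the row handling added to the *_exists helpers
    if isinstance(row, dict):
        return row.get('exists', False)
    return row[0] if row else False

assert _readExists((True,)) is True            # default (tuple) cursor
assert _readExists({'exists': True}) is True   # RealDictCursor
assert _readExists(None) is False              # no row at all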
@@ -205,9 +246,12 @@ def _indexExists(cursor, indexName: str) -> bool:
         SELECT EXISTS (
             SELECT FROM pg_indexes
             WHERE indexname = %s
-        )
+        ) AS exists
     """, (indexName,))
-    return cursor.fetchone()[0]
+    row = cursor.fetchone()
+    if isinstance(row, dict):
+        return row.get('exists', False)
+    return row[0] if row else False


 def _constraintExists(cursor, constraintName: str) -> bool:

@@ -216,9 +260,12 @@ def _constraintExists(cursor, constraintName: str) -> bool:
         SELECT EXISTS (
             SELECT FROM pg_constraint
             WHERE conname = %s
-        )
+        ) AS exists
     """, (constraintName,))
-    return cursor.fetchone()[0]
+    row = cursor.fetchone()
+    if isinstance(row, dict):
+        return row.get('exists', False)
+    return row[0] if row else False


 def _triggerExists(cursor, triggerName: str) -> bool:

@@ -227,9 +274,12 @@ def _triggerExists(cursor, triggerName: str) -> bool:
         SELECT EXISTS (
             SELECT FROM pg_trigger
             WHERE tgname = %s
-        )
+        ) AS exists
     """, (triggerName,))
-    return cursor.fetchone()[0]
+    row = cursor.fetchone()
+    if isinstance(row, dict):
+        return row.get('exists', False)
+    return row[0] if row else False


 def _applyIndexes(cursor, tables: Optional[List[str]]) -> int:

@@ -390,7 +440,7 @@ def getOptimizationStatus(dbConnector) -> dict:
     }

     try:
-        conn = dbConnector._get_connection()
+        conn = _getConnection(dbConnector)
         with conn.cursor() as cursor:
             # Check regular indexes
             for tableName, indexName, _ in _INDEXES:
@@ -3,7 +3,7 @@

 import logging
 from typing import Dict, Any
-from modules.datamodels.datamodelChatbot import ActionResult
+from modules.datamodels.datamodelChat import ActionResult

 logger = logging.getLogger(__name__)

@@ -4,7 +4,7 @@
 import logging
 import time
 from typing import Dict, Any, Optional, List
-from modules.datamodels.datamodelChatbot import ActionResult, ActionDocument
+from modules.datamodels.datamodelChat import ActionResult, ActionDocument
 from modules.datamodels.datamodelExtraction import ContentPart
 from modules.datamodels.datamodelAi import AiCallOptions, OperationTypeEnum, PriorityEnum, ProcessingModeEnum
 from modules.datamodels.datamodelWorkflow import AiResponse, DocumentData

@@ -4,7 +4,7 @@
 import logging
 import time
 from typing import Dict, Any, Optional, List
-from modules.datamodels.datamodelChatbot import ActionResult, ActionDocument
+from modules.datamodels.datamodelChat import ActionResult, ActionDocument
 from modules.datamodels.datamodelExtraction import ContentPart
 from modules.datamodels.datamodelAi import AiCallOptions, OperationTypeEnum, PriorityEnum, ProcessingModeEnum
 from modules.datamodels.datamodelWorkflow import AiResponse, DocumentData

@@ -5,7 +5,7 @@ import logging
 import time
 import json
 from typing import Dict, Any, List, Optional
-from modules.datamodels.datamodelChatbot import ActionResult, ActionDocument
+from modules.datamodels.datamodelChat import ActionResult, ActionDocument
 from modules.datamodels.datamodelAi import AiCallOptions
 from modules.datamodels.datamodelExtraction import ContentPart

@@ -3,7 +3,7 @@

 import logging
 from typing import Dict, Any
-from modules.datamodels.datamodelChatbot import ActionResult
+from modules.datamodels.datamodelChat import ActionResult

 logger = logging.getLogger(__name__)

@@ -3,7 +3,7 @@

 import logging
 from typing import Dict, Any
-from modules.datamodels.datamodelChatbot import ActionResult
+from modules.datamodels.datamodelChat import ActionResult

 logger = logging.getLogger(__name__)

@@ -5,7 +5,7 @@ import logging
 import time
 import re
 from typing import Dict, Any
-from modules.datamodels.datamodelChatbot import ActionResult, ActionDocument
+from modules.datamodels.datamodelChat import ActionResult, ActionDocument

 logger = logging.getLogger(__name__)

@@ -11,7 +11,7 @@ import json
 import time
 from typing import Dict, Any
 from modules.workflows.methods.methodBase import action
-from modules.datamodels.datamodelChatbot import ActionResult, ActionDocument
+from modules.datamodels.datamodelChat import ActionResult, ActionDocument
 from modules.connectors.connectorPreprocessor import PreprocessorConnector

 logger = logging.getLogger(__name__)

@@ -4,7 +4,7 @@
 import logging
 import time
 from typing import Dict, Any
-from modules.datamodels.datamodelChatbot import ActionResult, ActionDocument
+from modules.datamodels.datamodelChat import ActionResult, ActionDocument
 from modules.datamodels.datamodelDocref import DocumentReferenceList
 from modules.datamodels.datamodelExtraction import ExtractionOptions, MergeStrategy, ContentExtracted, ContentPart

@@ -4,7 +4,7 @@
 import logging
 import json
 from typing import Dict, Any
-from modules.datamodels.datamodelChatbot import ActionResult, ActionDocument
+from modules.datamodels.datamodelChat import ActionResult, ActionDocument

 logger = logging.getLogger(__name__)

@@ -4,7 +4,7 @@
 import logging
 import time
 from typing import Dict, Any
-from modules.datamodels.datamodelChatbot import ActionResult, ActionDocument
+from modules.datamodels.datamodelChat import ActionResult, ActionDocument
 from modules.datamodels.datamodelDocref import DocumentReferenceList
 from modules.datamodels.datamodelExtraction import ContentExtracted, ContentPart
@@ -5,7 +5,7 @@ import logging
 import json
 import aiohttp
 from typing import Dict, Any
-from modules.datamodels.datamodelChatbot import ActionResult, ActionDocument
+from modules.datamodels.datamodelChat import ActionResult, ActionDocument
 from modules.shared.configuration import APP_CONFIG

 logger = logging.getLogger(__name__)

@@ -5,7 +5,7 @@ import logging
 import json
 import uuid
 from typing import Dict, Any
-from modules.datamodels.datamodelChatbot import ActionResult, ActionDocument
+from modules.datamodels.datamodelChat import ActionResult, ActionDocument
 from modules.shared.configuration import APP_CONFIG

 logger = logging.getLogger(__name__)

@@ -9,7 +9,7 @@ import csv as csv_module
 from io import StringIO
 from datetime import datetime, UTC
 from typing import Dict, Any
-from modules.datamodels.datamodelChatbot import ActionResult, ActionDocument
+from modules.datamodels.datamodelChat import ActionResult, ActionDocument

 logger = logging.getLogger(__name__)

@@ -9,7 +9,7 @@ import csv as csv_module
 from io import BytesIO
 from datetime import datetime, UTC
 from typing import Dict, Any
-from modules.datamodels.datamodelChatbot import ActionResult, ActionDocument
+from modules.datamodels.datamodelChat import ActionResult, ActionDocument

 logger = logging.getLogger(__name__)

@@ -4,7 +4,7 @@
 import logging
 import json
 from typing import Dict, Any
-from modules.datamodels.datamodelChatbot import ActionResult, ActionDocument
+from modules.datamodels.datamodelChat import ActionResult, ActionDocument

 logger = logging.getLogger(__name__)

@@ -4,7 +4,7 @@
 import logging
 import json
 from typing import Dict, Any
-from modules.datamodels.datamodelChatbot import ActionResult, ActionDocument
+from modules.datamodels.datamodelChat import ActionResult, ActionDocument

 logger = logging.getLogger(__name__)

@@ -4,7 +4,7 @@
 import logging
 import json
 from typing import Dict, Any, List
-from modules.datamodels.datamodelChatbot import ActionResult, ActionDocument
+from modules.datamodels.datamodelChat import ActionResult, ActionDocument

 logger = logging.getLogger(__name__)

@@ -6,7 +6,7 @@ import json
 import io
 import pandas as pd
 from typing import Dict, Any
-from modules.datamodels.datamodelChatbot import ActionResult, ActionDocument
+from modules.datamodels.datamodelChat import ActionResult, ActionDocument

 logger = logging.getLogger(__name__)

@@ -6,7 +6,7 @@ import json
 import pandas as pd
 from io import BytesIO
 from typing import Dict, Any
-from modules.datamodels.datamodelChatbot import ActionResult, ActionDocument
+from modules.datamodels.datamodelChat import ActionResult, ActionDocument

 logger = logging.getLogger(__name__)

@@ -6,7 +6,7 @@ import json
 import base64
 import requests
 from typing import Dict, Any
-from modules.datamodels.datamodelChatbot import ActionResult, ActionDocument
+from modules.datamodels.datamodelChat import ActionResult, ActionDocument

 logger = logging.getLogger(__name__)

@@ -6,7 +6,7 @@ import time
 import json
 import requests
 from typing import Dict, Any
-from modules.datamodels.datamodelChatbot import ActionResult, ActionDocument
+from modules.datamodels.datamodelChat import ActionResult, ActionDocument

 logger = logging.getLogger(__name__)

@@ -5,7 +5,7 @@ import logging
 import json
 import requests
 from typing import Dict, Any
-from modules.datamodels.datamodelChatbot import ActionResult, ActionDocument
+from modules.datamodels.datamodelChat import ActionResult, ActionDocument

 logger = logging.getLogger(__name__)
@@ -6,7 +6,7 @@ import time
 import json
 import requests
 from typing import Dict, Any
-from modules.datamodels.datamodelChatbot import ActionResult, ActionDocument
+from modules.datamodels.datamodelChat import ActionResult, ActionDocument

 logger = logging.getLogger(__name__)

@@ -6,7 +6,7 @@ import time
 import json
 from datetime import datetime, timezone, timedelta
 from typing import Dict, Any
-from modules.datamodels.datamodelChatbot import ActionResult, ActionDocument
+from modules.datamodels.datamodelChat import ActionResult, ActionDocument

 logger = logging.getLogger(__name__)

@@ -4,7 +4,7 @@
 import logging
 import json
 from typing import Dict, Any
-from modules.datamodels.datamodelChatbot import ActionResult, ActionDocument
+from modules.datamodels.datamodelChat import ActionResult, ActionDocument

 logger = logging.getLogger(__name__)

@@ -6,7 +6,7 @@ import json
 import base64
 import os
 from typing import Dict, Any
-from modules.datamodels.datamodelChatbot import ActionResult, ActionDocument
+from modules.datamodels.datamodelChat import ActionResult, ActionDocument

 logger = logging.getLogger(__name__)

@@ -6,7 +6,7 @@ import time
 import json
 import urllib.parse
 from typing import Dict, Any
-from modules.datamodels.datamodelChatbot import ActionResult, ActionDocument
+from modules.datamodels.datamodelChat import ActionResult, ActionDocument

 logger = logging.getLogger(__name__)

@@ -4,7 +4,7 @@
 import logging
 import json
 from typing import Dict, Any
-from modules.datamodels.datamodelChatbot import ActionResult, ActionDocument
+from modules.datamodels.datamodelChat import ActionResult, ActionDocument

 logger = logging.getLogger(__name__)

@@ -6,7 +6,7 @@ import time
 import json
 import urllib.parse
 from typing import Dict, Any
-from modules.datamodels.datamodelChatbot import ActionResult, ActionDocument
+from modules.datamodels.datamodelChat import ActionResult, ActionDocument

 logger = logging.getLogger(__name__)

@@ -6,7 +6,7 @@ import time
 import json
 import base64
 from typing import Dict, Any
-from modules.datamodels.datamodelChatbot import ActionResult, ActionDocument
+from modules.datamodels.datamodelChat import ActionResult, ActionDocument

 logger = logging.getLogger(__name__)

@@ -6,7 +6,7 @@ import time
 import json
 import urllib.parse
 from typing import Dict, Any
-from modules.datamodels.datamodelChatbot import ActionResult, ActionDocument
+from modules.datamodels.datamodelChat import ActionResult, ActionDocument

 logger = logging.getLogger(__name__)

@@ -5,7 +5,7 @@ import logging
 import json
 import base64
 from typing import Dict, Any
-from modules.datamodels.datamodelChatbot import ActionResult, ActionDocument
+from modules.datamodels.datamodelChat import ActionResult, ActionDocument

 logger = logging.getLogger(__name__)
@@ -5,8 +5,8 @@

 import logging
 from typing import Dict, Any, List
-from modules.datamodels.datamodelChatbot import ActionResult, ActionItem, TaskStep
-from modules.datamodels.datamodelChatbot import ChatWorkflow
+from modules.datamodels.datamodelChat import ActionResult, ActionItem, TaskStep
+from modules.datamodels.datamodelChat import ChatWorkflow
 from modules.workflows.processing.shared.methodDiscovery import methods
 from modules.workflows.processing.shared.stateTools import checkWorkflowStopped

@@ -5,8 +5,8 @@

 import logging
 from typing import Dict, Any, Optional, List
-from modules.datamodels.datamodelChatbot import TaskPlan, TaskStep, ActionResult, ReviewResult
-from modules.datamodels.datamodelChatbot import ChatWorkflow
+from modules.datamodels.datamodelChat import TaskPlan, TaskStep, ActionResult, ReviewResult
+from modules.datamodels.datamodelChat import ChatWorkflow
 from modules.workflows.processing.shared.stateTools import checkWorkflowStopped

 logger = logging.getLogger(__name__)

@@ -6,7 +6,7 @@
 import json
 import logging
 from typing import Dict, Any
-from modules.datamodels.datamodelChatbot import TaskStep, TaskContext, TaskPlan
+from modules.datamodels.datamodelChat import TaskStep, TaskContext, TaskPlan
 from modules.datamodels.datamodelAi import AiCallOptions, OperationTypeEnum, ProcessingModeEnum, PriorityEnum
 from modules.workflows.processing.shared.promptGenerationTaskplan import (
     generateTaskPlanningPrompt

@@ -51,7 +51,7 @@ class TaskPlanner:

         # Analyze user intent to obtain cleaned user objective for planning
         # SKIP intent analysis for AUTOMATION mode - it uses predefined JSON plans
-        from modules.datamodels.datamodelChatbot import WorkflowModeEnum
+        from modules.datamodels.datamodelChat import WorkflowModeEnum
         workflowMode = getattr(workflow, 'workflowMode', None)
         skipIntentionAnalysis = (workflowMode == WorkflowModeEnum.WORKFLOW_AUTOMATION)
@@ -7,11 +7,11 @@ import json
 import logging
 import uuid
 from typing import List, Dict, Any, Optional
-from modules.datamodels.datamodelChatbot import (
+from modules.datamodels.datamodelChat import (
     TaskStep, TaskContext, TaskResult, ActionItem, TaskStatus,
     TaskPlan, ActionResult
 )
-from modules.datamodels.datamodelChatbot import ChatWorkflow
+from modules.datamodels.datamodelChat import ChatWorkflow
 from modules.workflows.processing.modes.modeBase import BaseMode
 from modules.workflows.processing.shared.stateTools import checkWorkflowStopped
 from modules.shared.timeUtils import parseTimestamp

@@ -6,8 +6,8 @@
 from abc import ABC, abstractmethod
 import logging
 from typing import List, Dict, Any
-from modules.datamodels.datamodelChatbot import TaskStep, TaskContext, TaskResult, ActionItem
-from modules.datamodels.datamodelChatbot import ChatWorkflow
+from modules.datamodels.datamodelChat import TaskStep, TaskContext, TaskResult, ActionItem
+from modules.datamodels.datamodelChat import ChatWorkflow
 from modules.workflows.processing.core.taskPlanner import TaskPlanner
 from modules.workflows.processing.core.actionExecutor import ActionExecutor
 from modules.workflows.processing.core.messageCreator import MessageCreator

@@ -9,11 +9,11 @@ import re
 import time
 from datetime import datetime, timezone
 from typing import List, Dict, Any
-from modules.datamodels.datamodelChatbot import (
+from modules.datamodels.datamodelChat import (
     TaskStep, TaskContext, TaskResult, ActionItem, TaskStatus,
     ActionResult, Observation, ObservationPreview, ReviewResult
 )
-from modules.datamodels.datamodelChatbot import ChatWorkflow
+from modules.datamodels.datamodelChat import ChatWorkflow
 from modules.workflows.processing.modes.modeBase import BaseMode
 from modules.workflows.processing.shared.stateTools import checkWorkflowStopped
 from modules.shared.timeUtils import parseTimestamp

@@ -893,7 +893,7 @@ class DynamicMode(BaseMode):
     async def _refineDecide(self, context: TaskContext, observation: Observation) -> ReviewResult:
         """Refine: decide continue or stop, with reason"""
         # Create proper ReviewContext for extractReviewContent
-        from modules.datamodels.datamodelChatbot import ReviewContext
+        from modules.datamodels.datamodelChat import ReviewContext
         # Convert observation to dict for extractReviewContent (temporary compatibility)
         observationDict = {
             'success': observation.success,

@@ -1042,7 +1042,7 @@ class DynamicMode(BaseMode):
 
         # Parse response using structured parsing with ReviewResult model
         from modules.shared.jsonUtils import parseJsonWithModel
-        from modules.datamodels.datamodelChatbot import ReviewResult
+        from modules.datamodels.datamodelChat import ReviewResult
 
         if not resp:
             return ReviewResult(
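Note on the `_refineDecide` hunk above: the surrounding code still builds `observationDict` by hand, marked as temporary compatibility. For comparison, a minimal sketch of the equivalent conversion, assuming `Observation` is a Pydantic v2 BaseModel as elsewhere in this commit (the `summary` field here is illustrative, not taken from the real model):

# Illustrative only: hand-built dict vs. Pydantic's own serializer.
from pydantic import BaseModel

class Observation(BaseModel):
    success: bool
    summary: str = ""

obs = Observation(success=True, summary="step completed")

manual = {'success': obs.success, 'summary': obs.summary}  # compatibility path, as in the hunk
auto = obs.model_dump()                                    # one-line equivalent in Pydantic v2
assert manual == auto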
@@ -5,7 +5,7 @@
 
 import logging
 from typing import List, Optional
-from modules.datamodels.datamodelChatbot import TaskStep, ActionResult, Observation
+from modules.datamodels.datamodelChat import TaskStep, ActionResult, Observation
 
 logger = logging.getLogger(__name__)

@@ -348,7 +348,7 @@ def extractReviewContent(context: Any) -> str:
     elif hasattr(context, 'observation') and context.observation:
         # For observation data, show full content but handle documents specially
         # Handle both Pydantic Observation model and dict format
-        from modules.datamodels.datamodelChatbot import Observation
+        from modules.datamodels.datamodelChat import Observation
 
         if isinstance(context.observation, Observation):
             # Convert Pydantic model to dict

@@ -371,7 +371,7 @@ def extractReviewContent(context: Any) -> str:
         # For observation data in stepResult, show full content but handle documents specially
         observation = context.stepResult['observation']
         # Handle both Pydantic Observation model and dict format
-        from modules.datamodels.datamodelChatbot import Observation
+        from modules.datamodels.datamodelChat import Observation
 
         if isinstance(observation, Observation):
             # Convert Pydantic model to dict
@@ -7,7 +7,7 @@ Handles prompt templates for dynamic mode action handling.
 
 import json
 from typing import Any, List
-from modules.datamodels.datamodelChatbot import PromptBundle, PromptPlaceholder
+from modules.datamodels.datamodelChat import PromptBundle, PromptPlaceholder
 from modules.workflows.processing.shared.placeholderFactory import (
     extractUserPrompt,
     extractUserLanguage,

@@ -7,7 +7,7 @@ Handles prompt templates and extraction functions for task planning phase.
 
 import logging
 from typing import Dict, Any, List
-from modules.datamodels.datamodelChatbot import PromptBundle, PromptPlaceholder
+from modules.datamodels.datamodelChat import PromptBundle, PromptPlaceholder
 from modules.workflows.processing.shared.placeholderFactory import (
     extractUserPrompt,
     extractAvailableDocumentsSummary,

@@ -6,9 +6,9 @@
 import logging
 import json
 from typing import Dict, Any, Optional, List, TYPE_CHECKING
-from modules.datamodels import datamodelChatbot
-from modules.datamodels.datamodelChatbot import TaskStep, TaskContext, TaskPlan, ActionResult, ActionDocument, ChatDocument, ChatMessage
-from modules.datamodels.datamodelChatbot import ChatWorkflow, WorkflowModeEnum
+from modules.datamodels import datamodelChat
+from modules.datamodels.datamodelChat import TaskStep, TaskContext, TaskPlan, ActionResult, ActionDocument, ChatDocument, ChatMessage
+from modules.datamodels.datamodelChat import ChatWorkflow, WorkflowModeEnum
 from modules.workflows.processing.modes.modeBase import BaseMode
 from modules.workflows.processing.modes.modeDynamic import DynamicMode
 from modules.workflows.processing.modes.modeAutomation import AutomationMode

@@ -102,7 +102,7 @@ class WorkflowProcessor:
             self.services.chat.progressLogFinish(operationId, False)
             raise
 
-    async def executeTask(self, taskStep: TaskStep, workflow: ChatWorkflow, context: TaskContext) -> datamodelChatbot.TaskResult:
+    async def executeTask(self, taskStep: TaskStep, workflow: ChatWorkflow, context: TaskContext) -> datamodelChat.TaskResult:
         """Execute a task step using the appropriate mode"""
         import time
 

@@ -494,7 +494,7 @@ class WorkflowProcessor:
 
         # Create ActionResult with response
         # For fast path, we create a simple text document with the response
-        from modules.datamodels.datamodelChatbot import ActionDocument
+        from modules.datamodels.datamodelChat import ActionDocument
 
         responseDoc = ActionDocument(
             documentName="fast_path_response.txt",

@@ -626,7 +626,7 @@ class WorkflowProcessor:
             ChatMessage with persisted documents
         """
         try:
-            from modules.datamodels.datamodelChatbot import ChatMessage, ChatDocument, ActionDocument
+            from modules.datamodels.datamodelChat import ChatMessage, ChatDocument, ActionDocument
             from modules.workflows.processing.shared.stateTools import checkWorkflowStopped
 
             # Check workflow status
@@ -6,14 +6,14 @@ import uuid
 import asyncio
 import json
 
-from modules.datamodels.datamodelChatbot import (
+from modules.datamodels.datamodelChat import (
     UserInputRequest,
     ChatMessage,
     ChatWorkflow,
     ChatDocument,
     WorkflowModeEnum
 )
-from modules.datamodels.datamodelChatbot import TaskContext
+from modules.datamodels.datamodelChat import TaskContext
 from modules.workflows.processing.workflowProcessor import WorkflowProcessor
 from modules.workflows.processing.shared.stateTools import WorkflowStoppedException, checkWorkflowStopped

@@ -606,7 +606,7 @@ The following is the user's original input message. Analyze intent, normalize th
 
             # Collect file info
             fileInfo = self.services.chat.getFileInfo(fileItem.id)
-            from modules.datamodels.datamodelChatbot import ChatDocument
+            from modules.datamodels.datamodelChat import ChatDocument
             doc = ChatDocument(
                 fileId=fileItem.id,
                 fileName=fileInfo.get("fileName", fileName) if fileInfo else fileName,

@@ -792,7 +792,7 @@ The following is the user's original input message. Analyze intent, normalize th
 
             # Collect file info
             fileInfo = self.services.chat.getFileInfo(fileItem.id)
-            from modules.datamodels.datamodelChatbot import ChatDocument
+            from modules.datamodels.datamodelChat import ChatDocument
             doc = ChatDocument(
                 fileId=fileItem.id,
                 fileName=fileInfo.get("fileName", fileName) if fileInfo else fileName,

@@ -921,7 +921,7 @@ The following is the user's original input message. Analyze intent, normalize th
         # Persist task result for cross-task/round document references
         # Convert ChatTaskResult to WorkflowTaskResult for persistence
         from modules.datamodels.datamodelWorkflow import TaskResult as WorkflowTaskResult
-        from modules.datamodels.datamodelChatbot import ActionResult
+        from modules.datamodels.datamodelChat import ActionResult
 
         # Get final ActionResult from task execution (last action result)
         finalActionResult = None
@@ -85,7 +85,7 @@ class AIModelsTester:
         self.services.extraction = ExtractionService(self.services)
 
         # Create a minimal workflow context
-        from modules.datamodels.datamodelChatbot import ChatWorkflow, WorkflowModeEnum
+        from modules.datamodels.datamodelChat import ChatWorkflow, WorkflowModeEnum
         import uuid
 
         self.services.currentWorkflow = ChatWorkflow(

@@ -18,7 +18,7 @@ if _gateway_path not in sys.path:
     sys.path.insert(0, _gateway_path)
 
 from modules.datamodels.datamodelAi import OperationTypeEnum
-from modules.datamodels.datamodelChatbot import ChatWorkflow, ChatDocument, WorkflowModeEnum
+from modules.datamodels.datamodelChat import ChatWorkflow, ChatDocument, WorkflowModeEnum
 from modules.datamodels.datamodelUam import User
 

@@ -174,7 +174,7 @@ class MethodAiOperationsTester:
             imageData = f.read()
 
         # Create a ChatDocument
-        from modules.datamodels.datamodelChatbot import ChatDocument
+        from modules.datamodels.datamodelChat import ChatDocument
         import uuid
 
         testImageDoc = ChatDocument(

@@ -186,7 +186,7 @@ class MethodAiOperationsTester:
         )
 
         # Create a message with this document
-        from modules.datamodels.datamodelChatbot import ChatMessage
+        from modules.datamodels.datamodelChat import ChatMessage
         import time
 
         testMessage = ChatMessage(

@@ -42,7 +42,7 @@ class AIBehaviorTester:
         logging.getLogger().setLevel(logging.DEBUG)
 
         # Create and save workflow in database using the interface
-        from modules.datamodels.datamodelChatbot import ChatWorkflow, WorkflowModeEnum
+        from modules.datamodels.datamodelChat import ChatWorkflow, WorkflowModeEnum
         import uuid
         import time
         import modules.interfaces.interfaceDbChatbot as interfaceDbChatbot
@@ -20,7 +20,7 @@ if _gateway_path not in sys.path:
 
 # Import the service initialization
 from modules.services import getInterface as getServices
-from modules.datamodels.datamodelChatbot import UserInputRequest, WorkflowModeEnum
+from modules.datamodels.datamodelChat import UserInputRequest, WorkflowModeEnum
 from modules.datamodels.datamodelUam import User
 from modules.features.workflow import chatStart
 import modules.interfaces.interfaceDbChatbot as interfaceDbChatbot

@@ -22,7 +22,7 @@ if _gateway_path not in sys.path:
 
 # Import the service initialization
 from modules.services import getInterface as getServices
-from modules.datamodels.datamodelChatbot import UserInputRequest, WorkflowModeEnum
+from modules.datamodels.datamodelChat import UserInputRequest, WorkflowModeEnum
 from modules.datamodels.datamodelUam import User
 from modules.features.workflow import chatStart
 import modules.interfaces.interfaceDbChatbot as interfaceDbChatbot

@@ -21,7 +21,7 @@ if _gateway_path not in sys.path:
 
 # Import the service initialization
 from modules.services import getInterface as getServices
-from modules.datamodels.datamodelChatbot import UserInputRequest, WorkflowModeEnum
+from modules.datamodels.datamodelChat import UserInputRequest, WorkflowModeEnum
 from modules.datamodels.datamodelUam import User
 from modules.features.workflow import chatStart
 import modules.interfaces.interfaceDbChatbot as interfaceDbChatbot

@@ -21,7 +21,7 @@ if _gateway_path not in sys.path:
 
 # Import the service initialization
 from modules.services import getInterface as getServices
-from modules.datamodels.datamodelChatbot import UserInputRequest, WorkflowModeEnum
+from modules.datamodels.datamodelChat import UserInputRequest, WorkflowModeEnum
 from modules.datamodels.datamodelUam import User
 from modules.features.workflow import chatStart
 import modules.interfaces.interfaceDbChatbot as interfaceDbChatbot

@@ -23,7 +23,7 @@ if _gateway_path not in sys.path:
 
 # Import the service initialization
 from modules.services import getInterface as getServices
-from modules.datamodels.datamodelChatbot import UserInputRequest, WorkflowModeEnum
+from modules.datamodels.datamodelChat import UserInputRequest, WorkflowModeEnum
 from modules.datamodels.datamodelUam import User
 from modules.features.workflow import chatStart
 import modules.interfaces.interfaceDbChatbot as interfaceDbChatbot
@@ -10,7 +10,7 @@ import pytest
 import uuid
 from unittest.mock import Mock, AsyncMock, patch
 
-from modules.datamodels.datamodelChatbot import ChatWorkflow, TaskContext, TaskStep
+from modules.datamodels.datamodelChat import ChatWorkflow, TaskContext, TaskStep
 from modules.datamodels.datamodelWorkflow import ActionDefinition
 from modules.datamodels.datamodelDocref import DocumentReferenceList, DocumentListReference, DocumentItemReference

@@ -9,7 +9,7 @@ Tests state increment methods, helper methods, and updateFromSelection.
 import pytest
 import uuid
 
-from modules.datamodels.datamodelChatbot import ChatWorkflow, TaskContext, TaskStep
+from modules.datamodels.datamodelChat import ChatWorkflow, TaskContext, TaskStep
 from modules.datamodels.datamodelWorkflow import ActionDefinition

@@ -15,7 +15,7 @@ sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(__file__))))
 
 from modules.datamodels.datamodelWorkflow import ActionDefinition, AiResponse
 from modules.datamodels.datamodelDocref import DocumentReferenceList, DocumentListReference
-from modules.datamodels.datamodelChatbot import ChatWorkflow
+from modules.datamodels.datamodelChat import ChatWorkflow
 from modules.shared.jsonUtils import parseJsonWithModel
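All of the hunks above are the same mechanical rename of modules.datamodels.datamodelChatbot to modules.datamodels.datamodelChat. For reference, a minimal sketch (not part of this commit) of replaying such a rename across a source tree; it assumes the old name only ever appears as the exact module identifier:

# Hypothetical helper: replays the import rename shown in the hunks above.
import re
from pathlib import Path

OLD, NEW = "datamodelChatbot", "datamodelChat"

def renameModuleReferences(root: str) -> int:
    changed = 0
    for path in Path(root).rglob("*.py"):
        text = path.read_text(encoding="utf-8")
        # \b matches after "." and before newline; names like interfaceDbChatbot stay untouched
        newText = re.sub(rf"\b{OLD}\b", NEW, text)
        if newText != text:
            path.write_text(newText, encoding="utf-8")
            changed += 1
    return changed

if __name__ == "__main__":
    print(f"{renameModuleReferences('modules')} files rewritten")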
tool_db_adapt_to_models.py (new file, 428 lines)

@@ -0,0 +1,428 @@
+#!/usr/bin/env python3
+"""
+Adapts the database to the Pydantic models.
+
+Simple script that:
+1. Adds missing fields to the DB (according to the Pydantic models)
+2. Corrects wrong data types (data may be deleted in the process)
+3. Special case: migrates UserInDB.privilege → roleLabels
+
+Usage:
+    python tool_db_adapt_to_models.py [--dry-run] [--db <database>]
+"""
+
+import os
+import sys
+import json
+import argparse
+import logging
+from pathlib import Path
+from typing import Dict, List, Any, Optional
+
+# Set the gateway path
+scriptPath = Path(__file__).resolve()
+gatewayPath = scriptPath.parent
+sys.path.insert(0, str(gatewayPath))
+os.chdir(str(gatewayPath))
+
+# Configure logging FIRST
+logging.basicConfig(
+    level=logging.INFO,
+    format='%(asctime)s - %(levelname)s - %(message)s',
+    force=True  # Overrides any existing config
+)
+logger = logging.getLogger(__name__)
+
+import psycopg2
+import psycopg2.extras
+from modules.shared.configuration import APP_CONFIG
+
+# Database configuration: DB name → (config prefix, Pydantic model modules)
+DATABASE_CONFIG = {
+    "poweron_app": ("DB_APP", ["datamodelUam", "datamodelRbac", "datamodelSecurity"]),
+    "poweron_chat": ("DB_CHAT", ["datamodelChat"]),
+    "poweron_management": ("DB_MANAGEMENT", ["datamodelWorkflow", "datamodelFiles"]),
+}
+
+# Python type → PostgreSQL type mapping
+TYPE_MAPPING = {
+    "str": "text",
+    "int": "integer",
+    "float": "double precision",
+    "bool": "boolean",
+    "list": "jsonb",
+    "dict": "jsonb",
+    "List": "jsonb",
+    "Dict": "jsonb",
+    "Optional": None,  # Handled separately
+    "datetime": "timestamp",
+    "EmailStr": "text",
+    "UUID": "uuid",
+}
+
+
+def _getDbConnection(dbName: str):
+    """Connects to a database via APP_CONFIG."""
+    prefix = DATABASE_CONFIG.get(dbName, ("DB", []))[0]
+
+    host = APP_CONFIG.get(f"{prefix}_HOST") or APP_CONFIG.get("DB_HOST", "localhost")
+    port = APP_CONFIG.get(f"{prefix}_PORT") or APP_CONFIG.get("DB_PORT", "5432")
+    user = APP_CONFIG.get(f"{prefix}_USER") or APP_CONFIG.get("DB_USER")
+    password = APP_CONFIG.get(f"{prefix}_PASSWORD_SECRET") or APP_CONFIG.get(f"{prefix}_PASSWORD") or APP_CONFIG.get("DB_PASSWORD_SECRET") or APP_CONFIG.get("DB_PASSWORD")
+
+    if not user or not password:
+        logger.error(f"No credentials for {dbName} ({prefix}_*)")
+        return None
+
+    try:
+        conn = psycopg2.connect(
+            host=host, port=int(port), database=dbName,
+            user=user, password=password,
+            cursor_factory=psycopg2.extras.RealDictCursor
+        )
+        conn.autocommit = True
+        return conn
+    except Exception as e:
+        logger.error(f"Connection error {dbName}: {e}")
+        return None
+
+
+def _getDbTables(conn) -> Dict[str, Dict[str, str]]:
+    """Fetches all tables and their columns from the DB."""
+    cur = conn.cursor()
+    cur.execute("""
+        SELECT table_name, column_name, data_type, is_nullable
+        FROM information_schema.columns
+        WHERE table_schema = 'public'
+        ORDER BY table_name, ordinal_position
+    """)
+
+    tables = {}
+    for row in cur.fetchall():
+        tableName = row['table_name']
+        if tableName not in tables:
+            tables[tableName] = {}
+        tables[tableName][row['column_name']] = {
+            'type': row['data_type'],
+            'nullable': row['is_nullable'] == 'YES'
+        }
+    cur.close()
+    return tables
+
+
+def _parsePydanticModels(moduleNames: List[str]) -> Dict[str, Dict[str, str]]:
+    """Parses the Pydantic models from the given modules."""
+    import ast
+
+    models = {}
+    datamodelsPath = gatewayPath / "modules" / "datamodels"
+
+    for moduleName in moduleNames:
+        filePath = datamodelsPath / f"{moduleName}.py"
+        if not filePath.exists():
+            logger.warning(f"Module not found: {filePath}")
+            continue
+
+        with open(filePath, 'r', encoding='utf-8') as f:
+            tree = ast.parse(f.read())
+
+        for node in ast.walk(tree):
+            if isinstance(node, ast.ClassDef):
+                className = node.name
+
+                # Check whether the class inherits from BaseModel
+                isBaseModel = False
+                for base in node.bases:
+                    if isinstance(base, ast.Name) and base.id == "BaseModel":
+                        isBaseModel = True
+                        break
+
+                if not isBaseModel:
+                    continue
+
+                fields = {}
+                for item in node.body:
+                    if isinstance(item, ast.AnnAssign) and isinstance(item.target, ast.Name):
+                        fieldName = item.target.id
+                        if fieldName.startswith('_'):
+                            continue
+
+                        # Extract the type
+                        fieldType = _extractType(item.annotation)
+                        if fieldType:
+                            fields[fieldName] = fieldType
+
+                if fields:
+                    models[className] = fields
+
+    return models
+
+
+def _extractType(annotation) -> Optional[str]:
+    """Extracts the PostgreSQL type from an AST annotation."""
+    import ast
+
+    if isinstance(annotation, ast.Name):
+        return TYPE_MAPPING.get(annotation.id, "text")
+
+    elif isinstance(annotation, ast.Subscript):
+        # Optional[X], List[X], Dict[X, Y]
+        if isinstance(annotation.value, ast.Name):
+            outerType = annotation.value.id
+            if outerType == "Optional":
+                # Recursively resolve the inner type
+                if isinstance(annotation.slice, ast.Name):
+                    return TYPE_MAPPING.get(annotation.slice.id, "text")
+                elif isinstance(annotation.slice, ast.Subscript):
+                    return _extractType(annotation.slice)
+                return "text"
+            elif outerType in ("List", "list", "Dict", "dict"):
+                return "jsonb"
+
+    elif isinstance(annotation, ast.Constant):
+        return "text"
+
+    return "text"
+
+
+def _adaptTable(conn, tableName: str, modelFields: Dict[str, str], dbColumns: Dict[str, Any], dryRun: bool) -> bool:
+    """Adapts a table to the Pydantic model."""
+    cur = conn.cursor()
+    success = True
+
+    for fieldName, pgType in modelFields.items():
+        # pgType comes straight from _extractType (already a PostgreSQL type)
+
+        # Look up the column case-insensitively
+        dbCol = None
+        actualColName = None
+        for colName, colInfo in dbColumns.items():
+            if colName.lower() == fieldName.lower():
+                dbCol = colInfo
+                actualColName = colName
+                break
+
+        if dbCol is None:
+            # Column is missing → add it
+            sql = f'ALTER TABLE "{tableName}" ADD COLUMN "{fieldName}" {pgType}'
+            if dryRun:
+                logger.info(f"[DRY-RUN] {sql}")
+            else:
+                try:
+                    cur.execute(sql)
+                    logger.info(f"Column added: {tableName}.{fieldName} ({pgType})")
+                except Exception as e:
+                    logger.error(f"Error adding {tableName}.{fieldName}: {e}")
+                    success = False
+        else:
+            # Column exists → check the type
+            currentType = dbCol['type']
+            if not _typesCompatible(currentType, pgType):
+                sql = f'ALTER TABLE "{tableName}" ALTER COLUMN "{actualColName}" TYPE {pgType} USING NULL'
+                if dryRun:
+                    logger.info(f"[DRY-RUN] {sql}")
+                else:
+                    try:
+                        cur.execute(sql)
+                        logger.info(f"Type changed: {tableName}.{actualColName} ({currentType} → {pgType})")
+                    except Exception as e:
+                        logger.error(f"Error altering {tableName}.{actualColName}: {e}")
+                        success = False
+
+    cur.close()
+    return success
+
+
+def _typesCompatible(dbType: str, targetType: str) -> bool:
+    """Checks whether the DB type is compatible with the target type."""
+    dbType = dbType.lower()
+    targetType = targetType.lower()
+
+    # Identical types
+    if dbType == targetType:
+        return True
+
+    # Compatible types
+    compatiblePairs = [
+        ("character varying", "text"),
+        ("varchar", "text"),
+        ("integer", "bigint"),
+        ("real", "double precision"),
+        ("timestamp without time zone", "timestamp"),
+        ("timestamp with time zone", "timestamp"),
+    ]
+
+    for a, b in compatiblePairs:
+        if (dbType == a and targetType == b) or (dbType == b and targetType == a):
+            return True
+
+    return False
+
+
+def _migratePrivilegeToRoleLabels(conn, tableName: str, dryRun: bool) -> bool:
+    """Migrates the privilege value to roleLabels (special case UserInDB)."""
+    cur = conn.cursor()
+
+    # Check whether both columns exist
+    cur.execute("""
+        SELECT column_name FROM information_schema.columns
+        WHERE table_name = %s AND column_name IN ('privilege', 'roleLabels')
+    """, (tableName,))
+    columns = [row['column_name'] for row in cur.fetchall()]
+
+    if 'privilege' not in columns:
+        logger.info(f"Column 'privilege' does not exist in {tableName} - no migration needed")
+        cur.close()
+        return True
+
+    if 'roleLabels' not in columns:
+        # Create roleLabels
+        sql = f'ALTER TABLE "{tableName}" ADD COLUMN "roleLabels" jsonb'
+        if dryRun:
+            logger.info(f"[DRY-RUN] {sql}")
+            cur.close()
+            return True
+        else:
+            cur.execute(sql)
+            logger.info(f"Column roleLabels created in {tableName}")
+
+    # Migrate privilege → roleLabels
+    cur.execute(f"""
+        SELECT id, privilege, "roleLabels" FROM "{tableName}"
+        WHERE privilege IS NOT NULL AND privilege != ''
+    """)
+    users = cur.fetchall()
+
+    if not users:
+        logger.info(f"No rows with a privilege value in {tableName}")
+        cur.close()
+        return True
+
+    logger.info(f"Migrating {len(users)} rows: privilege → roleLabels")
+
+    for user in users:
+        userId = user['id']
+        privilege = user['privilege']
+        roleLabels = user['roleLabels'] or []
+
+        if isinstance(roleLabels, str):
+            try:
+                roleLabels = json.loads(roleLabels)
+            except:
+                roleLabels = []
+
+        if privilege not in roleLabels:
+            roleLabels.append(privilege)
+
+        if dryRun:
+            logger.info(f"[DRY-RUN] UPDATE {tableName} SET roleLabels = {roleLabels} WHERE id = {userId}")
+        else:
+            cur.execute(
+                f'UPDATE "{tableName}" SET "roleLabels" = %s WHERE id = %s',
+                (json.dumps(roleLabels), userId)
+            )
+
+    if not dryRun:
+        logger.info(f"Migration privilege → roleLabels completed")
+
+    cur.close()
+    return True
+
+
+def runMigration(dbName: str, dryRun: bool) -> bool:
+    """Runs the migration for one database."""
+    logger.info(f"\n{'='*60}")
+    logger.info(f"DATABASE: {dbName}")
+    logger.info(f"{'='*60}")
+
+    if dbName not in DATABASE_CONFIG:
+        logger.error(f"Unknown database: {dbName}")
+        return False
+
+    conn = _getDbConnection(dbName)
+    if not conn:
+        return False
+
+    prefix, moduleNames = DATABASE_CONFIG[dbName]
+
+    # Load the DB schema
+    dbTables = _getDbTables(conn)
+    logger.info(f"Tables in DB: {', '.join(dbTables.keys()) if dbTables else 'none'}")
+
+    # Load the Pydantic models
+    models = _parsePydanticModels(moduleNames)
+    logger.info(f"Pydantic models: {', '.join(models.keys()) if models else 'none'}")
+
+    success = True
+
+    # Match each model against its DB table
+    for modelName, modelFields in models.items():
+        # Find the matching table (case-insensitive)
+        tableName = None
+        tableColumns = None
+        for dbTable, dbCols in dbTables.items():
+            if dbTable.lower() == modelName.lower():
+                tableName = dbTable
+                tableColumns = dbCols
+                break
+
+        if tableName is None:
+            logger.warning(f"No table found for model {modelName} - skipped")
+            continue
+
+        logger.info(f"\nChecking {modelName} ↔ {tableName}...")
+
+        # Adapt the table
+        if not _adaptTable(conn, tableName, modelFields, tableColumns, dryRun):
+            success = False
+
+        # Special case: UserInDB privilege → roleLabels
+        if modelName == "UserInDB":
+            if not _migratePrivilegeToRoleLabels(conn, tableName, dryRun):
+                success = False
+
+    conn.close()
+    return success
+
+
+def main():
+    parser = argparse.ArgumentParser(description="Adapts the DB structure to the Pydantic models")
+    parser.add_argument("--dry-run", action="store_true", help="Only shows the planned changes")
+    parser.add_argument("--db", help="Migrate only specific DB(s) (comma-separated)")
+    args = parser.parse_args()
+
+    databases = list(DATABASE_CONFIG.keys())
+    if args.db:
+        databases = [db.strip() for db in args.db.split(",")]
+
+    logger.info("="*60)
+    logger.info("ADAPTING DATABASES TO THE PYDANTIC MODELS")
+    logger.info("="*60)
+    logger.info(f"Mode: {'DRY-RUN' if args.dry_run else 'LIVE'}")
+    logger.info(f"Databases: {', '.join(databases)}")
+
+    if not args.dry_run:
+        response = input("\nApply changes? (yes/no): ")
+        if response.lower() != "yes":
+            logger.info("Aborted")
+            return 0
+
+    allSuccess = True
+    for dbName in databases:
+        if not runMigration(dbName, args.dry_run):
+            allSuccess = False
+
+    if allSuccess:
+        logger.info("\n" + "="*60)
+        logger.info("ALL MIGRATIONS SUCCEEDED")
+        logger.info("="*60)
+    else:
+        logger.error("\n" + "="*60)
+        logger.error("SOME MIGRATIONS HAD ERRORS")
+        logger.error("="*60)
+
+    return 0 if allSuccess else 1
+
+
+if __name__ == "__main__":
+    sys.exit(main())
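The schema-inference half of the new tool can be previewed without touching any database; a minimal sketch, assuming it is run from the gateway directory so the module imports (psycopg2, APP_CONFIG) resolve at import time:

# Illustrative dry inspection: print the column types the adapter would expect.
# No database connection is opened until runMigration() is called.
from tool_db_adapt_to_models import _parsePydanticModels

models = _parsePydanticModels(["datamodelChat"])
for modelName, fields in models.items():
    print(modelName)
    for fieldName, pgType in fields.items():
        print(f"  {fieldName}: {pgType}")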
@@ -36,9 +36,36 @@ from datetime import datetime
 from typing import Dict, List, Any, Optional
 from pathlib import Path
 
+# Add gateway to path for imports and set working directory
+# Find gateway directory (could be in local/pending/ or gateway/)
+scriptPath = Path(__file__).resolve()
+gatewayPath = scriptPath.parent
+# If we're in local/pending/, go up to find gateway/
+if gatewayPath.name == "pending":
+    gatewayPath = gatewayPath.parent.parent / "gateway"
+elif gatewayPath.name == "local":
+    gatewayPath = gatewayPath.parent / "gateway"
+# If gateway doesn't exist, try current directory
+if not gatewayPath.exists():
+    gatewayPath = Path(__file__).parent.parent.parent / "gateway"
+if gatewayPath.exists():
+    sys.path.insert(0, str(gatewayPath))
+    # Change working directory to gateway so APP_CONFIG can find .env file
+    os.chdir(str(gatewayPath))
+else:
+    # Fallback: assume we're already in gateway/ or add parent
+    sys.path.insert(0, str(Path(__file__).parent))
+    # Try to change to gateway directory if it exists
+    potentialGateway = Path(__file__).parent
+    if potentialGateway.exists() and (potentialGateway / "modules" / "shared" / "configuration.py").exists():
+        os.chdir(str(potentialGateway))
+
 import psycopg2
 import psycopg2.extras
 
+# Use the real APP_CONFIG which handles encryption and environment-specific configs
+from modules.shared.configuration import APP_CONFIG  # type: ignore
+
 # Configure logging
 logging.basicConfig(
     level=logging.INFO,
@@ -47,6 +74,27 @@ logging.basicConfig(
 )
 logger = logging.getLogger(__name__)
 
+# Force APP_CONFIG to refresh after changing working directory
+# This ensures .env file is loaded from the correct location
+try:
+    APP_CONFIG.refresh()
+    envType = APP_CONFIG.get('APP_ENV_TYPE', 'unknown')
+    keySysVar = APP_CONFIG.get('APP_KEY_SYSVAR', 'not_set')
+    logger.debug(f"APP_CONFIG refreshed. Environment type: {envType}")
+    logger.debug(f"APP_KEY_SYSVAR: {keySysVar}")
+    logger.debug(f"Current working directory: {os.getcwd()}")
+
+    # Check if master key is available (needed for decrypting secrets)
+    if keySysVar != 'not_set':
+        masterKeyEnv = os.environ.get(keySysVar)
+        if masterKeyEnv:
+            logger.debug(f"Master key found in environment variable: {keySysVar}")
+        else:
+            logger.warning(f"Master key not found in environment variable: {keySysVar}")
+            logger.warning("Encrypted secrets may not be decryptable!")
+except Exception as e:
+    logger.warning(f"Could not refresh APP_CONFIG: {e}")
+
 # All PowerOn databases
 ALL_DATABASES = [
     "poweron_app",          # Main app: users, mandates, RBAC, features
@@ -56,65 +104,62 @@ ALL_DATABASES = [
     "poweron_trustee",      # Trustee
 ]
 
-def _loadEnvConfig() -> Dict[str, str]:
-    """Loads the configuration directly from the .env file."""
-    config = {}
-    envPath = Path(__file__).parent / '.env'
-
-    if not envPath.exists():
-        logger.warning(f"Environment file not found at {envPath}")
-        return config
-
-    # Try several encodings
-    encodings = ['utf-8', 'utf-8-sig', 'latin-1', 'cp1252']
-
-    for encoding in encodings:
-        try:
-            with open(envPath, 'r', encoding=encoding) as f:
-                for line in f:
-                    line = line.strip()
-                    if not line or line.startswith('#'):
-                        continue
-                    if '=' in line:
-                        key, value = line.split('=', 1)
-                        config[key.strip()] = value.strip()
-            # Loaded successfully
-            return config
-        except UnicodeDecodeError:
-            continue
-        except Exception as e:
-            logger.error(f"Error loading .env file with {encoding}: {e}")
-            continue
-
-    logger.error(f"Could not load .env file with any encoding")
-    return config
-
-
-# Load the global configuration
-_ENV_CONFIG = _loadEnvConfig()
+# Database configuration: mapping from DB name to config prefix
+# Each database has its own variables: DB_APP_HOST, DB_CHAT_HOST, etc.
+DATABASE_CONFIG = {
+    "poweron_app": "DB_APP",            # DB_APP_HOST, DB_APP_USER, DB_APP_PASSWORD_SECRET, etc.
+    "poweron_chat": "DB_CHAT",          # DB_CHAT_HOST, DB_CHAT_USER, etc.
+    "poweron_management": "DB_MANAGEMENT",
+    "poweron_realestate": "DB_REALESTATE",
+    "poweron_trustee": "DB_TRUSTEE",
+}
 
 
 def _getConfigValue(key: str, default: str = None) -> str:
-    """Gets a configuration value."""
-    return _ENV_CONFIG.get(key, os.environ.get(key, default))
+    """Gets a configuration value via APP_CONFIG (supports encryption)."""
+    return APP_CONFIG.get(key, default)
+
+
+def _getDbConfig(dbName: str) -> Dict[str, Any]:
+    """
+    Gets the database connection configuration for a specific database.
+    Supports DB-specific variables (DB_APP_HOST) as well as the fallback (DB_HOST).
+    """
+    prefix = DATABASE_CONFIG.get(dbName, "DB")
+
+    # Try the DB-specific variables first, then fall back to the generic ones
+    host = _getConfigValue(f"{prefix}_HOST") or _getConfigValue("DB_HOST", "localhost")
+    user = _getConfigValue(f"{prefix}_USER") or _getConfigValue("DB_USER")
+    password = _getConfigValue(f"{prefix}_PASSWORD_SECRET") or _getConfigValue("DB_PASSWORD_SECRET")
+    port = _getConfigValue(f"{prefix}_PORT") or _getConfigValue("DB_PORT", "5432")
+
+    return {
+        "host": host,
+        "user": user,
+        "password": password,
+        "port": int(port) if port else 5432
+    }
 
 
 def _databaseExists(dbDatabase: str) -> bool:
     """Checks whether a database exists."""
-    dbHost = _getConfigValue("DB_HOST", "localhost")
-    dbUser = _getConfigValue("DB_USER")
-    dbPassword = _getConfigValue("DB_PASSWORD_SECRET")
-    dbPort = int(_getConfigValue("DB_PORT", "5432"))
+    config = _getDbConfig(dbDatabase)
+
+    if not config["user"]:
+        logger.warning(f"DB user not set for database {dbDatabase}")
+        return False
+    if not config["password"]:
+        logger.warning(f"DB password not set for database {dbDatabase}")
+        return False
 
     try:
         # Connect to the postgres database for the check
         conn = psycopg2.connect(
-            host=dbHost,
-            port=dbPort,
+            host=config["host"],
+            port=config["port"],
             database="postgres",
-            user=dbUser,
-            password=dbPassword
+            user=config["user"],
+            password=config["password"]
         )
         conn.autocommit = True
 
@@ -128,37 +173,43 @@ def _databaseExists(dbDatabase: str) -> bool:
         conn.close()
         return exists
     except Exception as e:
-        logger.error(f"Error checking database {dbDatabase}: {e}")
+        logger.error(f"Error checking database {dbDatabase} on {config['host']}:{config['port']}: {e}")
         return False
 
 
 def _getDbConnection(dbDatabase: str):
     """Creates a connection to a specific PostgreSQL database."""
-    # First check whether the database exists
-    if not _databaseExists(dbDatabase):
-        logger.warning(f"Database '{dbDatabase}' does not exist - skipped")
-        return None
-
-    dbHost = _getConfigValue("DB_HOST", "localhost")
-    dbUser = _getConfigValue("DB_USER")
-    dbPassword = _getConfigValue("DB_PASSWORD_SECRET")
-    dbPort = int(_getConfigValue("DB_PORT", "5432"))
+    config = _getDbConfig(dbDatabase)
+
+    # Check whether essential configuration values are missing
+    if not config["user"]:
+        logger.error(f"DB user not set for {dbDatabase} - cannot connect")
+        return None
+    if not config["password"]:
+        logger.error(f"DB password not set for {dbDatabase} - cannot connect")
+        return None
+
+    # First check whether the database exists
+    if not _databaseExists(dbDatabase):
+        logger.warning(f"Database '{dbDatabase}' does not exist on {config['host']}:{config['port']} - skipped")
+        return None
 
     try:
         conn = psycopg2.connect(
-            host=dbHost,
-            port=dbPort,
+            host=config["host"],
+            port=config["port"],
             database=dbDatabase,
-            user=dbUser,
-            password=dbPassword,
+            user=config["user"],
+            password=config["password"],
             cursor_factory=psycopg2.extras.RealDictCursor
         )
+        # Autocommit must be set BEFORE set_client_encoding to avoid transaction conflicts
+        conn.autocommit = True
+        conn.set_client_encoding('UTF8')
+        logger.debug(f"Successfully connected to {config['host']}:{config['port']}/{dbDatabase}")
         return conn
     except Exception as e:
-        logger.error(f"Database connection to {dbDatabase} failed: {e}")
+        logger.error(f"Database connection to {dbDatabase} on {config['host']}:{config['port']} failed: {e}")
         raise
@@ -504,6 +555,23 @@ def exportDatabase(
     # Which databases should be exported?
    databasesToExport = onlyDatabases if onlyDatabases else ALL_DATABASES
 
+    # Check whether the basic configuration values are present
+    # APP_CONFIG.get() automatically decrypts values ending in _SECRET
+    # Each database has its own variables (DB_APP_USER, DB_CHAT_USER, etc.)
+    missingConfigs = []
+    for dbName in databasesToExport:
+        config = _getDbConfig(dbName)
+        if not config["user"] or not config["password"]:
+            missingConfigs.append(f"{dbName}: User={'set' if config['user'] else 'MISSING'}, Password={'set' if config['password'] else 'MISSING'}")
+
+    if missingConfigs:
+        logger.error("IMPORTANT: Some database configurations are missing!")
+        for missing in missingConfigs:
+            logger.error(f"  {missing}")
+        logger.error("  Please set the environment variables or configure the .env file")
+        logger.error("  Note: encrypted secrets require the APP_KEY_SYSVAR environment variable!")
+        logger.error("  Expected variables: DB_APP_USER, DB_CHAT_USER, DB_MANAGEMENT_USER, etc.")
+
     # Generate the default output path (in the log folder)
     if not outputPath:
         timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
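The per-database prefixes introduced above resolve with a fallback chain: the DB-specific key is tried first, then the generic DB_* key. A minimal sketch of that resolution order with a stubbed config (keys and values are illustrative, APP_CONFIG is faked here):

# Stubbed walkthrough of _getDbConfig's fallback chain.
stub = {
    "DB_HOST": "localhost",              # generic fallback
    "DB_CHAT_HOST": "chat-db.internal",  # DB-specific override wins
    "DB_USER": "poweron",                # no DB_CHAT_USER, so the generic key applies
}

def get(key, default=None):
    return stub.get(key, default)

prefix = "DB_CHAT"
host = get(f"{prefix}_HOST") or get("DB_HOST", "localhost")  # -> "chat-db.internal"
user = get(f"{prefix}_USER") or get("DB_USER")               # -> "poweron" (fallback)
print(host, user)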
@ -1,612 +0,0 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
# All rights reserved.
|
||||
"""
|
||||
Datenbank Import-Tool für Migration.
|
||||
|
||||
Dieses Script importiert Daten aus einer JSON-Migrationsdatei
|
||||
in ALLE PowerOn PostgreSQL-Datenbanken.
|
||||
|
||||
ACHTUNG: Dieses Script kann bestehende Daten überschreiben!
|
||||
Bitte vor dem Import ein Backup erstellen.
|
||||
|
||||
Datenbanken:
|
||||
- poweron_app (User, Mandate, RBAC, Features, etc.)
|
||||
- poweron_chat (Chat-Konversationen und Nachrichten)
|
||||
- poweron_management (Workflows, Prompts, Connections, etc.)
|
||||
- poweron_realestate (Real Estate Daten)
|
||||
- poweron_trustee (Trustee Daten)
|
||||
|
||||
Verwendung:
|
||||
python tool_db_import_migration.py <import_file.json> [--dry-run] [--force]
|
||||
|
||||
Optionen:
|
||||
--dry-run Simuliert den Import ohne Änderungen
|
||||
--force Bestätigung überspringen
|
||||
--clear-first Tabellen vor dem Import leeren
|
||||
--only-tables Komma-getrennte Liste von Tabellen (nur diese importieren)
|
||||
--only-db Komma-getrennte Liste von Datenbanken (nur diese importieren)
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import json
|
||||
import argparse
|
||||
import logging
|
||||
import time
|
||||
from datetime import datetime
|
||||
from typing import Dict, List, Any, Optional
|
||||
from pathlib import Path
|
||||
|
||||
import psycopg2
|
||||
import psycopg2.extras
|
||||
|
||||
# Logging konfigurieren
|
||||
logging.basicConfig(
|
||||
level=logging.INFO,
|
||||
format='%(asctime)s - %(levelname)s - %(message)s',
|
||||
datefmt='%Y-%m-%d %H:%M:%S'
|
||||
)
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Alle PowerOn Datenbanken
|
||||
ALL_DATABASES = [
|
||||
"poweron_app",
|
||||
"poweron_chat",
|
||||
"poweron_management",
|
||||
"poweron_realestate",
|
||||
"poweron_trustee",
|
||||
]
|
||||
|
||||
|
||||
def _loadEnvConfig() -> Dict[str, str]:
|
||||
"""Lädt die Konfiguration direkt aus der .env Datei."""
|
||||
config = {}
|
||||
envPath = Path(__file__).parent / '.env'
|
||||
|
||||
if not envPath.exists():
|
||||
logger.warning(f"Environment file not found at {envPath}")
|
||||
return config
|
||||
|
||||
# Versuche verschiedene Encodings
|
||||
encodings = ['utf-8', 'utf-8-sig', 'latin-1', 'cp1252']
|
||||
|
||||
for encoding in encodings:
|
||||
try:
|
||||
with open(envPath, 'r', encoding=encoding) as f:
|
||||
for line in f:
|
||||
line = line.strip()
|
||||
if not line or line.startswith('#'):
|
||||
continue
|
||||
if '=' in line:
|
||||
key, value = line.split('=', 1)
|
||||
config[key.strip()] = value.strip()
|
||||
# Erfolgreich geladen
|
||||
return config
|
||||
except UnicodeDecodeError:
|
||||
continue
|
||||
except Exception as e:
|
||||
logger.error(f"Error loading .env file with {encoding}: {e}")
|
||||
continue
|
||||
|
||||
logger.error(f"Could not load .env file with any encoding")
|
||||
return config
|
||||
|
||||
|
||||
# Globale Konfiguration laden
|
||||
_ENV_CONFIG = _loadEnvConfig()
|
||||
|
||||
|
||||
def _getConfigValue(key: str, default: str = None) -> str:
|
||||
"""Holt einen Konfigurationswert."""
|
||||
return _ENV_CONFIG.get(key, os.environ.get(key, default))
|
||||
|
||||
|
||||
def _getUtcTimestamp() -> float:
|
||||
"""Gibt den aktuellen UTC-Timestamp zurück."""
|
||||
return time.time()
|
||||
|
||||
|
||||
def _databaseExists(dbDatabase: str) -> bool:
|
||||
"""Prüft ob eine Datenbank existiert."""
|
||||
dbHost = _getConfigValue("DB_HOST", "localhost")
|
||||
dbUser = _getConfigValue("DB_USER")
|
||||
dbPassword = _getConfigValue("DB_PASSWORD_SECRET")
|
||||
dbPort = int(_getConfigValue("DB_PORT", "5432"))
|
||||
|
||||
try:
|
||||
conn = psycopg2.connect(
|
||||
host=dbHost,
|
||||
port=dbPort,
|
||||
database="postgres",
|
||||
user=dbUser,
|
||||
password=dbPassword
|
||||
)
|
||||
conn.autocommit = True
|
||||
|
||||
with conn.cursor() as cursor:
|
||||
cursor.execute(
|
||||
"SELECT 1 FROM pg_database WHERE datname = %s",
|
||||
(dbDatabase,)
|
||||
)
|
||||
exists = cursor.fetchone() is not None
|
||||
|
||||
conn.close()
|
||||
return exists
|
||||
except Exception as e:
|
||||
logger.error(f"Fehler beim Prüfen der Datenbank {dbDatabase}: {e}")
|
||||
return False
|
||||
|
||||
|
||||
def _getDbConnection(dbDatabase: str, autocommit: bool = False):
|
||||
"""Erstellt eine Verbindung zu einer spezifischen PostgreSQL-Datenbank."""
|
||||
# Erst prüfen ob Datenbank existiert
|
||||
if not _databaseExists(dbDatabase):
|
||||
logger.warning(f"Datenbank '{dbDatabase}' existiert nicht")
|
||||
return None
|
||||
|
||||
dbHost = _getConfigValue("DB_HOST", "localhost")
|
||||
dbUser = _getConfigValue("DB_USER")
|
||||
dbPassword = _getConfigValue("DB_PASSWORD_SECRET")
|
||||
dbPort = int(_getConfigValue("DB_PORT", "5432"))
|
||||
|
||||
try:
|
||||
conn = psycopg2.connect(
|
||||
host=dbHost,
|
||||
port=dbPort,
|
||||
database=dbDatabase,
|
||||
user=dbUser,
|
||||
password=dbPassword,
|
||||
cursor_factory=psycopg2.extras.RealDictCursor
|
||||
)
|
||||
conn.set_client_encoding('UTF8')
|
||||
conn.autocommit = autocommit
|
||||
return conn
|
||||
except Exception as e:
|
||||
logger.error(f"Datenbankverbindung zu {dbDatabase} fehlgeschlagen: {e}")
|
||||
raise
|
||||
|
||||
|
||||
def _getExistingTables(conn) -> List[str]:
|
||||
"""Gibt alle Tabellennamen in der Datenbank zurück."""
|
||||
with conn.cursor() as cursor:
|
||||
cursor.execute("""
|
||||
SELECT table_name
|
||||
FROM information_schema.tables
|
||||
WHERE table_schema = 'public'
|
||||
AND table_type = 'BASE TABLE'
|
||||
ORDER BY table_name
|
||||
""")
|
||||
tables = [row["table_name"] for row in cursor.fetchall()]
|
||||
return tables
|
||||
|
||||
|
||||
def _getTableColumns(conn, tableName: str) -> List[str]:
|
||||
"""Gibt alle Spalten einer Tabelle zurück."""
|
||||
with conn.cursor() as cursor:
|
||||
cursor.execute("""
|
||||
SELECT column_name
|
||||
FROM information_schema.columns
|
||||
WHERE table_name = %s AND table_schema = 'public'
|
||||
""", (tableName,))
|
||||
columns = [row["column_name"] for row in cursor.fetchall()]
|
||||
return columns
|
||||
|
||||
|
||||
def _clearTable(conn, tableName: str):
|
||||
"""Löscht alle Daten aus einer Tabelle."""
|
||||
with conn.cursor() as cursor:
|
||||
cursor.execute(f'DELETE FROM "{tableName}"')
|
||||
|
||||
|
||||
def _insertRecord(conn, tableName: str, record: Dict[str, Any], existingColumns: List[str]) -> bool:
|
||||
"""Fügt einen Datensatz in eine Tabelle ein (UPSERT)."""
|
||||
filteredRecord = {k: v for k, v in record.items() if k in existingColumns}
|
||||
|
||||
if not filteredRecord:
|
||||
return False
|
||||
|
||||
# Metadaten hinzufügen falls nicht vorhanden
|
||||
currentTime = _getUtcTimestamp()
|
||||
if "_createdAt" not in filteredRecord and "_createdAt" in existingColumns:
|
||||
filteredRecord["_createdAt"] = currentTime
|
||||
if "_modifiedAt" in existingColumns:
|
||||
filteredRecord["_modifiedAt"] = currentTime
|
||||
|
||||
columns = list(filteredRecord.keys())
|
||||
values = []
|
||||
|
||||
for col in columns:
|
||||
value = filteredRecord[col]
|
||||
if isinstance(value, (dict, list)):
|
||||
values.append(json.dumps(value))
|
||||
else:
|
||||
values.append(value)
|
||||
|
||||
colNames = ", ".join([f'"{col}"' for col in columns])
|
||||
placeholders = ", ".join(["%s"] * len(columns))
|
||||
|
||||
updateCols = [col for col in columns if col not in ["id", "_createdAt", "_createdBy"]]
|
||||
updateClause = ", ".join([f'"{col}" = EXCLUDED."{col}"' for col in updateCols])
|
||||
|
||||
if updateClause:
|
||||
sql = f'''
|
||||
INSERT INTO "{tableName}" ({colNames})
|
||||
VALUES ({placeholders})
|
||||
ON CONFLICT ("id") DO UPDATE SET {updateClause}
|
||||
'''
|
||||
else:
|
||||
sql = f'''
|
||||
INSERT INTO "{tableName}" ({colNames})
|
||||
VALUES ({placeholders})
|
||||
ON CONFLICT ("id") DO NOTHING
|
||||
'''
|
||||
|
||||
try:
|
||||
with conn.cursor() as cursor:
|
||||
cursor.execute(sql, values)
|
||||
return True
|
||||
except Exception as e:
|
||||
logger.error(f"Fehler beim Einfügen in {tableName}: {e}")
|
||||
return False
|
||||
|
||||
|
||||
def loadMigrationFile(filePath: str) -> Dict[str, Any]:
|
||||
"""Lädt die Migrationsdatei."""
|
||||
logger.info(f"Lade Migrationsdatei: {filePath}")
|
||||
|
||||
if not os.path.exists(filePath):
|
||||
raise FileNotFoundError(f"Datei nicht gefunden: {filePath}")
|
||||
|
||||
with open(filePath, "r", encoding="utf-8") as f:
|
||||
data = json.load(f)
|
||||
|
||||
# Validierung - unterstütze beide Formate (alt: tables, neu: databases)
|
||||
if "databases" not in data and "tables" not in data:
|
||||
raise ValueError("Ungültiges Migrationsformat: 'databases' oder 'tables' erforderlich")
|
||||
|
||||
return data
|
||||
|
||||
|
||||
def _importSingleDatabase(
|
||||
dbName: str,
|
||||
dbData: Dict[str, Any],
|
||||
dryRun: bool,
|
||||
clearFirst: bool,
|
||||
onlyTables: Optional[List[str]]
|
||||
) -> Dict[str, Any]:
|
||||
"""Importiert Daten in eine einzelne Datenbank."""
|
||||
stats = {
|
||||
"imported": {},
|
||||
"skipped": {},
|
||||
"errors": {},
|
||||
"totalImported": 0,
|
||||
"totalSkipped": 0,
|
||||
"totalErrors": 0
|
||||
}
|
||||
|
||||
conn = _getDbConnection(dbName)
|
||||
if conn is None:
|
||||
logger.warning(f" Datenbank '{dbName}' existiert nicht - übersprungen")
|
||||
return stats
|
||||
|
||||
try:
|
||||
existingTables = _getExistingTables(conn)
|
||||
tables = dbData.get("tables", {})
|
||||
|
||||
tablesToImport = list(tables.keys())
|
||||
if onlyTables:
|
||||
tablesToImport = [t for t in tablesToImport if t in onlyTables]
|
||||
|
||||
for tableName in tablesToImport:
|
||||
records = tables[tableName]
|
||||
|
||||
if tableName not in existingTables:
|
||||
logger.warning(f" Tabelle '{tableName}' existiert nicht - übersprungen")
|
||||
stats["skipped"][tableName] = len(records)
|
||||
stats["totalSkipped"] += len(records)
|
||||
continue
|
||||
|
||||
if dryRun:
|
||||
stats["imported"][tableName] = len(records)
|
||||
stats["totalImported"] += len(records)
|
||||
continue
|
||||
|
||||
if clearFirst:
|
||||
_clearTable(conn, tableName)
|
||||
|
||||
existingColumns = _getTableColumns(conn, tableName)
|
||||
|
||||
imported = 0
|
||||
errors = 0
|
||||
|
||||
for record in records:
|
||||
if _insertRecord(conn, tableName, record, existingColumns):
|
||||
imported += 1
|
||||
else:
|
||||
errors += 1
|
||||
|
||||
stats["imported"][tableName] = imported
|
||||
stats["totalImported"] += imported
|
||||
|
||||
if errors > 0:
|
||||
stats["errors"][tableName] = errors
|
||||
stats["totalErrors"] += errors
|
||||
|
||||
if imported > 0:
|
||||
logger.info(f" {tableName}: {imported} importiert, {errors} Fehler")
|
||||
|
||||
if not dryRun:
|
||||
conn.commit()
|
||||
else:
|
||||
conn.rollback()
|
||||
|
||||
return stats
|
||||
|
||||
except Exception as e:
|
||||
conn.rollback()
|
||||
logger.error(f" Import fehlgeschlagen: {e}")
|
||||
raise
|
||||
|
||||
finally:
|
||||
conn.close()
|
||||
|
||||
|
||||
def importDatabase(
|
||||
filePath: str,
|
||||
dryRun: bool = False,
|
||||
clearFirst: bool = False,
|
||||
onlyTables: Optional[List[str]] = None,
|
||||
onlyDatabases: Optional[List[str]] = None
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
Importiert Daten aus einer Migrationsdatei.
|
||||
|
||||
Args:
|
||||
filePath: Pfad zur Migrationsdatei
|
||||
dryRun: Nur simulieren
|
||||
clearFirst: Tabellen vor Import leeren
|
||||
onlyTables: Nur diese Tabellen importieren
|
||||
onlyDatabases: Nur diese Datenbanken importieren
|
||||
|
||||
Returns:
|
||||
Import-Statistiken
|
||||
"""
|
||||
migrationData = loadMigrationFile(filePath)
|
||||
meta = migrationData.get("meta", {})
|
||||
|
||||
logger.info(f"Migrationsdatei geladen:")
|
||||
logger.info(f" Exportiert am: {meta.get('exportedAt', 'unbekannt')}")
|
||||
logger.info(f" Quelle: {meta.get('exportedFrom', 'unbekannt')}")
|
||||
|
||||
stats = {
|
||||
"databases": {},
|
||||
"totalImported": 0,
|
||||
"totalSkipped": 0,
|
||||
"totalErrors": 0
|
||||
}
|
||||
|
||||
# Neues Format (mehrere Datenbanken)
|
||||
if "databases" in migrationData:
|
||||
databases = migrationData["databases"]
|
||||
logger.info(f" Datenbanken: {len(databases)}")
|
||||
logger.info(f" Tabellen: {meta.get('totalTables', 'unbekannt')}")
|
||||
logger.info(f" Datensätze: {meta.get('totalRecords', 'unbekannt')}")
|
||||
|
||||
for dbName, dbData in databases.items():
|
||||
if onlyDatabases and dbName not in onlyDatabases:
|
||||
continue
|
||||
|
||||
logger.info(f"Importiere Datenbank: {dbName}")
|
||||
dbStats = _importSingleDatabase(dbName, dbData, dryRun, clearFirst, onlyTables)
|
||||
|
||||
stats["databases"][dbName] = dbStats
|
||||
stats["totalImported"] += dbStats["totalImported"]
|
||||
stats["totalSkipped"] += dbStats["totalSkipped"]
|
||||
stats["totalErrors"] += dbStats["totalErrors"]
|
||||
|
||||
# Altes Format (einzelne Datenbank - poweron_app)
|
||||
elif "tables" in migrationData:
|
||||
logger.info(" Format: Legacy (einzelne Datenbank)")
|
||||
dbName = "poweron_app"
|
||||
dbData = {"tables": migrationData["tables"]}
|
||||
|
||||
if not onlyDatabases or dbName in onlyDatabases:
|
||||
logger.info(f"Importiere Datenbank: {dbName}")
|
||||
dbStats = _importSingleDatabase(dbName, dbData, dryRun, clearFirst, onlyTables)
|
||||
|
||||
stats["databases"][dbName] = dbStats
|
||||
stats["totalImported"] = dbStats["totalImported"]
|
||||
stats["totalSkipped"] = dbStats["totalSkipped"]
|
||||
stats["totalErrors"] = dbStats["totalErrors"]
|
||||
|
||||
if dryRun:
|
||||
logger.info("Dry-Run: Keine Änderungen vorgenommen")
|
||||
|
||||
return stats
|
||||
|
||||
|
||||
def printImportPreview(filePath: str):
    """Print a preview of the data to be imported."""
    migrationData = loadMigrationFile(filePath)
    meta = migrationData.get("meta", {})

    print("\n" + "=" * 70)
    print("IMPORT PREVIEW")
    print("=" * 70)
    print(f"File: {filePath}")
    print(f"Exported at: {meta.get('exportedAt', 'unknown')}")
    print(f"Source: {meta.get('exportedFrom', 'unknown')}")

    # New format
    if "databases" in migrationData:
        databases = migrationData["databases"]
        print(f"Databases: {len(databases)}")
        print("=" * 70)

        grandTotal = 0
        for dbName, dbData in databases.items():
            tables = dbData.get("tables", {})
            dbTotal = sum(len(records) for records in tables.values())
            grandTotal += dbTotal

            print(f"\n{dbName} ({dbTotal} records)")
            print("-" * 70)
            print(f"  {'Table':<45} {'Records':>15}")
            print(f"  {'-' * 45} {'-' * 15}")

            for tableName, records in sorted(tables.items()):
                if len(records) > 0:
                    print(f"  {tableName:<45} {len(records):>15}")

        print("\n" + "=" * 70)
        print(f"TOTAL: {grandTotal} records")

    # Legacy format
    elif "tables" in migrationData:
        tables = migrationData["tables"]
        print("Format: legacy (poweron_app)")
        print("-" * 70)
        print(f"{'Table':<45} {'Records':>15}")
        print("-" * 70)

        totalRecords = 0
        for tableName, records in sorted(tables.items()):
            count = len(records)
            totalRecords += count
            if count > 0:
                print(f"{tableName:<45} {count:>15}")

        print("-" * 70)
        print(f"{'TOTAL':<45} {totalRecords:>15}")

    print("=" * 70 + "\n")

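# Quick sanity check without running an import (equivalent to the --preview
# flag handled in main() below):
#   printImportPreview("migration_export.json")
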
def main():
    parser = argparse.ArgumentParser(
        description="Imports database data from a migration file",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
Databases:
  poweron_app        - users, mandates, RBAC, features
  poweron_chat       - chat conversations
  poweron_management - workflows, prompts, connections
  poweron_realestate - real estate data
  poweron_trustee    - trustee data

Examples:
  python tool_db_import_migration.py migration_export.json --dry-run
  python tool_db_import_migration.py migration_export.json --preview
  python tool_db_import_migration.py migration_export.json --force
  python tool_db_import_migration.py migration_export.json --clear-first --force
  python tool_db_import_migration.py migration_export.json --only-db poweron_app
  python tool_db_import_migration.py migration_export.json --only-tables UserInDB,Mandate
"""
    )

    parser.add_argument(
        "import_file",
        help="Path to the migration file (JSON)",
        type=str
    )

    parser.add_argument(
        "--dry-run",
        help="Simulate the import without applying changes",
        action="store_true"
    )

    parser.add_argument(
        "--force",
        help="Skip the confirmation prompt",
        action="store_true"
    )

    parser.add_argument(
        "--clear-first",
        help="Clear target tables before importing",
        action="store_true"
    )

    parser.add_argument(
        "--only-tables",
        help="Import only these tables (comma-separated)",
        type=str,
        default=""
    )

    parser.add_argument(
        "--only-db",
        help="Import only these database(s) (comma-separated)",
        type=str,
        default=""
    )

    parser.add_argument(
        "--preview",
        help="Show a preview only (no import)",
        action="store_true"
    )

    args = parser.parse_args()

    # Preview only
    if args.preview:
        printImportPreview(args.import_file)
        return

    # Parse list filters
    onlyTables = None
    if args.only_tables:
        onlyTables = [t.strip() for t in args.only_tables.split(",") if t.strip()]

    onlyDatabases = None
    if args.only_db:
        onlyDatabases = [db.strip() for db in args.only_db.split(",") if db.strip()]

    # Ask for confirmation
    if not args.dry_run and not args.force:
        printImportPreview(args.import_file)

        if args.clear_first:
            print("WARNING: --clear-first will DELETE ALL existing data in the target tables!")

        response = input("\nStart the import? [y/N]: ")
        # German "j"/"ja" are accepted alongside "y"/"yes"
        if response.lower() not in ["y", "yes", "j", "ja"]:
            print("Import aborted.")
            return

    # Run the import
    try:
        if args.dry_run:
            logger.info("=== DRY-RUN MODE ===")

        stats = importDatabase(
            filePath=args.import_file,
            dryRun=args.dry_run,
            clearFirst=args.clear_first,
            onlyTables=onlyTables,
            onlyDatabases=onlyDatabases
        )

        print("\n" + "=" * 70)
        print("IMPORT RESULT")
        print("=" * 70)
        print(f"Imported:  {stats['totalImported']} records")
        print(f"Skipped:   {stats['totalSkipped']} records")
        print(f"Errors:    {stats['totalErrors']} records")

        if args.dry_run:
            print("\n(Dry run: no actual changes applied)")
        else:
            print("\nImport completed successfully!")

        print("=" * 70 + "\n")

    except Exception as e:
        logger.error(f"Import failed: {e}")
        sys.exit(1)


if __name__ == "__main__":
    main()
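# Illustrative programmatic use (hypothetical import path, assuming the script
# is importable from sys.path; signature mirrors the call in main() above):
#   from tool_db_import_migration import importDatabase
#   stats = importDatabase("migration_export.json", dryRun=True)
#   print(stats["totalImported"], stats["totalSkipped"], stats["totalErrors"])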