Date: Mon, 26 Jan 2026 01:29:17 +0100
Subject: [PATCH 26/32] mandate invitation and notification system
---
app.py | 3 +
modules/connectors/connectorDbPostgre.py | 48 +-
modules/datamodels/datamodelInvitation.py | 17 +-
modules/datamodels/datamodelNotification.py | 209 +++++++
modules/routes/routeAdminFeatures.py | 53 +-
modules/routes/routeInvitations.py | 152 +++++-
modules/routes/routeNotifications.py | 575 ++++++++++++++++++++
modules/routes/routeSecurityLocal.py | 47 ++
modules/system/registry.py | 16 +-
9 files changed, 1072 insertions(+), 48 deletions(-)
create mode 100644 modules/datamodels/datamodelNotification.py
create mode 100644 modules/routes/routeNotifications.py
diff --git a/app.py b/app.py
index dec478fc..a6f07f33 100644
--- a/app.py
+++ b/app.py
@@ -492,6 +492,9 @@ app.include_router(featuresAdminRouter)
from modules.routes.routeInvitations import router as invitationsRouter
app.include_router(invitationsRouter)
+from modules.routes.routeNotifications import router as notificationsRouter
+app.include_router(notificationsRouter)
+
from modules.routes.routeAdminRbacExport import router as rbacAdminExportRouter
app.include_router(rbacAdminExportRouter)
diff --git a/modules/connectors/connectorDbPostgre.py b/modules/connectors/connectorDbPostgre.py
index 2dfec2b4..6c89a85f 100644
--- a/modules/connectors/connectorDbPostgre.py
+++ b/modules/connectors/connectorDbPostgre.py
@@ -40,6 +40,34 @@ class SystemTable(BaseModel):
)
+def _isJsonbType(fieldType) -> bool:
+ """Check if a type should be stored as JSONB in PostgreSQL."""
+ # Direct dict or list
+ if fieldType == dict or fieldType == list:
+ return True
+
+ # Generic List[X] or Dict[X, Y]
+ origin = get_origin(fieldType)
+ if origin in (dict, list):
+ return True
+
+ # Direct Pydantic BaseModel subclass
+ if isinstance(fieldType, type) and issubclass(fieldType, BaseModel):
+ return True
+
+ # Optional[X] - check the inner type
+ if origin is Union:
+ args = get_args(fieldType)
+ for arg in args:
+ if arg is type(None):
+ continue
+ # Recursively check the inner type
+ if _isJsonbType(arg):
+ return True
+
+ return False
+
+
def _get_model_fields(model_class) -> Dict[str, str]:
"""Get all fields from Pydantic model and map to SQL types."""
# Pydantic v2
@@ -52,20 +80,7 @@ def _get_model_fields(model_class) -> Dict[str, str]:
# Check for JSONB fields (Dict, List, or complex types)
# Purely type-based detection - no hardcoded field names
- if (
- field_type == dict
- or field_type == list
- or (
- hasattr(field_type, "__origin__")
- and field_type.__origin__ in (dict, list)
- )
- # Check if field type is directly a Pydantic BaseModel subclass (for nested models like TextMultilingual)
- or (isinstance(field_type, type) and issubclass(field_type, BaseModel))
- # Check if field type is Optional[BaseModel] (Union with None)
- or (hasattr(field_type, "__origin__") and get_origin(field_type) is Union
- and any(isinstance(arg, type) and issubclass(arg, BaseModel)
- for arg in get_args(field_type) if arg is not type(None)))
- ):
+ if _isJsonbType(field_type):
fields[field_name] = "JSONB"
# Simple type mapping
elif field_type in (str, type(None)) or (
@@ -970,7 +985,10 @@ class DatabaseConnector:
record["id"] = str(uuid.uuid4())
# Save record
- self._saveRecord(model_class, record["id"], record)
+ success = self._saveRecord(model_class, record["id"], record)
+ if not success:
+ table = model_class.__name__
+ raise ValueError(f"Failed to save record {record['id']} to table {table}")
# Check if this is the first record in the table and register as initial ID
table = model_class.__name__
diff --git a/modules/datamodels/datamodelInvitation.py b/modules/datamodels/datamodelInvitation.py
index a35dfb09..ef6d6a80 100644
--- a/modules/datamodels/datamodelInvitation.py
+++ b/modules/datamodels/datamodelInvitation.py
@@ -46,9 +46,13 @@ class Invitation(BaseModel):
)
# Einladungs-Details
+ targetUsername: str = Field(
+ description="Username of the invited user (must match on acceptance)",
+ json_schema_extra={"frontend_type": "text", "frontend_readonly": False, "frontend_required": True}
+ )
email: Optional[str] = Field(
default=None,
- description="Target email address (optional, for tracking)",
+ description="Email address to send invitation link (optional)",
json_schema_extra={"frontend_type": "email", "frontend_readonly": False, "frontend_required": False}
)
createdBy: str = Field(
@@ -82,6 +86,13 @@ class Invitation(BaseModel):
json_schema_extra={"frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False}
)
+ # Email-Status
+ emailSent: bool = Field(
+ default=False,
+ description="Whether the invitation email was successfully sent",
+ json_schema_extra={"frontend_type": "checkbox", "frontend_readonly": True, "frontend_required": False}
+ )
+
# Einschränkungen
maxUses: int = Field(
default=1,
@@ -107,13 +118,15 @@ registerModelLabels(
"mandateId": {"en": "Mandate", "de": "Mandant", "fr": "Mandat"},
"featureInstanceId": {"en": "Feature Instance", "de": "Feature-Instanz", "fr": "Instance"},
"roleIds": {"en": "Roles", "de": "Rollen", "fr": "Rôles"},
- "email": {"en": "Email", "de": "E-Mail", "fr": "Email"},
+ "targetUsername": {"en": "Target Username", "de": "Ziel-Benutzername", "fr": "Nom d'utilisateur cible"},
+ "email": {"en": "Email (optional)", "de": "E-Mail (optional)", "fr": "Email (optionnel)"},
"createdBy": {"en": "Created By", "de": "Erstellt von", "fr": "Créé par"},
"createdAt": {"en": "Created At", "de": "Erstellt am", "fr": "Créé le"},
"expiresAt": {"en": "Expires At", "de": "Gültig bis", "fr": "Expire le"},
"usedBy": {"en": "Used By", "de": "Verwendet von", "fr": "Utilisé par"},
"usedAt": {"en": "Used At", "de": "Verwendet am", "fr": "Utilisé le"},
"revokedAt": {"en": "Revoked At", "de": "Widerrufen am", "fr": "Révoqué le"},
+ "emailSent": {"en": "Email Sent", "de": "E-Mail gesendet", "fr": "Email envoyé"},
"maxUses": {"en": "Max Uses", "de": "Max. Verwendungen", "fr": "Utilisations max"},
"currentUses": {"en": "Current Uses", "de": "Aktuelle Verwendungen", "fr": "Utilisations actuelles"},
},
diff --git a/modules/datamodels/datamodelNotification.py b/modules/datamodels/datamodelNotification.py
new file mode 100644
index 00000000..b1475767
--- /dev/null
+++ b/modules/datamodels/datamodelNotification.py
@@ -0,0 +1,209 @@
+# Copyright (c) 2025 Patrick Motsch
+# All rights reserved.
+"""
+Notification model for in-app notifications.
+Supports actionable notifications (e.g., invitation accept/decline).
+"""
+
+import uuid
+from typing import Optional, List
+from enum import Enum
+from pydantic import BaseModel, Field, ConfigDict
+from modules.shared.attributeUtils import registerModelLabels
+from modules.shared.timeUtils import getUtcTimestamp
+
+
+class NotificationType(str, Enum):
+ """Types of notifications"""
+ INVITATION = "invitation" # Einladung zu Mandat/Feature
+ SYSTEM = "system" # System-Nachrichten
+ WORKFLOW = "workflow" # Workflow-Status Updates
+ MENTION = "mention" # Erwähnung in Chat/Kommentar
+
+
+class NotificationStatus(str, Enum):
+ """Status of a notification"""
+ UNREAD = "unread" # Noch nicht gelesen
+ READ = "read" # Gelesen
+ ACTIONED = "actioned" # Aktion wurde durchgeführt
+ DISMISSED = "dismissed" # Verworfen/Geschlossen
+
+
+class NotificationAction(BaseModel):
+ """Possible action for a notification"""
+ actionId: str = Field(
+ description="Unique identifier for the action (e.g., 'accept', 'decline')"
+ )
+ label: str = Field(
+ description="Display label for the action button"
+ )
+ style: str = Field(
+ default="default",
+ description="Button style: 'primary', 'danger', 'default'"
+ )
+
+
+class UserNotification(BaseModel):
+ """
+ In-app notification for a user.
+ Supports actionable notifications with accept/decline buttons.
+ """
+ id: str = Field(
+ default_factory=lambda: str(uuid.uuid4()),
+ description="Unique ID of the notification",
+ json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False}
+ )
+ userId: str = Field(
+ description="Target user ID for this notification",
+ json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": True}
+ )
+
+ # Notification type and status
+ type: NotificationType = Field(
+ default=NotificationType.SYSTEM,
+ description="Type of notification",
+ json_schema_extra={
+ "frontend_type": "select",
+ "frontend_readonly": True,
+ "frontend_required": True,
+ "frontend_options": [
+ {"value": "invitation", "label": {"en": "Invitation", "de": "Einladung"}},
+ {"value": "system", "label": {"en": "System", "de": "System"}},
+ {"value": "workflow", "label": {"en": "Workflow", "de": "Workflow"}},
+ {"value": "mention", "label": {"en": "Mention", "de": "Erwähnung"}}
+ ]
+ }
+ )
+ status: NotificationStatus = Field(
+ default=NotificationStatus.UNREAD,
+ description="Current status of the notification",
+ json_schema_extra={
+ "frontend_type": "select",
+ "frontend_readonly": True,
+ "frontend_required": False,
+ "frontend_options": [
+ {"value": "unread", "label": {"en": "Unread", "de": "Ungelesen"}},
+ {"value": "read", "label": {"en": "Read", "de": "Gelesen"}},
+ {"value": "actioned", "label": {"en": "Actioned", "de": "Bearbeitet"}},
+ {"value": "dismissed", "label": {"en": "Dismissed", "de": "Verworfen"}}
+ ]
+ }
+ )
+
+ # Content
+ title: str = Field(
+ description="Notification title",
+ json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": True}
+ )
+ message: str = Field(
+ description="Notification message/body",
+ json_schema_extra={"frontend_type": "textarea", "frontend_readonly": True, "frontend_required": True}
+ )
+ icon: Optional[str] = Field(
+ default=None,
+ description="Optional icon identifier (e.g., 'mail', 'warning', 'info')",
+ json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False}
+ )
+
+ # Reference to triggering object (for actionable notifications)
+ referenceType: Optional[str] = Field(
+ default=None,
+ description="Type of referenced object (e.g., 'Invitation', 'Workflow')",
+ json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False}
+ )
+ referenceId: Optional[str] = Field(
+ default=None,
+ description="ID of referenced object",
+ json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False}
+ )
+
+ # Actions (for actionable notifications like invitations)
+ actions: Optional[List[NotificationAction]] = Field(
+ default=None,
+ description="List of possible actions for this notification",
+ json_schema_extra={"frontend_type": "json", "frontend_readonly": True, "frontend_required": False}
+ )
+
+ # Action result (when user takes action)
+ actionTaken: Optional[str] = Field(
+ default=None,
+ description="Which action was taken (actionId)",
+ json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False}
+ )
+ actionResult: Optional[str] = Field(
+ default=None,
+ description="Result message from the action",
+ json_schema_extra={"frontend_type": "textarea", "frontend_readonly": True, "frontend_required": False}
+ )
+
+ # Timestamps
+ createdAt: float = Field(
+ default_factory=getUtcTimestamp,
+ description="When the notification was created (UTC timestamp)",
+ json_schema_extra={"frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False}
+ )
+ readAt: Optional[float] = Field(
+ default=None,
+ description="When the notification was read (UTC timestamp)",
+ json_schema_extra={"frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False}
+ )
+ actionedAt: Optional[float] = Field(
+ default=None,
+ description="When action was taken (UTC timestamp)",
+ json_schema_extra={"frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False}
+ )
+ expiresAt: Optional[float] = Field(
+ default=None,
+ description="When the notification expires (optional, UTC timestamp)",
+ json_schema_extra={"frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False}
+ )
+
+ model_config = ConfigDict(use_enum_values=True)
+
+
+registerModelLabels(
+ "UserNotification",
+ {"en": "Notification", "de": "Benachrichtigung", "fr": "Notification"},
+ {
+ "id": {"en": "ID", "de": "ID", "fr": "ID"},
+ "userId": {"en": "User", "de": "Benutzer", "fr": "Utilisateur"},
+ "type": {"en": "Type", "de": "Typ", "fr": "Type"},
+ "status": {"en": "Status", "de": "Status", "fr": "Statut"},
+ "title": {"en": "Title", "de": "Titel", "fr": "Titre"},
+ "message": {"en": "Message", "de": "Nachricht", "fr": "Message"},
+ "icon": {"en": "Icon", "de": "Symbol", "fr": "Icône"},
+ "referenceType": {"en": "Reference Type", "de": "Referenz-Typ", "fr": "Type de référence"},
+ "referenceId": {"en": "Reference ID", "de": "Referenz-ID", "fr": "ID de référence"},
+ "actions": {"en": "Actions", "de": "Aktionen", "fr": "Actions"},
+ "actionTaken": {"en": "Action Taken", "de": "Durchgeführte Aktion", "fr": "Action effectuée"},
+ "actionResult": {"en": "Action Result", "de": "Aktions-Ergebnis", "fr": "Résultat de l'action"},
+ "createdAt": {"en": "Created At", "de": "Erstellt am", "fr": "Créé le"},
+ "readAt": {"en": "Read At", "de": "Gelesen am", "fr": "Lu le"},
+ "actionedAt": {"en": "Actioned At", "de": "Bearbeitet am", "fr": "Traité le"},
+ "expiresAt": {"en": "Expires At", "de": "Gültig bis", "fr": "Expire le"},
+ },
+)
+
+
+registerModelLabels(
+ "NotificationType",
+ {"en": "Notification Type", "de": "Benachrichtigungs-Typ", "fr": "Type de notification"},
+ {
+ "invitation": {"en": "Invitation", "de": "Einladung", "fr": "Invitation"},
+ "system": {"en": "System", "de": "System", "fr": "Système"},
+ "workflow": {"en": "Workflow", "de": "Workflow", "fr": "Workflow"},
+ "mention": {"en": "Mention", "de": "Erwähnung", "fr": "Mention"},
+ },
+)
+
+
+registerModelLabels(
+ "NotificationStatus",
+ {"en": "Notification Status", "de": "Benachrichtigungs-Status", "fr": "Statut de notification"},
+ {
+ "unread": {"en": "Unread", "de": "Ungelesen", "fr": "Non lu"},
+ "read": {"en": "Read", "de": "Gelesen", "fr": "Lu"},
+ "actioned": {"en": "Actioned", "de": "Bearbeitet", "fr": "Traité"},
+ "dismissed": {"en": "Dismissed", "de": "Verworfen", "fr": "Rejeté"},
+ },
+)
diff --git a/modules/routes/routeAdminFeatures.py b/modules/routes/routeAdminFeatures.py
index 1bb6be16..82b796c1 100644
--- a/modules/routes/routeAdminFeatures.py
+++ b/modules/routes/routeAdminFeatures.py
@@ -21,6 +21,7 @@ from modules.datamodels.datamodelUam import User, UserInDB
from modules.datamodels.datamodelFeatures import Feature, FeatureInstance
from modules.interfaces.interfaceDbApp import getRootInterface
from modules.interfaces.interfaceFeatures import getFeatureInterface
+from modules.security.rbacCatalog import getCatalogService
logger = logging.getLogger(__name__)
@@ -72,15 +73,16 @@ async def list_features(
"""
List all available features.
- Returns global feature definitions that can be activated for mandates.
+ Returns global feature definitions from the RBAC Catalog.
+ Features are automatically registered at startup from feature containers.
Any authenticated user can see available features.
"""
try:
- rootInterface = getRootInterface()
- featureInterface = getFeatureInterface(rootInterface.db)
-
- features = featureInterface.getAllFeatures()
- return [f.model_dump() for f in features]
+ # Features come from the RBAC Catalog (registered at startup from feature containers)
+ # NOT from the database - features are code-defined, not user-created
+ catalogService = getCatalogService()
+ features = catalogService.getFeatureDefinitions()
+ return features
except Exception as e:
logger.error(f"Error listing features: {e}")
@@ -153,14 +155,15 @@ async def get_my_feature_instances(
"features": []
}
- # Get feature info
+ # Get feature info from catalog (features are code-defined)
featureKey = f"{mandateId}_{instance.featureCode}"
if featureKey not in featuresMap:
- feature = featureInterface.getFeature(instance.featureCode)
+ catalogService = getCatalogService()
+ featureDef = catalogService.getFeatureDefinition(instance.featureCode)
featuresMap[featureKey] = {
"code": instance.featureCode,
- "label": feature.label if feature and hasattr(feature, 'label') else {"de": instance.featureCode, "en": instance.featureCode},
- "icon": feature.icon if feature and hasattr(feature, 'icon') else "folder",
+ "label": featureDef.get("label", {"de": instance.featureCode, "en": instance.featureCode}) if featureDef else {"de": instance.featureCode, "en": instance.featureCode},
+ "icon": featureDef.get("icon", "folder") if featureDef else "folder",
"instances": [],
"_mandateId": mandateId # Temporary for grouping
}
@@ -376,8 +379,9 @@ async def create_feature(
rootInterface = getRootInterface()
featureInterface = getFeatureInterface(rootInterface.db)
- # Check if feature already exists
- existing = featureInterface.getFeature(code)
+ # Check if feature already exists in catalog (features are code-defined)
+ catalogService = getCatalogService()
+ existing = catalogService.getFeatureDefinition(code)
if existing:
raise HTTPException(
status_code=status.HTTP_409_CONFLICT,
@@ -525,9 +529,10 @@ async def create_feature_instance(
rootInterface = getRootInterface()
featureInterface = getFeatureInterface(rootInterface.db)
- # Verify feature exists
- feature = featureInterface.getFeature(data.featureCode)
- if not feature:
+ # Verify feature exists in catalog (features are code-defined, not DB-stored)
+ catalogService = getCatalogService()
+ featureDef = catalogService.getFeatureDefinition(data.featureCode)
+ if not featureDef:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail=f"Feature '{data.featureCode}' not found"
@@ -818,9 +823,10 @@ async def create_template_role(
rootInterface = getRootInterface()
featureInterface = getFeatureInterface(rootInterface.db)
- # Verify feature exists
- feature = featureInterface.getFeature(featureCode)
- if not feature:
+ # Verify feature exists in catalog (features are code-defined)
+ catalogService = getCatalogService()
+ featureDef = catalogService.getFeatureDefinition(featureCode)
+ if not featureDef:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail=f"Feature '{featureCode}' not found"
@@ -1331,17 +1337,16 @@ async def get_feature(
featureCode: Feature code (e.g., 'trustee', 'chatbot')
"""
try:
- rootInterface = getRootInterface()
- featureInterface = getFeatureInterface(rootInterface.db)
-
- feature = featureInterface.getFeature(featureCode)
- if not feature:
+ # Features come from the RBAC Catalog (code-defined, not DB-stored)
+ catalogService = getCatalogService()
+ featureDef = catalogService.getFeatureDefinition(featureCode)
+ if not featureDef:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail=f"Feature '{featureCode}' not found"
)
- return feature.model_dump()
+ return featureDef
except HTTPException:
raise
diff --git a/modules/routes/routeInvitations.py b/modules/routes/routeInvitations.py
index 47fda648..2196bd73 100644
--- a/modules/routes/routeInvitations.py
+++ b/modules/routes/routeInvitations.py
@@ -37,7 +37,8 @@ router = APIRouter(
class InvitationCreate(BaseModel):
"""Request model for creating an invitation"""
- email: Optional[str] = Field(None, description="Target email address (optional)")
+ targetUsername: str = Field(..., description="Username of the user to invite (must match on acceptance)")
+ email: Optional[str] = Field(None, description="Email address to send invitation link (optional)")
roleIds: List[str] = Field(..., description="Role IDs to assign to the invited user")
featureInstanceId: Optional[str] = Field(None, description="Optional feature instance access")
expiresInHours: int = Field(
@@ -61,6 +62,7 @@ class InvitationResponse(BaseModel):
mandateId: str
featureInstanceId: Optional[str]
roleIds: List[str]
+ targetUsername: str
email: Optional[str]
createdBy: str
createdAt: float
@@ -71,6 +73,7 @@ class InvitationResponse(BaseModel):
maxUses: int
currentUses: int
inviteUrl: str # Full URL for the invitation
+ emailSent: bool = False # Whether invitation email was sent
class InvitationValidation(BaseModel):
@@ -78,8 +81,11 @@ class InvitationValidation(BaseModel):
valid: bool
reason: Optional[str]
mandateId: Optional[str]
+ mandateName: Optional[str] = None
featureInstanceId: Optional[str]
roleIds: List[str]
+ roleLabels: List[str] = []
+ targetUsername: Optional[str] = None
# =============================================================================
@@ -118,6 +124,11 @@ async def create_invitation(
try:
rootInterface = getRootInterface()
+ # Note: targetUsername does NOT need to exist yet!
+ # The invitation can be for a user who will register later.
+ # When they register with this username (or accept the invitation),
+ # they will get the assigned roles.
+
# Validate role IDs exist and belong to this mandate or are global
for roleId in data.roleIds:
from modules.datamodels.datamodelRbac import Role
@@ -164,6 +175,7 @@ async def create_invitation(
mandateId=str(context.mandateId),
featureInstanceId=data.featureInstanceId,
roleIds=data.roleIds,
+ targetUsername=data.targetUsername,
email=data.email,
createdBy=str(context.user.id),
expiresAt=expiresAt,
@@ -179,9 +191,98 @@ async def create_invitation(
frontendUrl = APP_CONFIG.get("APP_FRONTEND_URL", "http://localhost:8080")
inviteUrl = f"{frontendUrl}/invite/{invitation.token}"
+ # Send email if email address is provided
+ emailSent = False
+ if data.email:
+ try:
+ from modules.connectors.connectorMessagingEmail import ConnectorMessagingEmail
+ from modules.datamodels.datamodelUam import Mandate
+
+ # Get mandate name for the email
+ mandateRecords = rootInterface.db.getRecordset(
+ Mandate,
+ recordFilter={"id": str(context.mandateId)}
+ )
+ mandateName = mandateRecords[0].get("name", "PowerOn") if mandateRecords else "PowerOn"
+
+ emailConnector = ConnectorMessagingEmail()
+ emailSubject = f"Einladung zu {mandateName}"
+            emailBody = f"""
+            <html>
+            <body style="font-family: Arial, sans-serif;">
+            <h2>Sie wurden eingeladen!</h2>
+            <p>Hallo {data.targetUsername},</p>
+            <p>Sie wurden eingeladen, dem Mandanten <strong>{mandateName}</strong> beizutreten.</p>
+            <p>Klicken Sie auf den folgenden Link, um die Einladung anzunehmen:</p>
+            <p>
+            <a href="{inviteUrl}">
+            Einladung annehmen
+            </a>
+            </p>
+            <p>
+            Oder kopieren Sie diesen Link in Ihren Browser:<br>
+            {inviteUrl}
+            </p>
+            <p>
+            Diese Einladung ist {data.expiresInHours} Stunden gültig.
+            </p>
+            <hr>
+            <p style="color: #888888; font-size: 12px;">
+            Diese E-Mail wurde automatisch von PowerOn gesendet.
+            </p>
+            </body>
+            </html>
+            """
+
+ emailConnector.send(
+ recipient=data.email,
+ subject=emailSubject,
+ message=emailBody
+ )
+ emailSent = True
+ logger.info(f"Invitation email sent to {data.email} for user {data.targetUsername}")
+ except Exception as emailError:
+ logger.warning(f"Failed to send invitation email to {data.email}: {emailError}")
+ # Don't fail the invitation creation if email fails
+
+ # Update the invitation record with emailSent status
+ if emailSent:
+ rootInterface.db.recordModify(
+ Invitation,
+ createdRecord.get("id"),
+ {"emailSent": True}
+ )
+ createdRecord["emailSent"] = True
+
+ # If the target user already exists, create an in-app notification
+ try:
+ existingUser = rootInterface.getUserByUsername(data.targetUsername)
+ if existingUser:
+ from modules.routes.routeNotifications import createInvitationNotification
+ from modules.datamodels.datamodelUam import Mandate
+
+ # Get mandate name for notification
+ mandateRecords = rootInterface.db.getRecordset(
+ Mandate,
+ recordFilter={"id": str(context.mandateId)}
+ )
+ mandateName = mandateRecords[0].get("mandateLabel", "PowerOn") if mandateRecords else "PowerOn"
+ inviterName = context.user.fullName or context.user.username
+
+ createInvitationNotification(
+ userId=str(existingUser.id),
+ invitationId=str(createdRecord.get("id")),
+ mandateName=mandateName,
+ inviterName=inviterName
+ )
+ logger.info(f"Created notification for existing user {data.targetUsername}")
+ except Exception as notifError:
+ logger.warning(f"Failed to create notification for user {data.targetUsername}: {notifError}")
+ # Don't fail the invitation if notification fails
+
logger.info(
- f"User {context.user.id} created invitation for mandate {context.mandateId}, "
- f"expires in {data.expiresInHours}h"
+ f"User {context.user.id} created invitation for user {data.targetUsername} "
+ f"to mandate {context.mandateId}, expires in {data.expiresInHours}h"
)
return InvitationResponse(
@@ -190,6 +291,7 @@ async def create_invitation(
mandateId=str(createdRecord.get("mandateId")),
featureInstanceId=createdRecord.get("featureInstanceId"),
roleIds=createdRecord.get("roleIds", []),
+ targetUsername=createdRecord.get("targetUsername"),
email=createdRecord.get("email"),
createdBy=str(createdRecord.get("createdBy")),
createdAt=createdRecord.get("createdAt"),
@@ -199,7 +301,8 @@ async def create_invitation(
revokedAt=createdRecord.get("revokedAt"),
maxUses=createdRecord.get("maxUses", 1),
currentUses=createdRecord.get("currentUses", 0),
- inviteUrl=inviteUrl
+ inviteUrl=inviteUrl,
+ emailSent=emailSent
)
except HTTPException:
@@ -441,12 +544,38 @@ async def validate_invitation(
roleIds=[]
)
+ # Get additional info for display
+ mandateId = invitation.get("mandateId")
+ mandateName = None
+ roleLabels = []
+ targetUsername = invitation.get("targetUsername")
+
+ # Get mandate name
+ from modules.datamodels.datamodelUam import Mandate
+ mandateRecords = rootInterface.db.getRecordset(
+ Mandate,
+ recordFilter={"id": mandateId}
+ )
+ if mandateRecords:
+ mandateName = mandateRecords[0].get("name")
+
+ # Get role names
+ roleIds = invitation.get("roleIds", [])
+ from modules.datamodels.datamodelRbac import Role
+ for roleId in roleIds:
+ roleRecords = rootInterface.db.getRecordset(Role, recordFilter={"id": roleId})
+ if roleRecords:
+ roleLabels.append(roleRecords[0].get("roleLabel", roleId))
+
return InvitationValidation(
valid=True,
reason=None,
- mandateId=invitation.get("mandateId"),
+ mandateId=mandateId,
+ mandateName=mandateName,
featureInstanceId=invitation.get("featureInstanceId"),
- roleIds=invitation.get("roleIds", [])
+ roleIds=roleIds,
+ roleLabels=roleLabels,
+ targetUsername=targetUsername
)
except Exception as e:
@@ -513,6 +642,17 @@ async def accept_invitation(
detail="Invitation has reached maximum uses"
)
+ # Validate username matches - the invitation is bound to a specific user
+ targetUsername = invitation.get("targetUsername")
+ if targetUsername and currentUser.username != targetUsername:
+ logger.warning(
+ f"User {currentUser.username} tried to accept invitation meant for {targetUsername}"
+ )
+ raise HTTPException(
+ status_code=status.HTTP_403_FORBIDDEN,
+ detail=f"Diese Einladung ist für Benutzer '{targetUsername}' bestimmt"
+ )
+
mandateId = invitation.get("mandateId")
roleIds = invitation.get("roleIds", [])
featureInstanceId = invitation.get("featureInstanceId")
diff --git a/modules/routes/routeNotifications.py b/modules/routes/routeNotifications.py
new file mode 100644
index 00000000..2016a745
--- /dev/null
+++ b/modules/routes/routeNotifications.py
@@ -0,0 +1,575 @@
+# Copyright (c) 2025 Patrick Motsch
+# All rights reserved.
+"""
+Notification routes for in-app notifications.
+Provides user-specific notification inbox with support for actionable notifications.
+"""
+
+from fastapi import APIRouter, HTTPException, Depends, Request
+from typing import List, Dict, Any, Optional
+from fastapi import status
+import logging
+from pydantic import BaseModel, Field
+
+from modules.auth import limiter, getCurrentUser
+from modules.datamodels.datamodelUam import User
+from modules.datamodels.datamodelNotification import (
+ UserNotification,
+ NotificationType,
+ NotificationStatus,
+ NotificationAction
+)
+from modules.interfaces.interfaceDbApp import getRootInterface
+from modules.shared.timeUtils import getUtcTimestamp
+
+logger = logging.getLogger(__name__)
+
+router = APIRouter(
+ prefix="/api/notifications",
+ tags=["Notifications"],
+ responses={404: {"description": "Not found"}}
+)
+
+
+# =============================================================================
+# Request/Response Models
+# =============================================================================
+
+class NotificationActionRequest(BaseModel):
+ """Request model for executing a notification action"""
+ actionId: str = Field(..., description="ID of the action to execute (e.g., 'accept', 'decline')")
+
+
+class UnreadCountResponse(BaseModel):
+ """Response model for unread count"""
+ count: int
+
+
+# =============================================================================
+# Helper Functions
+# =============================================================================
+
+def _createNotification(
+ userId: str,
+ notificationType: NotificationType,
+ title: str,
+ message: str,
+ referenceType: Optional[str] = None,
+ referenceId: Optional[str] = None,
+ actions: Optional[List[NotificationAction]] = None,
+ icon: Optional[str] = None,
+ expiresAt: Optional[float] = None
+) -> UserNotification:
+ """
+ Create a notification for a user.
+ This is a helper function that can be imported by other modules.
+ """
+ rootInterface = getRootInterface()
+
+ notification = UserNotification(
+ userId=userId,
+ type=notificationType,
+ title=title,
+ message=message,
+ referenceType=referenceType,
+ referenceId=referenceId,
+ actions=actions,
+ icon=icon,
+ expiresAt=expiresAt
+ )
+
+ # Store in database
+ rootInterface.db.recordCreate(
+ model_class=UserNotification,
+ record=notification.model_dump()
+ )
+
+ logger.info(f"Created notification {notification.id} for user {userId}: {title}")
+ return notification
+
+
+def createInvitationNotification(
+ userId: str,
+ invitationId: str,
+ mandateName: str,
+ inviterName: str
+) -> UserNotification:
+ """
+ Create a notification for a pending invitation.
+ Called when an invitation is created for an existing user.
+ """
+ return _createNotification(
+ userId=userId,
+ notificationType=NotificationType.INVITATION,
+ title="Neue Einladung",
+ message=f"{inviterName} hat Sie zu '{mandateName}' eingeladen.",
+ referenceType="Invitation",
+ referenceId=invitationId,
+ icon="mail",
+ actions=[
+ NotificationAction(actionId="accept", label="Annehmen", style="primary"),
+ NotificationAction(actionId="decline", label="Ablehnen", style="danger")
+ ]
+ )
+
+
+# =============================================================================
+# API Endpoints
+# =============================================================================
+
+@router.get("", response_model=List[Dict[str, Any]])
+@limiter.limit("60/minute")
+async def getNotifications(
+ request: Request,
+ currentUser: User = Depends(getCurrentUser),
+ status: Optional[str] = None,
+ type: Optional[str] = None,
+ limit: int = 50
+) -> List[Dict[str, Any]]:
+ """
+ Get all notifications for the current user.
+
+ Optionally filter by status (unread, read, actioned, dismissed) or type.
+ """
+ try:
+ rootInterface = getRootInterface()
+
+ # Build filter
+ recordFilter = {"userId": str(currentUser.id)}
+ if status:
+ recordFilter["status"] = status
+ if type:
+ recordFilter["type"] = type
+
+ # Get notifications
+ notifications = rootInterface.db.getRecordset(
+ model_class=UserNotification,
+ recordFilter=recordFilter
+ )
+
+ # Sort by creation date (newest first) and limit
+ notifications = sorted(notifications, key=lambda x: x.get("createdAt", 0), reverse=True)
+ if limit:
+ notifications = notifications[:limit]
+
+ return notifications
+
+ except Exception as e:
+ logger.error(f"Error getting notifications: {e}")
+ raise HTTPException(
+ status_code=500,
+ detail=f"Failed to get notifications: {str(e)}"
+ )
+
+
+@router.get("/unread-count", response_model=UnreadCountResponse)
+@limiter.limit("120/minute")
+async def getUnreadCount(
+ request: Request,
+ currentUser: User = Depends(getCurrentUser)
+) -> UnreadCountResponse:
+ """
+ Get the count of unread notifications for the current user.
+ Used for the notification badge in the header.
+ """
+ try:
+ rootInterface = getRootInterface()
+
+ notifications = rootInterface.db.getRecordset(
+ model_class=UserNotification,
+ recordFilter={
+ "userId": str(currentUser.id),
+ "status": NotificationStatus.UNREAD.value
+ }
+ )
+
+ return UnreadCountResponse(count=len(notifications))
+
+ except Exception as e:
+ logger.error(f"Error getting unread count: {e}")
+ raise HTTPException(
+ status_code=500,
+ detail=f"Failed to get unread count: {str(e)}"
+ )
+
+
+@router.put("/{notificationId}/read", response_model=Dict[str, Any])
+@limiter.limit("60/minute")
+async def markAsRead(
+ request: Request,
+ notificationId: str,
+ currentUser: User = Depends(getCurrentUser)
+) -> Dict[str, Any]:
+ """
+ Mark a notification as read.
+ """
+ try:
+ rootInterface = getRootInterface()
+
+ # Get the notification
+ notifications = rootInterface.db.getRecordset(
+ model_class=UserNotification,
+ recordFilter={"id": notificationId}
+ )
+
+ if not notifications:
+ raise HTTPException(
+ status_code=status.HTTP_404_NOT_FOUND,
+ detail="Notification not found"
+ )
+
+ notification = notifications[0]
+
+ # Verify ownership
+        if notification.get("userId") != str(currentUser.id):
+ raise HTTPException(
+ status_code=status.HTTP_403_FORBIDDEN,
+ detail="Not authorized to access this notification"
+ )
+
+ # Update status
+ rootInterface.db.recordModify(
+ model_class=UserNotification,
+ recordId=notificationId,
+ record={
+ "status": NotificationStatus.READ.value,
+ "readAt": getUtcTimestamp()
+ }
+ )
+
+ return {"message": "Notification marked as read", "id": notificationId}
+
+ except HTTPException:
+ raise
+ except Exception as e:
+ logger.error(f"Error marking notification as read: {e}")
+ raise HTTPException(
+ status_code=500,
+ detail=f"Failed to mark notification as read: {str(e)}"
+ )
+
+
+@router.put("/mark-all-read", response_model=Dict[str, Any])
+@limiter.limit("10/minute")
+async def markAllAsRead(
+ request: Request,
+ currentUser: User = Depends(getCurrentUser)
+) -> Dict[str, Any]:
+ """
+ Mark all notifications as read for the current user.
+ """
+ try:
+ rootInterface = getRootInterface()
+
+ # Get all unread notifications
+ notifications = rootInterface.db.getRecordset(
+ model_class=UserNotification,
+ recordFilter={
+                "userId": str(currentUser.id),
+ "status": NotificationStatus.UNREAD.value
+ }
+ )
+
+ currentTime = getUtcTimestamp()
+ updatedCount = 0
+
+ for notification in notifications:
+ rootInterface.db.recordModify(
+ model_class=UserNotification,
+ recordId=notification.get("id"),
+ record={
+ "status": NotificationStatus.READ.value,
+ "readAt": currentTime
+ }
+ )
+ updatedCount += 1
+
+ return {"message": f"Marked {updatedCount} notifications as read", "count": updatedCount}
+
+ except Exception as e:
+ logger.error(f"Error marking all notifications as read: {e}")
+ raise HTTPException(
+ status_code=500,
+ detail=f"Failed to mark notifications as read: {str(e)}"
+ )
+
+
+@router.post("/{notificationId}/action", response_model=Dict[str, Any])
+@limiter.limit("30/minute")
+async def executeAction(
+ request: Request,
+ notificationId: str,
+ actionRequest: NotificationActionRequest,
+ currentUser: User = Depends(getCurrentUser)
+) -> Dict[str, Any]:
+ """
+ Execute an action on a notification (e.g., accept/decline invitation).
+ """
+ try:
+ rootInterface = getRootInterface()
+
+ # Get the notification
+ notifications = rootInterface.db.getRecordset(
+ model_class=UserNotification,
+ recordFilter={"id": notificationId}
+ )
+
+ if not notifications:
+ raise HTTPException(
+ status_code=status.HTTP_404_NOT_FOUND,
+ detail="Notification not found"
+ )
+
+ notification = notifications[0]
+
+ # Verify ownership
+        if notification.get("userId") != str(currentUser.id):
+ raise HTTPException(
+ status_code=status.HTTP_403_FORBIDDEN,
+ detail="Not authorized to access this notification"
+ )
+
+ # Check if already actioned
+ if notification.get("status") == NotificationStatus.ACTIONED.value:
+ raise HTTPException(
+ status_code=status.HTTP_400_BAD_REQUEST,
+ detail="Notification has already been actioned"
+ )
+
+ # Validate action exists
+ actions = notification.get("actions", [])
+ validActionIds = [a.get("actionId") if isinstance(a, dict) else a.actionId for a in (actions or [])]
+
+ if actionRequest.actionId not in validActionIds:
+ raise HTTPException(
+ status_code=status.HTTP_400_BAD_REQUEST,
+ detail=f"Invalid action. Valid actions: {validActionIds}"
+ )
+
+ # Execute action based on notification type
+ actionResult = None
+
+ if notification.get("type") == NotificationType.INVITATION.value:
+ actionResult = await _handleInvitationAction(
+ notification=notification,
+ actionId=actionRequest.actionId,
+ currentUser=currentUser,
+ rootInterface=rootInterface
+ )
+ else:
+ # Generic action handling
+ actionResult = f"Action '{actionRequest.actionId}' executed"
+
+ # Update notification status
+ rootInterface.db.recordModify(
+ model_class=UserNotification,
+ recordId=notificationId,
+ record={
+ "status": NotificationStatus.ACTIONED.value,
+ "actionTaken": actionRequest.actionId,
+ "actionResult": actionResult,
+ "actionedAt": getUtcTimestamp()
+ }
+ )
+
+ return {
+ "message": actionResult,
+ "action": actionRequest.actionId,
+ "notificationId": notificationId
+ }
+
+ except HTTPException:
+ raise
+ except Exception as e:
+ logger.error(f"Error executing notification action: {e}")
+ raise HTTPException(
+ status_code=500,
+ detail=f"Failed to execute action: {str(e)}"
+ )
+
+
+async def _handleInvitationAction(
+ notification: Dict[str, Any],
+ actionId: str,
+ currentUser: User,
+ rootInterface
+) -> str:
+ """Handle accept/decline actions for invitation notifications."""
+ from modules.datamodels.datamodelInvitation import Invitation
+ from modules.datamodels.datamodelUam import Mandate
+ from modules.datamodels.datamodelMembership import UserMandate
+
+ invitationId = notification.get("referenceId")
+ if not invitationId:
+ raise HTTPException(
+ status_code=status.HTTP_400_BAD_REQUEST,
+ detail="No invitation reference found"
+ )
+
+ # Get the invitation
+ invitations = rootInterface.db.getRecordset(
+ model_class=Invitation,
+ recordFilter={"id": invitationId}
+ )
+
+ if not invitations:
+ raise HTTPException(
+ status_code=status.HTTP_404_NOT_FOUND,
+ detail="Invitation not found"
+ )
+
+ invitation = invitations[0]
+
+ # Verify username matches
+ if invitation.get("targetUsername") != currentUser.username:
+ raise HTTPException(
+ status_code=status.HTTP_403_FORBIDDEN,
+ detail="This invitation is for a different user"
+ )
+
+ # Check if invitation is still valid
+ currentTime = getUtcTimestamp()
+ if invitation.get("expiresAt", 0) < currentTime:
+ raise HTTPException(
+ status_code=status.HTTP_400_BAD_REQUEST,
+ detail="Invitation has expired"
+ )
+
+ if invitation.get("revokedAt"):
+ raise HTTPException(
+ status_code=status.HTTP_400_BAD_REQUEST,
+ detail="Invitation has been revoked"
+ )
+
+ if invitation.get("currentUses", 0) >= invitation.get("maxUses", 1):
+ raise HTTPException(
+ status_code=status.HTTP_400_BAD_REQUEST,
+ detail="Invitation has reached maximum uses"
+ )
+
+ if actionId == "accept":
+ # Accept the invitation - assign roles and mandate access
+ mandateId = invitation.get("mandateId")
+ roleIds = invitation.get("roleIds", [])
+
+ # Get mandate name for result message
+ mandates = rootInterface.db.getRecordset(
+ model_class=Mandate,
+ recordFilter={"id": mandateId}
+ )
+ mandateName = mandates[0].get("mandateLabel", mandateId) if mandates else mandateId
+
+ # Check if user already has this mandate
+ existingMemberships = rootInterface.db.getRecordset(
+ model_class=UserMandate,
+ recordFilter={
+ "userId": currentUser.id,
+ "mandateId": mandateId
+ }
+ )
+
+ if existingMemberships:
+ # Update existing membership with new roles
+ existingMembership = existingMemberships[0]
+ existingRoles = existingMembership.get("roleIds", [])
+ mergedRoles = list(set(existingRoles + roleIds))
+
+ rootInterface.db.recordModify(
+ model_class=UserMandate,
+ recordId=existingMembership.get("id"),
+ record={"roleIds": mergedRoles}
+ )
+ logger.info(f"Updated UserMandate for user {currentUser.id} in mandate {mandateId}")
+ else:
+ # Create new user-mandate relationship
+ userMandate = UserMandate(
+ userId=currentUser.id,
+ mandateId=mandateId,
+ roleIds=roleIds
+ )
+ rootInterface.db.recordCreate(
+ model_class=UserMandate,
+ record=userMandate.model_dump()
+ )
+ logger.info(f"Created UserMandate for user {currentUser.id} in mandate {mandateId}")
+
+ # Mark invitation as used
+ rootInterface.db.recordModify(
+ model_class=Invitation,
+ recordId=invitationId,
+ record={
+ "usedBy": currentUser.id,
+ "usedAt": currentTime,
+ "currentUses": invitation.get("currentUses", 0) + 1
+ }
+ )
+
+ logger.info(f"User {currentUser.id} accepted invitation {invitationId} for mandate {mandateId}")
+ return f"Einladung angenommen. Sie haben jetzt Zugang zu '{mandateName}'."
+
+ elif actionId == "decline":
+ # Decline the invitation
+ # We don't revoke it, just mark the notification as declined
+ logger.info(f"User {currentUser.id} declined invitation {invitationId}")
+ return "Einladung abgelehnt."
+
+ else:
+ raise HTTPException(
+ status_code=status.HTTP_400_BAD_REQUEST,
+ detail=f"Unknown action: {actionId}"
+ )
+
+
+@router.delete("/{notificationId}", response_model=Dict[str, Any])
+@limiter.limit("30/minute")
+async def deleteNotification(
+ request: Request,
+ notificationId: str,
+ currentUser: User = Depends(getCurrentUser)
+) -> Dict[str, Any]:
+ """
+ Delete/dismiss a notification.
+ """
+ try:
+ rootInterface = getRootInterface()
+
+ # Get the notification
+ notifications = rootInterface.db.getRecordset(
+ model_class=UserNotification,
+ recordFilter={"id": notificationId}
+ )
+
+ if not notifications:
+ raise HTTPException(
+ status_code=status.HTTP_404_NOT_FOUND,
+ detail="Notification not found"
+ )
+
+ notification = notifications[0]
+
+ # Verify ownership
+        if notification.get("userId") != str(currentUser.id):
+ raise HTTPException(
+ status_code=status.HTTP_403_FORBIDDEN,
+ detail="Not authorized to delete this notification"
+ )
+
+ # Mark as dismissed (soft delete)
+ rootInterface.db.recordModify(
+ model_class=UserNotification,
+ recordId=notificationId,
+ record={
+ "status": NotificationStatus.DISMISSED.value
+ }
+ )
+
+ return {"message": "Notification dismissed", "id": notificationId}
+
+ except HTTPException:
+ raise
+ except Exception as e:
+ logger.error(f"Error deleting notification: {e}")
+ raise HTTPException(
+ status_code=500,
+ detail=f"Failed to delete notification: {str(e)}"
+ )
diff --git a/modules/routes/routeSecurityLocal.py b/modules/routes/routeSecurityLocal.py
index 8ab211cf..8f11a9af 100644
--- a/modules/routes/routeSecurityLocal.py
+++ b/modules/routes/routeSecurityLocal.py
@@ -20,6 +20,7 @@ from modules.interfaces.interfaceDbApp import getInterface, getRootInterface
from modules.datamodels.datamodelUam import User, UserInDB, AuthAuthority, Mandate
from modules.datamodels.datamodelSecurity import Token
from modules.shared.configuration import APP_CONFIG
+from modules.shared.timeUtils import getUtcTimestamp
# Configure logger
logger = logging.getLogger(__name__)
@@ -322,6 +323,52 @@ Falls Sie sich nicht registriert haben, können Sie diese E-Mail ignorieren."""
logger.error(f"Error sending registration email: {str(emailErr)}")
# Don't fail registration if email fails - user can request reset later
+ # Check for pending invitations and create notifications
+ try:
+ from modules.datamodels.datamodelInvitation import Invitation
+ from modules.routes.routeNotifications import createInvitationNotification
+ from modules.datamodels.datamodelUam import Mandate
+
+ currentTime = getUtcTimestamp()
+ pendingInvitations = appInterface.db.getRecordset(
+ model_class=Invitation,
+ recordFilter={"targetUsername": userData.username}
+ )
+
+ for invitation in pendingInvitations:
+ # Skip expired, revoked, or fully used invitations
+ if invitation.get("expiresAt", 0) < currentTime:
+ continue
+ if invitation.get("revokedAt"):
+ continue
+ if invitation.get("currentUses", 0) >= invitation.get("maxUses", 1):
+ continue
+
+ # Get mandate name for notification
+ mandateId = invitation.get("mandateId")
+ mandateRecords = appInterface.db.getRecordset(
+ Mandate,
+ recordFilter={"id": mandateId}
+ )
+ mandateName = mandateRecords[0].get("mandateLabel", "PowerOn") if mandateRecords else "PowerOn"
+
+ # Get inviter name
+ inviterId = invitation.get("createdBy")
+ inviter = appInterface.getUserById(inviterId) if inviterId else None
+ inviterName = (inviter.fullName or inviter.username) if inviter else "PowerOn"
+
+ createInvitationNotification(
+ userId=str(user.id),
+ invitationId=str(invitation.get("id")),
+ mandateName=mandateName,
+ inviterName=inviterName
+ )
+ logger.info(f"Created notification for new user {userData.username} for invitation {invitation.get('id')}")
+
+ except Exception as notifErr:
+ logger.warning(f"Failed to create notifications for pending invitations: {notifErr}")
+ # Don't fail registration if notification creation fails
+
return {
"message": "Registrierung erfolgreich! Bitte prüfen Sie Ihre E-Mail für den Link zum Setzen Ihres Passworts."
}
diff --git a/modules/system/registry.py b/modules/system/registry.py
index 5431b706..8477b045 100644
--- a/modules/system/registry.py
+++ b/modules/system/registry.py
@@ -111,7 +111,7 @@ def loadFeatureMainModules() -> Dict[str, Any]:
def registerAllFeaturesInCatalog(catalogService) -> Dict[str, bool]:
"""
Register all features' RBAC objects in the catalog.
- Also registers system-level RBAC objects.
+ Also registers system-level RBAC objects and feature definitions.
"""
results = {}
@@ -132,6 +132,20 @@ def registerAllFeaturesInCatalog(catalogService) -> Dict[str, bool]:
mainModules = loadFeatureMainModules()
for featureName, module in mainModules.items():
+ # Register feature definition in catalog (for /api/features/ endpoint)
+ if hasattr(module, "getFeatureDefinition"):
+ try:
+ featureDef = module.getFeatureDefinition()
+ catalogService.registerFeatureDefinition(
+ featureCode=featureDef.get("code", featureName),
+ label=featureDef.get("label", {"en": featureName, "de": featureName}),
+ icon=featureDef.get("icon", "mdi-puzzle")
+ )
+ logger.info(f"Registered feature definition: {featureDef.get('code', featureName)}")
+ except Exception as e:
+ logger.error(f"Error registering feature definition for {featureName}: {e}")
+
+ # Register RBAC objects (UI, RESOURCE, DATA)
if hasattr(module, "registerFeature"):
try:
success = module.registerFeature(catalogService)
From 829711f7551b10ab7c95bc0b9cbf994f52655908 Mon Sep 17 00:00:00 2001
From: ValueOn AG
Date: Mon, 26 Jan 2026 12:39:00 +0100
Subject: [PATCH 27/32] fixed system and dynamic data rbac
---
modules/datamodels/datamodelChat.py | 41 +-
.../interfaceFeatureNeutralizer.py | 84 +++-
.../mainServiceNeutralization.py | 7 +-
modules/interfaces/interfaceBootstrap.py | 387 ++++++++++++++----
modules/interfaces/interfaceDbApp.py | 8 +-
modules/interfaces/interfaceDbChat.py | 80 ++--
modules/interfaces/interfaceRbac.py | 101 ++++-
modules/routes/routeAdminRbacRules.py | 29 +-
modules/routes/routeDataConnections.py | 9 +-
modules/routes/routeNotifications.py | 12 +
modules/security/rbac.py | 64 +--
modules/system/mainSystem.py | 72 ++--
.../actions/getExpensesFromPdf.py | 46 ++-
tests/unit/rbac/test_rbac_bootstrap.py | 10 +-
tests/unit/rbac/test_rbac_permissions.py | 26 +-
15 files changed, 695 insertions(+), 281 deletions(-)
diff --git a/modules/datamodels/datamodelChat.py b/modules/datamodels/datamodelChat.py
index 328bee22..3d71bf63 100644
--- a/modules/datamodels/datamodelChat.py
+++ b/modules/datamodels/datamodelChat.py
@@ -11,15 +11,10 @@ import uuid
class ChatStat(BaseModel):
+ """Statistics for chat operations. User-owned, no mandate context."""
id: str = Field(
default_factory=lambda: str(uuid.uuid4()), description="Primary key"
)
- mandateId: Optional[str] = Field(
- default="", description="ID of the mandate this stat belongs to"
- )
- featureInstanceId: Optional[str] = Field(
- default="", description="ID of the feature instance this stat belongs to"
- )
workflowId: Optional[str] = Field(
None, description="Foreign key to workflow (for workflow stats)"
)
@@ -39,8 +34,6 @@ registerModelLabels(
{"en": "Chat Statistics", "fr": "Statistiques de chat"},
{
"id": {"en": "ID", "fr": "ID"},
- "mandateId": {"en": "Mandate ID", "fr": "ID du mandat"},
- "featureInstanceId": {"en": "Feature Instance ID", "fr": "ID de l'instance de fonctionnalité"},
"workflowId": {"en": "Workflow ID", "fr": "ID du workflow"},
"processingTime": {"en": "Processing Time", "fr": "Temps de traitement"},
"bytesSent": {"en": "Bytes Sent", "fr": "Octets envoyés"},
@@ -54,15 +47,10 @@ registerModelLabels(
class ChatLog(BaseModel):
+ """Log entries for chat workflows. User-owned, no mandate context."""
id: str = Field(
default_factory=lambda: str(uuid.uuid4()), description="Primary key"
)
- mandateId: Optional[str] = Field(
- default="", description="ID of the mandate this log belongs to"
- )
- featureInstanceId: Optional[str] = Field(
- default="", description="ID of the feature instance this log belongs to"
- )
workflowId: str = Field(description="Foreign key to workflow")
message: str = Field(description="Log message")
type: str = Field(description="Log type (info, warning, error, etc.)")
@@ -93,8 +81,6 @@ registerModelLabels(
{"en": "Chat Log", "fr": "Journal de chat"},
{
"id": {"en": "ID", "fr": "ID"},
- "mandateId": {"en": "Mandate ID", "fr": "ID du mandat"},
- "featureInstanceId": {"en": "Feature Instance ID", "fr": "ID de l'instance de fonctionnalité"},
"workflowId": {"en": "Workflow ID", "fr": "ID du flux de travail"},
"message": {"en": "Message", "fr": "Message"},
"type": {"en": "Type", "fr": "Type"},
@@ -107,15 +93,10 @@ registerModelLabels(
class ChatDocument(BaseModel):
+ """Documents attached to chat messages. User-owned, no mandate context."""
id: str = Field(
default_factory=lambda: str(uuid.uuid4()), description="Primary key"
)
- mandateId: Optional[str] = Field(
- default="", description="ID of the mandate this document belongs to"
- )
- featureInstanceId: Optional[str] = Field(
- default="", description="ID of the feature instance this document belongs to"
- )
messageId: str = Field(description="Foreign key to message")
fileId: str = Field(description="Foreign key to file")
fileName: str = Field(description="Name of the file")
@@ -134,8 +115,6 @@ registerModelLabels(
{"en": "Chat Document", "fr": "Document de chat"},
{
"id": {"en": "ID", "fr": "ID"},
- "mandateId": {"en": "Mandate ID", "fr": "ID du mandat"},
- "featureInstanceId": {"en": "Feature Instance ID", "fr": "ID de l'instance de fonctionnalité"},
"messageId": {"en": "Message ID", "fr": "ID du message"},
"fileId": {"en": "File ID", "fr": "ID du fichier"},
"fileName": {"en": "File Name", "fr": "Nom du fichier"},
@@ -221,15 +200,10 @@ registerModelLabels(
class ChatMessage(BaseModel):
+ """Messages in chat workflows. User-owned, no mandate context."""
id: str = Field(
default_factory=lambda: str(uuid.uuid4()), description="Primary key"
)
- mandateId: Optional[str] = Field(
- default="", description="ID of the mandate this message belongs to"
- )
- featureInstanceId: Optional[str] = Field(
- default="", description="ID of the feature instance this message belongs to"
- )
workflowId: str = Field(description="Foreign key to workflow")
parentMessageId: Optional[str] = Field(
None, description="Parent message ID for threading"
@@ -281,8 +255,6 @@ registerModelLabels(
{"en": "Chat Message", "fr": "Message de chat"},
{
"id": {"en": "ID", "fr": "ID"},
- "mandateId": {"en": "Mandate ID", "fr": "ID du mandat"},
- "featureInstanceId": {"en": "Feature Instance ID", "fr": "ID de l'instance de fonctionnalité"},
"workflowId": {"en": "Workflow ID", "fr": "ID du flux de travail"},
"parentMessageId": {"en": "Parent Message ID", "fr": "ID du message parent"},
"documents": {"en": "Documents", "fr": "Documents"},
@@ -326,9 +298,8 @@ registerModelLabels(
class ChatWorkflow(BaseModel):
+ """Chat workflow container. User-owned, no mandate context."""
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key", json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
- mandateId: Optional[str] = Field(default="", description="ID of the mandate this workflow belongs to", json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
- featureInstanceId: Optional[str] = Field(default="", description="ID of the feature instance this workflow belongs to", json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
status: str = Field(default="running", description="Current status of the workflow", json_schema_extra={"frontend_type": "select", "frontend_readonly": False, "frontend_required": False, "frontend_options": [
{"value": "running", "label": {"en": "Running", "fr": "En cours"}},
{"value": "completed", "label": {"en": "Completed", "fr": "Terminé"}},
@@ -402,8 +373,6 @@ registerModelLabels(
{"en": "Chat Workflow", "fr": "Flux de travail de chat"},
{
"id": {"en": "ID", "fr": "ID"},
- "mandateId": {"en": "Mandate ID", "fr": "ID du mandat"},
- "featureInstanceId": {"en": "Feature Instance ID", "fr": "ID de l'instance de fonctionnalité"},
"status": {"en": "Status", "fr": "Statut"},
"name": {"en": "Name", "fr": "Nom"},
"currentRound": {"en": "Current Round", "fr": "Tour actuel"},
diff --git a/modules/features/neutralization/interfaceFeatureNeutralizer.py b/modules/features/neutralization/interfaceFeatureNeutralizer.py
index 970f51ff..54533166 100644
--- a/modules/features/neutralization/interfaceFeatureNeutralizer.py
+++ b/modules/features/neutralization/interfaceFeatureNeutralizer.py
@@ -12,29 +12,76 @@ from modules.features.neutralization.datamodelFeatureNeutralizer import (
DataNeutraliserConfig,
DataNeutralizerAttributes,
)
+from modules.connectors.connectorDbPostgre import DatabaseConnector
from modules.interfaces.interfaceRbac import getRecordsetWithRBAC
+from modules.shared.configuration import APP_CONFIG
from modules.shared.timeUtils import getUtcTimestamp
+from modules.datamodels.datamodelUam import User
logger = logging.getLogger(__name__)
+# Singleton cache for interface instances
+_neutralizerInterfaces = {}
+
class InterfaceFeatureNeutralizer:
"""Database interface for Neutralizer feature operations"""
- def __init__(self, db, currentUser, mandateId: str, userId: str):
+ # Feature code for RBAC objectKey construction
+ FEATURE_CODE = "neutralization"
+
+ def __init__(self, currentUser: Optional[User] = None, mandateId: Optional[str] = None, featureInstanceId: Optional[str] = None):
"""
Initialize the interface with database connection and user context.
Args:
- db: Database connection instance
currentUser: Current user object for RBAC
mandateId: Current mandate ID
- userId: Current user ID
+ featureInstanceId: Current feature instance ID
"""
- self.db = db
self.currentUser = currentUser
self.mandateId = mandateId
- self.userId = userId
+ self.featureInstanceId = featureInstanceId
+ self.userId = currentUser.id if currentUser else None
+ self.db = None
+
+ # Initialize database
+ self._initializeDatabase()
+
+ def _initializeDatabase(self):
+ """Initialize the database connection."""
+ try:
+ # Use same database config pattern as other feature interfaces
+ dbHost = APP_CONFIG.get("DB_HOST", "localhost")
+ dbDatabase = APP_CONFIG.get("DB_DATABASE_NEUTRALIZATION", APP_CONFIG.get("DB_DATABASE", "poweron"))
+ dbUser = APP_CONFIG.get("DB_USER", "postgres")
+ dbPassword = APP_CONFIG.get("DB_PASSWORD_SECRET")
+ dbPort = int(APP_CONFIG.get("DB_PORT", 5432))
+
+ self.db = DatabaseConnector(
+ dbHost=dbHost,
+ dbDatabase=dbDatabase,
+ dbUser=dbUser,
+ dbPassword=dbPassword,
+ dbPort=dbPort,
+ userId=self.userId,
+ )
+ self.db.initDbSystem()
+ logger.debug("Neutralizer database initialized successfully")
+ except Exception as e:
+ logger.error(f"Error initializing Neutralizer database: {str(e)}")
+ raise
+
+ def setUserContext(self, currentUser: User, mandateId: Optional[str] = None, featureInstanceId: Optional[str] = None):
+ """Sets the user context for the interface."""
+ if not currentUser:
+ logger.info("Initializing interface without user context")
+ return
+
+ self.currentUser = currentUser
+ self.userId = currentUser.id
+ self.mandateId = mandateId
+ self.featureInstanceId = featureInstanceId
def getNeutralizationConfig(self) -> Optional[DataNeutraliserConfig]:
"""Get the data neutralization configuration for the current user's mandate"""
@@ -160,17 +207,34 @@ class InterfaceFeatureNeutralizer:
return None
-def getInterface(db, currentUser, mandateId: str, userId: str) -> InterfaceFeatureNeutralizer:
+def getInterface(currentUser: Optional[User] = None, mandateId: Optional[str] = None, featureInstanceId: Optional[str] = None) -> InterfaceFeatureNeutralizer:
"""
- Factory function to create a Neutralizer interface instance.
+ Factory function to get or create a Neutralizer interface instance.
+ Uses singleton pattern per user context.
Args:
- db: Database connection
currentUser: Current user for RBAC
mandateId: Current mandate ID
- userId: Current user ID
+ featureInstanceId: Current feature instance ID
Returns:
InterfaceFeatureNeutralizer instance
"""
- return InterfaceFeatureNeutralizer(db, currentUser, mandateId, userId)
+ global _neutralizerInterfaces
+
+ if not currentUser:
+ raise ValueError("Valid user context required")
+
+ effectiveMandateId = str(mandateId) if mandateId else None
+ effectiveFeatureInstanceId = str(featureInstanceId) if featureInstanceId else None
+
+ # Include featureInstanceId in cache key for proper isolation
+ cacheKey = f"{currentUser.id}_{effectiveMandateId}_{effectiveFeatureInstanceId}"
+
+ if cacheKey not in _neutralizerInterfaces:
+ _neutralizerInterfaces[cacheKey] = InterfaceFeatureNeutralizer(currentUser, mandateId=effectiveMandateId, featureInstanceId=effectiveFeatureInstanceId)
+ else:
+ # Update user context if needed
+ _neutralizerInterfaces[cacheKey].setUserContext(currentUser, mandateId=effectiveMandateId, featureInstanceId=effectiveFeatureInstanceId)
+
+ return _neutralizerInterfaces[cacheKey]
diff --git a/modules/features/neutralization/serviceNeutralization/mainServiceNeutralization.py b/modules/features/neutralization/serviceNeutralization/mainServiceNeutralization.py
index 4351f400..b4b34cf7 100644
--- a/modules/features/neutralization/serviceNeutralization/mainServiceNeutralization.py
+++ b/modules/features/neutralization/serviceNeutralization/mainServiceNeutralization.py
@@ -14,7 +14,7 @@ import json
from typing import Dict, List, Any, Optional
from modules.features.neutralization.datamodelFeatureNeutralizer import DataNeutraliserConfig, DataNeutralizerAttributes
-from modules.features.neutralization.interfaceFeatureNeutralizer import InterfaceFeatureNeutralizer
+from modules.features.neutralization.interfaceFeatureNeutralizer import InterfaceFeatureNeutralizer, getInterface as getNeutralizerInterface
# Import all necessary classes and functions for neutralization
from .subProcessCommon import CommonUtils, NeutralizationResult, NeutralizationAttribute
@@ -42,11 +42,10 @@ class NeutralizationService:
self.interfaceNeutralizer: InterfaceFeatureNeutralizer = None
if serviceCenter and serviceCenter.interfaceDbApp:
dbApp = serviceCenter.interfaceDbApp
- self.interfaceNeutralizer = InterfaceFeatureNeutralizer(
- db=dbApp.db,
+ self.interfaceNeutralizer = getNeutralizerInterface(
currentUser=dbApp.currentUser,
mandateId=dbApp.mandateId,
- userId=dbApp.userId
+ featureInstanceId=getattr(dbApp, 'featureInstanceId', None)
)
# Initialize anonymization processors
diff --git a/modules/interfaces/interfaceBootstrap.py b/modules/interfaces/interfaceBootstrap.py
index 0e66ce7b..c73c4dd1 100644
--- a/modules/interfaces/interfaceBootstrap.py
+++ b/modules/interfaces/interfaceBootstrap.py
@@ -277,6 +277,9 @@ def initRbacRules(db: DatabaseConnector) -> None:
existingRules = db.getRecordset(AccessRule)
if existingRules:
logger.info(f"RBAC rules already exist ({len(existingRules)} rules)")
+ # Still ensure UI and DATA rules exist (may have been added later)
+ _ensureUiContextRules(db)
+ _ensureDataContextRules(db)
return
logger.info("Initializing RBAC rules")
@@ -377,20 +380,31 @@ def _createTableSpecificRules(db: DatabaseConnector) -> None:
viewerId = _getRoleId(db, "viewer")
# ==========================================================================
- # SYSTEM TABLE RULES - Using standardized dot format: data.system.{TableName}
+ # DATA TABLE RULES - Using semantic namespace structure
# ==========================================================================
- # All DATA context items MUST use the full objectKey format for consistency.
- # This matches the DATA_OBJECTS registration in mainSystem.py.
- # Feature tables use: data.feature.{featureCode}.{TableName}
+ # Namespace structure:
+ # - data.uam.* → User Access Management (mandantenübergreifend)
+ # - data.chat.* → Chat/AI-Daten (benutzer-eigen, kein Mandantenkontext)
+ # - data.files.* → Dateien (benutzer-eigen)
+ # - data.automation.* → Automation (benutzer-eigen)
+ # - data.feature.* → Mandanten-/Feature-spezifische Daten (dynamisch)
+ #
+ # GROUP-Berechtigung:
+ # - data.uam.*: GROUP filtert nach Mandant (via UserMandate)
+ # - data.chat.*, data.files.*, data.automation.*: GROUP = MY (benutzer-eigen)
# ==========================================================================
+ # -------------------------------------------------------------------------
+ # UAM Namespace - User Access Management
+ # -------------------------------------------------------------------------
+
# Mandate table - Only SysAdmin (flag) can access, not roles
# Regular roles have no access to Mandate table
if adminId:
tableRules.append(AccessRule(
roleId=adminId,
context=AccessRuleContext.DATA,
- item="data.system.Mandate",
+ item="data.uam.Mandate",
view=False,
read=AccessLevel.NONE,
create=AccessLevel.NONE,
@@ -401,7 +415,7 @@ def _createTableSpecificRules(db: DatabaseConnector) -> None:
tableRules.append(AccessRule(
roleId=userId,
context=AccessRuleContext.DATA,
- item="data.system.Mandate",
+ item="data.uam.Mandate",
view=False,
read=AccessLevel.NONE,
create=AccessLevel.NONE,
@@ -412,7 +426,7 @@ def _createTableSpecificRules(db: DatabaseConnector) -> None:
tableRules.append(AccessRule(
roleId=viewerId,
context=AccessRuleContext.DATA,
- item="data.system.Mandate",
+ item="data.uam.Mandate",
view=False,
read=AccessLevel.NONE,
create=AccessLevel.NONE,
@@ -425,7 +439,7 @@ def _createTableSpecificRules(db: DatabaseConnector) -> None:
tableRules.append(AccessRule(
roleId=adminId,
context=AccessRuleContext.DATA,
- item="data.system.UserInDB",
+ item="data.uam.UserInDB",
view=True,
read=AccessLevel.GROUP,
create=AccessLevel.GROUP,
@@ -436,7 +450,7 @@ def _createTableSpecificRules(db: DatabaseConnector) -> None:
tableRules.append(AccessRule(
roleId=userId,
context=AccessRuleContext.DATA,
- item="data.system.UserInDB",
+ item="data.uam.UserInDB",
view=True,
read=AccessLevel.MY,
create=AccessLevel.NONE,
@@ -447,7 +461,7 @@ def _createTableSpecificRules(db: DatabaseConnector) -> None:
tableRules.append(AccessRule(
roleId=viewerId,
context=AccessRuleContext.DATA,
- item="data.system.UserInDB",
+ item="data.uam.UserInDB",
view=True,
read=AccessLevel.MY,
create=AccessLevel.NONE,
@@ -455,92 +469,37 @@ def _createTableSpecificRules(db: DatabaseConnector) -> None:
delete=AccessLevel.NONE,
))
- # FileItem and UserConnection: All users (user, admin, viewer) only MY-level CRUD
- restrictedTables = [
- "data.system.UserConnection", # User connections/sessions - only own records
- "data.system.FileItem", # Uploaded files - only own files
- ]
-
- for objectKey in restrictedTables:
- # Admin: Only MY-level access (not group-level!)
- if adminId:
+ # UserConnection: All users only MY-level CRUD (UAM namespace)
+ for roleId in [adminId, userId]:
+ if roleId:
tableRules.append(AccessRule(
- roleId=adminId,
+ roleId=roleId,
context=AccessRuleContext.DATA,
- item=objectKey,
+ item="data.uam.UserConnection",
view=True,
read=AccessLevel.MY,
create=AccessLevel.MY,
update=AccessLevel.MY,
delete=AccessLevel.MY,
))
- # User: MY-level CRUD
- if userId:
- tableRules.append(AccessRule(
- roleId=userId,
- context=AccessRuleContext.DATA,
- item=objectKey,
- view=True,
- read=AccessLevel.MY,
- create=AccessLevel.MY,
- update=AccessLevel.MY,
- delete=AccessLevel.MY,
- ))
- # Viewer: MY-level read-only
- if viewerId:
- tableRules.append(AccessRule(
- roleId=viewerId,
- context=AccessRuleContext.DATA,
- item=objectKey,
- view=True,
- read=AccessLevel.MY,
- create=AccessLevel.NONE,
- update=AccessLevel.NONE,
- delete=AccessLevel.NONE,
- ))
-
- # Prompt: Special rule - CRUD for MY + Read for GROUP
- # Each user can manage own prompts (m) but can read group prompts (g)
- if adminId:
- tableRules.append(AccessRule(
- roleId=adminId,
- context=AccessRuleContext.DATA,
- item="data.system.Prompt",
- view=True,
- read=AccessLevel.GROUP,
- create=AccessLevel.MY,
- update=AccessLevel.MY,
- delete=AccessLevel.MY,
- ))
- if userId:
- tableRules.append(AccessRule(
- roleId=userId,
- context=AccessRuleContext.DATA,
- item="data.system.Prompt",
- view=True,
- read=AccessLevel.GROUP,
- create=AccessLevel.MY,
- update=AccessLevel.MY,
- delete=AccessLevel.MY,
- ))
if viewerId:
tableRules.append(AccessRule(
roleId=viewerId,
context=AccessRuleContext.DATA,
- item="data.system.Prompt",
+ item="data.uam.UserConnection",
view=True,
- read=AccessLevel.GROUP,
+ read=AccessLevel.MY,
create=AccessLevel.NONE,
update=AccessLevel.NONE,
delete=AccessLevel.NONE,
))
- # Invitation: Standard group-level access
+ # Invitation: Standard group-level access (UAM namespace)
if adminId:
tableRules.append(AccessRule(
roleId=adminId,
context=AccessRuleContext.DATA,
- item="data.system.Invitation",
+ item="data.uam.Invitation",
view=True,
read=AccessLevel.GROUP,
create=AccessLevel.GROUP,
@@ -551,7 +510,7 @@ def _createTableSpecificRules(db: DatabaseConnector) -> None:
tableRules.append(AccessRule(
roleId=userId,
context=AccessRuleContext.DATA,
- item="data.system.Invitation",
+ item="data.uam.Invitation",
view=True,
read=AccessLevel.MY,
create=AccessLevel.MY,
@@ -562,7 +521,7 @@ def _createTableSpecificRules(db: DatabaseConnector) -> None:
tableRules.append(AccessRule(
roleId=viewerId,
context=AccessRuleContext.DATA,
- item="data.system.Invitation",
+ item="data.uam.Invitation",
view=True,
read=AccessLevel.MY,
create=AccessLevel.NONE,
@@ -570,13 +529,12 @@ def _createTableSpecificRules(db: DatabaseConnector) -> None:
delete=AccessLevel.NONE,
))
- # AuthEvent table - Audit logs (no delete allowed for audit integrity!)
- # SysAdmin can delete via isSysAdmin bypass, but regular admins cannot
+ # AuthEvent table - Audit logs (UAM namespace, no delete for audit integrity!)
if adminId:
tableRules.append(AccessRule(
roleId=adminId,
context=AccessRuleContext.DATA,
- item="data.system.AuthEvent",
+ item="data.uam.AuthEvent",
view=True,
read=AccessLevel.ALL,
create=AccessLevel.NONE,
@@ -587,7 +545,7 @@ def _createTableSpecificRules(db: DatabaseConnector) -> None:
tableRules.append(AccessRule(
roleId=userId,
context=AccessRuleContext.DATA,
- item="data.system.AuthEvent",
+ item="data.uam.AuthEvent",
view=True,
read=AccessLevel.MY,
create=AccessLevel.NONE,
@@ -598,7 +556,120 @@ def _createTableSpecificRules(db: DatabaseConnector) -> None:
tableRules.append(AccessRule(
roleId=viewerId,
context=AccessRuleContext.DATA,
- item="data.system.AuthEvent",
+ item="data.uam.AuthEvent",
+ view=True,
+ read=AccessLevel.MY,
+ create=AccessLevel.NONE,
+ update=AccessLevel.NONE,
+ delete=AccessLevel.NONE,
+ ))
+
+ # -------------------------------------------------------------------------
+ # Chat Namespace - User-owned, no mandate context
+ # -------------------------------------------------------------------------
+
+ # Prompt: Only MY-level access (user-owned, no mandate context)
+ # Each user manages only their own prompts
+ for roleId in [adminId, userId]:
+ if roleId:
+ tableRules.append(AccessRule(
+ roleId=roleId,
+ context=AccessRuleContext.DATA,
+ item="data.chat.Prompt",
+ view=True,
+ read=AccessLevel.MY,
+ create=AccessLevel.MY,
+ update=AccessLevel.MY,
+ delete=AccessLevel.MY,
+ ))
+ if viewerId:
+ tableRules.append(AccessRule(
+ roleId=viewerId,
+ context=AccessRuleContext.DATA,
+ item="data.chat.Prompt",
+ view=True,
+ read=AccessLevel.MY,
+ create=AccessLevel.NONE,
+ update=AccessLevel.NONE,
+ delete=AccessLevel.NONE,
+ ))
+
+ # ChatWorkflow: Only MY-level access (user-owned, no mandate context)
+ for roleId in [adminId, userId]:
+ if roleId:
+ tableRules.append(AccessRule(
+ roleId=roleId,
+ context=AccessRuleContext.DATA,
+ item="data.chat.ChatWorkflow",
+ view=True,
+ read=AccessLevel.MY,
+ create=AccessLevel.MY,
+ update=AccessLevel.MY,
+ delete=AccessLevel.MY,
+ ))
+ if viewerId:
+ tableRules.append(AccessRule(
+ roleId=viewerId,
+ context=AccessRuleContext.DATA,
+ item="data.chat.ChatWorkflow",
+ view=True,
+ read=AccessLevel.MY,
+ create=AccessLevel.NONE,
+ update=AccessLevel.NONE,
+ delete=AccessLevel.NONE,
+ ))
+
+ # -------------------------------------------------------------------------
+ # Files Namespace - User-owned, no mandate context
+ # -------------------------------------------------------------------------
+
+ # FileItem: Only MY-level access (user-owned)
+ for roleId in [adminId, userId]:
+ if roleId:
+ tableRules.append(AccessRule(
+ roleId=roleId,
+ context=AccessRuleContext.DATA,
+ item="data.files.FileItem",
+ view=True,
+ read=AccessLevel.MY,
+ create=AccessLevel.MY,
+ update=AccessLevel.MY,
+ delete=AccessLevel.MY,
+ ))
+ if viewerId:
+ tableRules.append(AccessRule(
+ roleId=viewerId,
+ context=AccessRuleContext.DATA,
+ item="data.files.FileItem",
+ view=True,
+ read=AccessLevel.MY,
+ create=AccessLevel.NONE,
+ update=AccessLevel.NONE,
+ delete=AccessLevel.NONE,
+ ))
+
+ # -------------------------------------------------------------------------
+ # Automation Namespace - User-owned, no mandate context
+ # -------------------------------------------------------------------------
+
+ # AutomationDefinition: Only MY-level access (user-owned)
+ for roleId in [adminId, userId]:
+ if roleId:
+ tableRules.append(AccessRule(
+ roleId=roleId,
+ context=AccessRuleContext.DATA,
+ item="data.automation.AutomationDefinition",
+ view=True,
+ read=AccessLevel.MY,
+ create=AccessLevel.MY,
+ update=AccessLevel.MY,
+ delete=AccessLevel.MY,
+ ))
+ if viewerId:
+ tableRules.append(AccessRule(
+ roleId=viewerId,
+ context=AccessRuleContext.DATA,
+ item="data.automation.AutomationDefinition",
view=True,
read=AccessLevel.MY,
create=AccessLevel.NONE,
@@ -670,6 +741,160 @@ def _createUiContextRules(db: DatabaseConnector) -> None:
logger.info(f"Created {len(uiRules)} UI context rules")
+def _ensureUiContextRules(db: DatabaseConnector) -> None:
+ """
+ Ensure UI context rules exist for all navigation items.
+ This is called during bootstrap to add missing UI rules for new navigation items.
+
+ Args:
+ db: Database connector instance
+ """
+ from modules.system.mainSystem import NAVIGATION_SECTIONS
+
+ adminId = _getRoleId(db, "admin")
+ userId = _getRoleId(db, "user")
+ viewerId = _getRoleId(db, "viewer")
+
+ # Get existing UI rules
+ existingUiRules = db.getRecordset(
+ AccessRule,
+ recordFilter={"context": AccessRuleContext.UI.value}
+ )
+
+ # Build set of existing (roleId, item) combinations
+ existingCombinations = set()
+ for rule in existingUiRules:
+ roleId = rule.get("roleId")
+ item = rule.get("item")
+ if roleId and item:
+ existingCombinations.add((roleId, item))
+
+ # Check each navigation item and add missing rules
+ missingRules = []
+ for section in NAVIGATION_SECTIONS:
+ isAdminSection = section.get("adminOnly", False)
+
+ for item in section.get("items", []):
+ objectKey = item.get("objectKey")
+ if not objectKey:
+ continue
+
+ isAdminOnly = item.get("adminOnly", False) or isAdminSection
+
+ if isAdminOnly:
+ # Admin-only: only admin role
+ if adminId and (adminId, objectKey) not in existingCombinations:
+ missingRules.append(AccessRule(
+ roleId=adminId,
+ context=AccessRuleContext.UI,
+ item=objectKey,
+ view=True,
+ read=None, create=None, update=None, delete=None,
+ ))
+ else:
+ # Public/normal: all roles
+ for roleId in [adminId, userId, viewerId]:
+ if roleId and (roleId, objectKey) not in existingCombinations:
+ missingRules.append(AccessRule(
+ roleId=roleId,
+ context=AccessRuleContext.UI,
+ item=objectKey,
+ view=True,
+ read=None, create=None, update=None, delete=None,
+ ))
+
+ # Create missing rules
+ if missingRules:
+ for rule in missingRules:
+ db.recordCreate(AccessRule, rule)
+ logger.info(f"Created {len(missingRules)} missing UI context rules")
+ else:
+ logger.debug("All UI context rules already exist")
+
+
+def _ensureDataContextRules(db: DatabaseConnector) -> None:
+ """
+ Ensure DATA context rules exist for key tables like ChatWorkflow and AutomationDefinition.
+ This is called during bootstrap to add missing DATA rules for new tables.
+
+ Args:
+ db: Database connector instance
+ """
+ adminId = _getRoleId(db, "admin")
+ userId = _getRoleId(db, "user")
+ viewerId = _getRoleId(db, "viewer")
+
+ # Get existing DATA rules
+ existingDataRules = db.getRecordset(
+ AccessRule,
+ recordFilter={"context": AccessRuleContext.DATA.value}
+ )
+
+ # Build set of existing (roleId, item) combinations
+ existingCombinations = set()
+ for rule in existingDataRules:
+ roleId = rule.get("roleId")
+ item = rule.get("item")
+ if roleId and item:
+ existingCombinations.add((roleId, item))
+
+ # Define tables that need rules (user-owned, no mandate context)
+ # Users can only manage their own records (MY-level access)
+ tablesNeedingRules = [
+ "data.chat.ChatWorkflow",
+ "data.automation.AutomationDefinition",
+ ]
+
+ missingRules = []
+ for objectKey in tablesNeedingRules:
+ # Admin: MY-level access (user-owned, no mandate context)
+ if adminId and (adminId, objectKey) not in existingCombinations:
+ missingRules.append(AccessRule(
+ roleId=adminId,
+ context=AccessRuleContext.DATA,
+ item=objectKey,
+ view=True,
+ read=AccessLevel.MY,
+ create=AccessLevel.MY,
+ update=AccessLevel.MY,
+ delete=AccessLevel.MY,
+ ))
+
+ # User: MY-level access (user-owned, no mandate context)
+ if userId and (userId, objectKey) not in existingCombinations:
+ missingRules.append(AccessRule(
+ roleId=userId,
+ context=AccessRuleContext.DATA,
+ item=objectKey,
+ view=True,
+ read=AccessLevel.MY,
+ create=AccessLevel.MY,
+ update=AccessLevel.MY,
+ delete=AccessLevel.MY,
+ ))
+
+ # Viewer: MY read-only (user-owned, no mandate context)
+ if viewerId and (viewerId, objectKey) not in existingCombinations:
+ missingRules.append(AccessRule(
+ roleId=viewerId,
+ context=AccessRuleContext.DATA,
+ item=objectKey,
+ view=True,
+ read=AccessLevel.MY,
+ create=AccessLevel.NONE,
+ update=AccessLevel.NONE,
+ delete=AccessLevel.NONE,
+ ))
+
+ # Create missing rules
+ if missingRules:
+ for rule in missingRules:
+ db.recordCreate(AccessRule, rule)
+ logger.info(f"Created {len(missingRules)} missing DATA context rules")
+ else:
+ logger.debug("All DATA context rules already exist")
+
+
def _createResourceContextRules(db: DatabaseConnector) -> None:
"""
Create RESOURCE context rules for controlling resource access.
diff --git a/modules/interfaces/interfaceDbApp.py b/modules/interfaces/interfaceDbApp.py
index a7dfc689..250b2a38 100644
--- a/modules/interfaces/interfaceDbApp.py
+++ b/modules/interfaces/interfaceDbApp.py
@@ -1217,10 +1217,10 @@ class AppObjects:
The created UserConnection object
"""
try:
- # Get the user
- user = self.getUser(userId)
- if not user:
- raise ValueError(f"User not found: {userId}")
+ # Note: User verification is skipped here because:
+ # 1. The caller (route) already has an authenticated currentUser
+ # 2. Users should always be able to create connections for themselves
+ # 3. getUser() uses RBAC filtering which may fail for users without UserInDB view permissions
# Create new connection with all required fields
connection = UserConnection(
diff --git a/modules/interfaces/interfaceDbChat.py b/modules/interfaces/interfaceDbChat.py
index 3c4d35ad..6a43599b 100644
--- a/modules/interfaces/interfaceDbChat.py
+++ b/modules/interfaces/interfaceDbChat.py
@@ -364,10 +364,13 @@ class ChatObjects:
return False
tableName = modelClass.__name__
+ # Use buildDataObjectKey for semantic namespace lookup
+ from modules.interfaces.interfaceRbac import buildDataObjectKey
+ objectKey = buildDataObjectKey(tableName)
permissions = self.rbac.getUserPermissions(
self.currentUser,
AccessRuleContext.DATA,
- tableName,
+ objectKey,
mandateId=self.mandateId,
featureInstanceId=self.featureInstanceId
)
@@ -680,8 +683,7 @@ class ChatObjects:
startedAt=workflow.get("startedAt", getUtcTimestamp()),
logs=logs,
messages=messages,
- stats=stats,
- mandateId=workflow.get("mandateId", self.mandateId)
+ stats=stats
)
except Exception as e:
logger.error(f"Error validating workflow data: {str(e)}")
@@ -702,9 +704,22 @@ class ChatObjects:
# Set mandateId and featureInstanceId from context for proper data isolation
if "mandateId" not in workflowData or not workflowData["mandateId"]:
- workflowData["mandateId"] = self.mandateId
- if "featureInstanceId" not in workflowData or not workflowData["featureInstanceId"]:
- workflowData["featureInstanceId"] = self.featureInstanceId
+ # Use request context mandateId, or fall back to Root mandate
+ effectiveMandateId = self.mandateId
+ if not effectiveMandateId:
+ # Fall back to Root mandate (first mandate in system)
+ try:
+ from modules.datamodels.datamodelUam import Mandate
+ from modules.security.rootAccess import getRootDbAppConnector
+ dbAppConn = getRootDbAppConnector()
+ allMandates = dbAppConn.getRecordset(Mandate)
+ if allMandates:
+ effectiveMandateId = allMandates[0].get("id")
+ logger.debug(f"createWorkflow: Using Root mandate {effectiveMandateId}")
+ except Exception as e:
+ logger.warning(f"Could not get Root mandate: {e}")
+ # Note: Chat data is user-owned, no mandate/featureInstance context stored
+ # mandateId/featureInstanceId removed from ChatWorkflow model
# Use generic field separation based on ChatWorkflow model
simpleFields, objectFields = self._separateObjectFields(ChatWorkflow, workflowData)
@@ -714,6 +729,7 @@ class ChatObjects:
# Convert to ChatWorkflow model (empty related data for new workflow)
+ # Note: Chat data is user-owned, no mandate/featureInstance fields
return ChatWorkflow(
id=created["id"],
status=created.get("status", "running"),
@@ -728,7 +744,6 @@ class ChatObjects:
logs=[],
messages=[],
stats=[],
- mandateId=created.get("mandateId", self.mandateId),
workflowMode=created["workflowMode"],
maxSteps=created.get("maxSteps", 1)
)
@@ -774,8 +789,7 @@ class ChatObjects:
startedAt=updated.get("startedAt", workflow.startedAt),
logs=logs,
messages=messages,
- stats=stats,
- mandateId=updated.get("mandateId", workflow.mandateId)
+ stats=stats
)
def deleteWorkflow(self, workflowId: str) -> bool:
@@ -886,7 +900,7 @@ class ChatObjects:
# Apply default sorting by publishedAt if no sort specified
if pagination is None or not pagination.sort:
- messageDicts.sort(key=lambda x: x.get("publishedAt", getUtcTimestamp()))
+ messageDicts.sort(key=lambda x: x.get("publishedAt") or getUtcTimestamp())
# Apply filtering (if filters provided)
if pagination and pagination.filters:
@@ -1026,11 +1040,8 @@ class ChatObjects:
if "actionNumber" not in messageData:
messageData["actionNumber"] = workflow.currentAction
- # Set mandateId and featureInstanceId from context for proper data isolation
- if "mandateId" not in messageData or not messageData["mandateId"]:
- messageData["mandateId"] = self.mandateId
- if "featureInstanceId" not in messageData or not messageData["featureInstanceId"]:
- messageData["featureInstanceId"] = self.featureInstanceId
+ # Note: Chat data is user-owned, no mandate/featureInstance context stored
+ # mandateId/featureInstanceId removed from ChatMessage model
# Use generic field separation based on ChatMessage model
simpleFields, objectFields = self._separateObjectFields(ChatMessage, messageData)
@@ -1306,11 +1317,8 @@ class ChatObjects:
def createDocument(self, documentData: Dict[str, Any]) -> ChatDocument:
"""Creates a document for a message in normalized table."""
try:
- # Set mandateId and featureInstanceId from context for proper data isolation
- if "mandateId" not in documentData or not documentData["mandateId"]:
- documentData["mandateId"] = self.mandateId
- if "featureInstanceId" not in documentData or not documentData["featureInstanceId"]:
- documentData["featureInstanceId"] = self.featureInstanceId
+ # Note: Chat data is user-owned, no mandate/featureInstance context stored
+ # mandateId/featureInstanceId removed from ChatDocument model
# Validate and normalize document data to dict
document = ChatDocument(**documentData)
@@ -1431,11 +1439,8 @@ class ChatObjects:
if "timestamp" not in logData:
logData["timestamp"] = getUtcTimestamp()
- # Set mandateId and featureInstanceId from context for proper data isolation
- if "mandateId" not in logData or not logData["mandateId"]:
- logData["mandateId"] = self.mandateId
- if "featureInstanceId" not in logData or not logData["featureInstanceId"]:
- logData["featureInstanceId"] = self.featureInstanceId
+ # Note: Chat data is user-owned, no mandate/featureInstance context stored
+ # mandateId/featureInstanceId removed from ChatLog model
# Add status information if not present
if "status" not in logData and "type" in logData:
@@ -1500,11 +1505,8 @@ class ChatObjects:
if "workflowId" not in statData:
raise ValueError("workflowId is required in statData")
- # Set mandateId and featureInstanceId from context for proper data isolation
- if "mandateId" not in statData or not statData["mandateId"]:
- statData["mandateId"] = self.mandateId
- if "featureInstanceId" not in statData or not statData["featureInstanceId"]:
- statData["featureInstanceId"] = self.featureInstanceId
+ # Note: Chat data is user-owned, no mandate/featureInstance context stored
+ # mandateId/featureInstanceId removed from ChatStat model
# Validate the stat data against ChatStat model
stat = ChatStat(**statData)
@@ -1783,8 +1785,22 @@ class ChatObjects:
automationData["id"] = str(uuid.uuid4())
# Ensure mandateId and featureInstanceId are set for proper data isolation
- if "mandateId" not in automationData:
- automationData["mandateId"] = self.mandateId
+ if "mandateId" not in automationData or not automationData.get("mandateId"):
+ # Use request context mandateId, or fall back to Root mandate
+ effectiveMandateId = self.mandateId
+ if not effectiveMandateId:
+ # Fall back to Root mandate (first mandate in system)
+ try:
+ from modules.datamodels.datamodelUam import Mandate
+ from modules.security.rootAccess import getRootDbAppConnector
+ dbAppConn = getRootDbAppConnector()
+ allMandates = dbAppConn.getRecordset(Mandate)
+ if allMandates:
+ effectiveMandateId = allMandates[0].get("id")
+ logger.debug(f"createAutomationDefinition: Using Root mandate {effectiveMandateId}")
+ except Exception as e:
+ logger.warning(f"Could not get Root mandate: {e}")
+ automationData["mandateId"] = effectiveMandateId
if "featureInstanceId" not in automationData:
automationData["featureInstanceId"] = self.featureInstanceId
diff --git a/modules/interfaces/interfaceRbac.py b/modules/interfaces/interfaceRbac.py
index aec97b5a..3e062048 100644
--- a/modules/interfaces/interfaceRbac.py
+++ b/modules/interfaces/interfaceRbac.py
@@ -7,6 +7,18 @@ Provides RBAC filtering for database queries without connectors importing securi
Multi-Tenant Design:
- mandateId kommt aus Request-Context (X-Mandate-Id Header)
- GROUP-Filter verwendet expliziten mandateId Parameter
+
+Data Namespace Structure:
+- data.uam.{Table} → User Access Management (mandantenübergreifend)
+- data.chat.{Table} → Chat/AI-Daten (benutzer-eigen, kein Mandantenkontext)
+- data.files.{Table} → Dateien (benutzer-eigen)
+- data.automation.{Table} → Automation (benutzer-eigen)
+- data.feature.{code}.{Table} → Mandanten-/Feature-spezifische Daten (dynamisch)
+
+GROUP-Berechtigung:
+- data.uam.*: GROUP filtert nach Mandant (via UserMandate)
+- data.chat.*, data.files.*, data.automation.*: GROUP = MY (benutzer-eigen, kein Mandantenkontext)
+- data.feature.*: GROUP filtert nach mandateId/featureInstanceId
"""
import logging
@@ -21,25 +33,70 @@ from modules.security.rootAccess import getRootDbAppConnector
logger = logging.getLogger(__name__)
+# =============================================================================
+# Namespace-Mapping für statische Tabellen
+# =============================================================================
+# Definiert, welcher Namespace für jede Tabelle verwendet wird.
+# Tabellen ohne Eintrag fallen auf "system" zurück (Fallback für Rückwärtskompatibilität).
+# =============================================================================
+
+TABLE_NAMESPACE = {
+ # UAM (User Access Management) - mandantenübergreifend
+ "UserInDB": "uam",
+ "UserConnection": "uam",
+ "AuthEvent": "uam",
+ "Mandate": "uam",
+ "UserMandate": "uam",
+ "UserMandateRole": "uam",
+ "Invitation": "uam",
+ "Role": "uam",
+ "AccessRule": "uam",
+ "FeatureInstance": "uam",
+ "FeatureAccess": "uam",
+ "FeatureAccessRole": "uam",
+ # Chat - benutzer-eigen, kein Mandantenkontext
+ "ChatWorkflow": "chat",
+ "ChatMessage": "chat",
+ "ChatLog": "chat",
+ "ChatStat": "chat",
+ "ChatDocument": "chat",
+ "Prompt": "chat",
+ # Files - benutzer-eigen
+ "FileItem": "files",
+ "FileData": "files",
+ # Automation - benutzer-eigen
+ "AutomationDefinition": "automation",
+}
+
+# Namespaces ohne Mandantenkontext - GROUP wird auf MY gemappt
+USER_OWNED_NAMESPACES = {"chat", "files", "automation"}
+
+
def buildDataObjectKey(tableName: str, featureCode: Optional[str] = None) -> str:
"""
Build the standardized objectKey for a DATA context item.
Format:
- - System tables: data.system.{TableName}
+ - UAM tables: data.uam.{TableName}
+ - Chat tables: data.chat.{TableName}
+ - File tables: data.files.{TableName}
+ - Automation tables: data.automation.{TableName}
- Feature tables: data.feature.{featureCode}.{TableName}
Args:
- tableName: The database table name (e.g., "UserInDB", "TrusteePosition")
+ tableName: The database table name (e.g., "UserInDB", "ChatWorkflow")
featureCode: Optional feature code (e.g., "trustee", "realestate")
- If None, assumes system table.
+ If provided, uses data.feature.{featureCode}.{tableName}
Returns:
- Full objectKey string (e.g., "data.system.UserInDB" or "data.feature.trustee.TrusteePosition")
+ Full objectKey string (e.g., "data.uam.UserInDB", "data.chat.ChatWorkflow",
+ or "data.feature.trustee.TrusteePosition")
"""
if featureCode:
return f"data.feature.{featureCode}.{tableName}"
- return f"data.system.{tableName}"
+
+ namespace = TABLE_NAMESPACE.get(tableName, "system") # Fallback für unbekannte Tabellen
+ return f"data.{namespace}.{tableName}"
def getRecordsetWithRBAC(
@@ -107,7 +164,7 @@ def getRecordsetWithRBAC(
permissions = rbacInstance.getUserPermissions(
currentUser,
AccessRuleContext.DATA,
- objectKey, # Use full objectKey (e.g., "data.system.UserInDB")
+ objectKey, # Use full objectKey (e.g., "data.uam.UserInDB", "data.chat.ChatWorkflow")
mandateId=effectiveMandateId,
featureInstanceId=featureInstanceId
)
@@ -271,10 +328,32 @@ def buildRbacWhereClause(
"values": [currentUser.id]
}
- # Group records - filter by mandateId
+ # Group records - filter by mandateId or ownership based on namespace
if readLevel == AccessLevel.GROUP:
+ # Determine namespace for this table
+ namespace = TABLE_NAMESPACE.get(table, "system")
+
+ # For user-owned namespaces (chat, files, automation):
+ # GROUP has no meaning - these tables have no mandate context
+ # Simply ignore GROUP (no filtering)
+ if namespace in USER_OWNED_NAMESPACES:
+ return None
+
+ # For UAM and other namespaces: GROUP filters by mandate
effectiveMandateId = mandateId
+ if not effectiveMandateId:
+ # Fall back to Root mandate (first mandate in system) for GROUP access
+ # This allows system-level tables to be accessed without explicit mandate context
+ try:
+ from modules.datamodels.datamodelUam import Mandate
+ dbApp = getRootDbAppConnector()
+ allMandates = dbApp.getRecordset(Mandate)
+ if allMandates:
+ effectiveMandateId = allMandates[0].get("id")
+ except Exception as e:
+ logger.error(f"Error getting Root mandate: {e}")
+
if not effectiveMandateId:
logger.warning(f"User {currentUser.id} has no mandateId for GROUP access")
return {"condition": "1 = 0", "values": []}
@@ -324,10 +403,16 @@ def buildRbacWhereClause(
logger.error(f"Error building GROUP filter for UserConnection: {e}")
return {"condition": "1 = 0", "values": []}
+ # For system tables without mandateId column (Mandate, Role, etc.):
+ # No row-level filtering - GROUP access = ALL access for these
+ elif table in ("Mandate", "Role"):
+ return None
+
# For other tables, filter by mandateId field
+ # Also include records with NULL mandateId for backwards compatibility
else:
return {
- "condition": '"mandateId" = %s',
+ "condition": '("mandateId" = %s OR "mandateId" IS NULL)',
"values": [effectiveMandateId]
}
diff --git a/modules/routes/routeAdminRbacRules.py b/modules/routes/routeAdminRbacRules.py
index d125bc2c..916caf38 100644
--- a/modules/routes/routeAdminRbacRules.py
+++ b/modules/routes/routeAdminRbacRules.py
@@ -184,9 +184,12 @@ async def get_all_permissions(
recordFilter={"userId": str(reqContext.user.id), "enabled": True}
)
+ logger.debug(f"UI/RESOURCE permissions: Found {len(userMandates)} UserMandates for user {reqContext.user.id}")
+
# Collect all role IDs the user has across all mandates
for userMandate in userMandates:
mandateRoleIds = rootInterface.getRoleIdsForUserMandate(userMandate.get("id"))
+ logger.debug(f"UI/RESOURCE permissions: UserMandate {userMandate.get('id')} (mandate {userMandate.get('mandateId')}) has {len(mandateRoleIds)} roles: {mandateRoleIds}")
for rid in mandateRoleIds:
if rid not in roleIds:
roleIds.append(rid)
@@ -261,20 +264,24 @@ async def get_all_permissions(
items.add(rule.item)
# For each item, calculate user permissions
+ # For UI/RESOURCE context: Calculate permissions directly from the collected rules
+ # (Don't use getUserPermissions with mandateId - that would limit to one mandate's roles)
for item in sorted(items):
- permissions = interface.rbac.getUserPermissions(
- reqContext.user, ctx, item,
- mandateId=reqContext.mandateId,
- featureInstanceId=reqContext.featureInstanceId
- )
+ # Find matching rules for this item from the already-collected rules
+ itemView = False
+ for rule in allRules[ctx]:
+ if rule.item == item and rule.view:
+ itemView = True
+ break
+
# Only include if user has view permission
- if permissions.view:
+ if itemView:
result[ctx.value.lower()][item] = {
- "view": permissions.view,
- "read": permissions.read.value if permissions.read else None,
- "create": permissions.create.value if permissions.create else None,
- "update": permissions.update.value if permissions.update else None,
- "delete": permissions.delete.value if permissions.delete else None
+ "view": True,
+ "read": None, # UI context doesn't use CRUD permissions
+ "create": None,
+ "update": None,
+ "delete": None
}
return result
diff --git a/modules/routes/routeDataConnections.py b/modules/routes/routeDataConnections.py
index 37200186..5d84efd9 100644
--- a/modules/routes/routeDataConnections.py
+++ b/modules/routes/routeDataConnections.py
@@ -331,13 +331,8 @@ async def create_connection(
detail=f"Unsupported connection type: {connection_data.get('type')}"
)
- # Get fresh copy of user from database
- user = interface.getUser(currentUser.id)
- if not user:
- raise HTTPException(
- status_code=status.HTTP_404_NOT_FOUND,
- detail="User not found"
- )
+ # Note: currentUser is already authenticated via JWT - no need to re-verify from database
+ # The getCurrentUser dependency already validated the user exists
# Always create a new connection with PENDING status
connection = interface.addUserConnection(
diff --git a/modules/routes/routeNotifications.py b/modules/routes/routeNotifications.py
index 2016a745..7c8cf9ad 100644
--- a/modules/routes/routeNotifications.py
+++ b/modules/routes/routeNotifications.py
@@ -19,6 +19,7 @@ from modules.datamodels.datamodelNotification import (
NotificationStatus,
NotificationAction
)
+from modules.datamodels.datamodelRbac import Role
from modules.interfaces.interfaceDbApp import getRootInterface
from modules.shared.timeUtils import getUtcTimestamp
@@ -452,6 +453,17 @@ async def _handleInvitationAction(
mandateId = invitation.get("mandateId")
roleIds = invitation.get("roleIds", [])
+ # Ensure user gets the system "user" role for access to public UI elements (e.g. playground)
+ userRoles = rootInterface.db.getRecordset(
+ model_class=Role,
+ recordFilter={"roleLabel": "user"}
+ )
+ if userRoles:
+ userRoleId = userRoles[0].get("id")
+ if userRoleId and userRoleId not in roleIds:
+ roleIds = roleIds + [userRoleId]
+ logger.debug(f"Added system 'user' role {userRoleId} to invitation roles")
+
# Get mandate name for result message
mandates = rootInterface.db.getRecordset(
model_class=Mandate,
diff --git a/modules/security/rbac.py b/modules/security/rbac.py
index 34e80105..5d20d1fb 100644
--- a/modules/security/rbac.py
+++ b/modules/security/rbac.py
@@ -13,7 +13,7 @@ Multi-Tenant Design:
import logging
from typing import List, Optional, TYPE_CHECKING
from modules.datamodels.datamodelRbac import AccessRule, AccessRuleContext, Role
-from modules.datamodels.datamodelUam import User, UserPermissions, AccessLevel
+from modules.datamodels.datamodelUam import User, UserPermissions, AccessLevel, Mandate
from modules.datamodels.datamodelMembership import (
UserMandate,
UserMandateRole,
@@ -148,6 +148,9 @@ class RbacClass:
Get all role IDs for a user in the given context.
Uses UserMandate + UserMandateRole for the new multi-tenant model.
+ Also includes roles from the Root mandate (first mandate) if different
+ from the requested mandate, so system-level permissions are always available.
+
Args:
user: User object
mandateId: Mandate context
@@ -156,30 +159,40 @@ class RbacClass:
Returns:
List of role IDs
"""
- roleIds = []
-
- if not mandateId:
- return roleIds
+ roleIds = set() # Use set to avoid duplicates
try:
- # Lade UserMandate
- userMandates = self.dbApp.getRecordset(
- UserMandate,
- recordFilter={"userId": user.id, "mandateId": mandateId, "enabled": True}
- )
+ # Get Root mandate ID (first mandate in system)
+ allMandates = self.dbApp.getRecordset(Mandate)
+ rootMandateId = allMandates[0].get("id") if allMandates else None
- if not userMandates:
- return roleIds
+ # Collect mandates to check:
+ # - If mandateId provided: current mandate + Root mandate (if different)
+ # - If no mandateId: just Root mandate (for system-level access)
+ mandatesToCheck = []
+ if mandateId:
+ mandatesToCheck.append(mandateId)
+ if rootMandateId and rootMandateId not in mandatesToCheck:
+ mandatesToCheck.append(rootMandateId)
- userMandateId = userMandates[0].get("id")
-
- # Lade UserMandateRoles (Mandate-level roles)
- userMandateRoles = self.dbApp.getRecordset(
- UserMandateRole,
- recordFilter={"userMandateId": userMandateId}
- )
-
- roleIds.extend([r.get("roleId") for r in userMandateRoles if r.get("roleId")])
+ # Load roles from each mandate
+ for checkMandateId in mandatesToCheck:
+ userMandates = self.dbApp.getRecordset(
+ UserMandate,
+ recordFilter={"userId": user.id, "mandateId": checkMandateId, "enabled": True}
+ )
+
+ if userMandates:
+ userMandateId = userMandates[0].get("id")
+
+ # Lade UserMandateRoles (Mandate-level roles)
+ userMandateRoles = self.dbApp.getRecordset(
+ UserMandateRole,
+ recordFilter={"userMandateId": userMandateId}
+ )
+
+ foundRoles = [r.get("roleId") for r in userMandateRoles if r.get("roleId")]
+ roleIds.update(foundRoles)
# Load FeatureAccess + FeatureAccessRole (Instance-level roles)
if featureInstanceId:
@@ -200,12 +213,12 @@ class RbacClass:
recordFilter={"featureAccessId": featureAccessId}
)
- roleIds.extend([r.get("roleId") for r in featureAccessRoles if r.get("roleId")])
+ roleIds.update([r.get("roleId") for r in featureAccessRoles if r.get("roleId")])
except Exception as e:
logger.error(f"Error loading role IDs for user {user.id}: {e}")
- return roleIds
+ return list(roleIds)
def getRulesForUserBulk(
self,
@@ -388,7 +401,10 @@ class RbacClass:
Example: rule "data.feature.trustee" matches item "data.feature.trustee.TrusteePosition"
All items MUST use the full objectKey format:
- - System: data.system.{TableName} (e.g., "data.system.UserInDB")
+ - UAM: data.uam.{TableName} (e.g., "data.uam.UserInDB")
+ - Chat: data.chat.{TableName} (e.g., "data.chat.ChatWorkflow")
+ - Files: data.files.{TableName} (e.g., "data.files.FileItem")
+ - Automation: data.automation.{TableName} (e.g., "data.automation.AutomationDefinition")
- Feature: data.feature.{featureCode}.{TableName} (e.g., "data.feature.trustee.TrusteePosition")
- UI: ui.{area}.{page} (e.g., "ui.admin.users")
diff --git a/modules/system/mainSystem.py b/modules/system/mainSystem.py
index 24d1d410..644b121f 100644
--- a/modules/system/mainSystem.py
+++ b/modules/system/mainSystem.py
@@ -72,6 +72,7 @@ NAVIGATION_SECTIONS = [
"icon": "FaPlay",
"path": "/workflows/playground",
"order": 10,
+ "public": True,
},
{
"id": "chats",
@@ -80,6 +81,7 @@ NAVIGATION_SECTIONS = [
"icon": "FaListAlt",
"path": "/workflows/list",
"order": 20,
+ "public": True,
},
{
"id": "automations",
@@ -88,6 +90,7 @@ NAVIGATION_SECTIONS = [
"icon": "FaCogs",
"path": "/workflows/automations",
"order": 30,
+ "public": True,
},
],
},
@@ -297,72 +300,83 @@ UI_OBJECTS = _buildUiObjectsFromNavigation()
# =============================================================================
# System DATA Objects
# =============================================================================
+# Namespace structure:
+# - data.uam.* → User Access Management (cross-mandate)
+# - data.chat.* → Chat/AI data (user-owned, no mandate context)
+# - data.files.* → Files (user-owned)
+# - data.automation.* → Automation (user-owned)
+# - data.feature.* → Mandate-/feature-specific data (dynamic)
+# =============================================================================
DATA_OBJECTS = [
- # User/Auth tables
+    # UAM (User Access Management) - cross-mandate
{
- "objectKey": "data.system.UserInDB",
+ "objectKey": "data.uam.UserInDB",
"label": {"en": "User", "de": "Benutzer", "fr": "Utilisateur"},
- "meta": {"table": "UserInDB"}
+ "meta": {"table": "UserInDB", "namespace": "uam"}
},
{
- "objectKey": "data.system.AuthEvent",
+ "objectKey": "data.uam.AuthEvent",
"label": {"en": "Auth Event", "de": "Auth-Ereignis", "fr": "Événement d'auth"},
- "meta": {"table": "AuthEvent"}
+ "meta": {"table": "AuthEvent", "namespace": "uam"}
},
{
- "objectKey": "data.system.UserConnection",
+ "objectKey": "data.uam.UserConnection",
"label": {"en": "Connection", "de": "Verbindung", "fr": "Connexion"},
- "meta": {"table": "UserConnection"}
+ "meta": {"table": "UserConnection", "namespace": "uam"}
},
- # Mandate/Membership tables
{
- "objectKey": "data.system.Mandate",
+ "objectKey": "data.uam.Mandate",
"label": {"en": "Mandate", "de": "Mandant", "fr": "Mandat"},
- "meta": {"table": "Mandate"}
+ "meta": {"table": "Mandate", "namespace": "uam"}
},
{
- "objectKey": "data.system.UserMandate",
+ "objectKey": "data.uam.UserMandate",
"label": {"en": "User Mandate", "de": "Benutzer-Mandant", "fr": "Mandat utilisateur"},
- "meta": {"table": "UserMandate"}
+ "meta": {"table": "UserMandate", "namespace": "uam"}
},
{
- "objectKey": "data.system.Invitation",
+ "objectKey": "data.uam.Invitation",
"label": {"en": "Invitation", "de": "Einladung", "fr": "Invitation"},
- "meta": {"table": "Invitation"}
+ "meta": {"table": "Invitation", "namespace": "uam"}
},
- # RBAC tables
{
- "objectKey": "data.system.Role",
+ "objectKey": "data.uam.Role",
"label": {"en": "Role", "de": "Rolle", "fr": "Rôle"},
- "meta": {"table": "Role"}
+ "meta": {"table": "Role", "namespace": "uam"}
},
{
- "objectKey": "data.system.AccessRule",
+ "objectKey": "data.uam.AccessRule",
"label": {"en": "Access Rule", "de": "Zugriffsregel", "fr": "Règle d'accès"},
- "meta": {"table": "AccessRule"}
+ "meta": {"table": "AccessRule", "namespace": "uam"}
},
- # Feature tables
{
- "objectKey": "data.system.FeatureInstance",
+ "objectKey": "data.uam.FeatureInstance",
"label": {"en": "Feature Instance", "de": "Feature-Instanz", "fr": "Instance de feature"},
- "meta": {"table": "FeatureInstance"}
+ "meta": {"table": "FeatureInstance", "namespace": "uam"}
},
- # Content tables
+    # Chat - user-owned, no mandate context
{
- "objectKey": "data.system.Prompt",
+ "objectKey": "data.chat.Prompt",
"label": {"en": "Prompt", "de": "Prompt", "fr": "Prompt"},
- "meta": {"table": "Prompt"}
+ "meta": {"table": "Prompt", "namespace": "chat", "groupDisabled": True}
},
{
- "objectKey": "data.system.ChatWorkflow",
+ "objectKey": "data.chat.ChatWorkflow",
"label": {"en": "Chat Workflow", "de": "Chat-Workflow", "fr": "Workflow de chat"},
- "meta": {"table": "ChatWorkflow"}
+ "meta": {"table": "ChatWorkflow", "namespace": "chat", "groupDisabled": True}
},
+    # Files - user-owned
{
- "objectKey": "data.system.FileItem",
+ "objectKey": "data.files.FileItem",
"label": {"en": "File", "de": "Datei", "fr": "Fichier"},
- "meta": {"table": "FileItem"}
+ "meta": {"table": "FileItem", "namespace": "files", "groupDisabled": True}
+ },
+    # Automation - user-owned
+ {
+ "objectKey": "data.automation.AutomationDefinition",
+ "label": {"en": "Automation", "de": "Automatisierung", "fr": "Automatisation"},
+ "meta": {"table": "AutomationDefinition", "namespace": "automation", "groupDisabled": True}
},
]
diff --git a/modules/workflows/methods/methodSharepoint/actions/getExpensesFromPdf.py b/modules/workflows/methods/methodSharepoint/actions/getExpensesFromPdf.py
index c2ecb7c9..e583d8bf 100644
--- a/modules/workflows/methods/methodSharepoint/actions/getExpensesFromPdf.py
+++ b/modules/workflows/methods/methodSharepoint/actions/getExpensesFromPdf.py
@@ -360,7 +360,7 @@ async def _extractExpensesWithAi(services, fileContent: bytes, fileName: str, pr
from modules.datamodels.datamodelChat import ChatDocument
from modules.datamodels.datamodelDocref import DocumentReferenceList
- dbInterface = getDbInterface()
+ dbInterface = getDbInterface(services.user, mandateId=services.mandateId, featureInstanceId=featureInstanceId)
# Create file record
fileItem = dbInterface.createFile(
@@ -375,40 +375,52 @@ async def _extractExpensesWithAi(services, fileContent: bytes, fileName: str, pr
logger.info(f"Stored PDF {fileName} ({len(fileContent)} bytes) with fileId: {fileItem.id}")
# Step 2: Create ChatDocument referencing the file
- # Use workflow context if available
- workflowId = services.workflow.id if services.workflow else str(uuid.uuid4())
- messageId = f"expense_import_{workflowId}_{str(uuid.uuid4())[:8]}"
-
+ documentId = str(uuid.uuid4())
chatDocument = ChatDocument(
- id=str(uuid.uuid4()),
+ id=documentId,
mandateId=services.mandateId or "",
featureInstanceId=featureInstanceId or "",
- messageId=messageId,
+ messageId="", # Will be set when attached to message
fileId=fileItem.id,
fileName=fileName,
fileSize=len(fileContent),
mimeType="application/pdf"
)
- # Step 3: Create DocumentReferenceList for AI service
+ # Step 3: Create a proper message with the document attached to the workflow
+ # This ensures getChatDocumentsFromDocumentList can find the document via workflow.messages
+ messageData = {
+ "id": f"msg_expense_import_{str(uuid.uuid4())[:8]}",
+ "documentsLabel": f"expense_pdf_{fileName}",
+ "role": "user",
+ "status": "step",
+ "message": f"PDF document for expense extraction: {fileName}"
+ }
+
+ # Use storeMessageWithDocuments to properly create message + document and sync with workflow
+ createdMessage = services.chat.storeMessageWithDocuments(
+ services.workflow,
+ messageData,
+ [chatDocument.model_dump()]
+ )
+
+ # Update documentId to match the created document's actual ID
+ if createdMessage and createdMessage.documents:
+ documentId = createdMessage.documents[0].id
+
+ logger.info(f"Created message {createdMessage.id} with ChatDocument {documentId} for AI processing")
+
+ # Step 4: Create DocumentReferenceList for AI service
from modules.datamodels.datamodelDocref import DocumentItemReference
documentList = DocumentReferenceList(
references=[
DocumentItemReference(
- documentId=chatDocument.id,
+ documentId=documentId,
fileName=fileName
)
]
)
- # Step 4: Store the ChatDocument so AI service can retrieve it
- # The AI service uses getChatDocumentsFromDocumentList which queries the database
- from modules.interfaces.interfaceDbChat import getInterface as getChatInterface
- chatInterface = getChatInterface(services.user, mandateId=services.mandateId, featureInstanceId=featureInstanceId)
- chatInterface.createDocument(chatDocument.model_dump())
-
- logger.info(f"Created ChatDocument {chatDocument.id} for AI processing")
-
# Step 5: Call AI with documentList - let AI service handle everything
# (extraction, intent analysis, chunking, image processing)
options = AiCallOptions(
diff --git a/tests/unit/rbac/test_rbac_bootstrap.py b/tests/unit/rbac/test_rbac_bootstrap.py
index 05951264..e8b04f07 100644
--- a/tests/unit/rbac/test_rbac_bootstrap.py
+++ b/tests/unit/rbac/test_rbac_bootstrap.py
@@ -119,9 +119,9 @@ class TestRbacBootstrap:
# Should create multiple rules for different tables
assert db.recordCreate.call_count > 0
- # Check that Mandate table rules are created with full objectKey
+ # Check that Mandate table rules are created with full objectKey (UAM namespace)
mandateCalls = [call for call in db.recordCreate.call_args_list
- if call[0][1].item == "data.system.Mandate"]
+ if call[0][1].item == "data.uam.Mandate"]
assert len(mandateCalls) > 0
# Check that all roles have view=False and no access for Mandate
@@ -134,11 +134,11 @@ class TestRbacBootstrap:
def testInitRbacRulesSkipsIfExists(self):
"""Test that initRbacRules skips default rule creation if rules already exist, but adds missing table-specific rules."""
db = Mock()
- # Mock existing rules - include rules for ChatWorkflow and Prompt to prevent adding missing rules
+ # Mock existing rules - include rules for ChatWorkflow and AutomationDefinition to prevent adding missing rules
# Need rules for all required roles to fully prevent creation
- # Using full objectKey format: data.system.{TableName}
+ # Using semantic namespace format: data.chat.{TableName}, data.automation.{TableName}
existingRules = []
- for table in ["data.system.ChatWorkflow", "data.system.Prompt"]:
+ for table in ["data.chat.ChatWorkflow", "data.automation.AutomationDefinition"]:
for role in ["admin", "user", "viewer"]:
existingRules.append({
"id": f"rule_{table}_{role}",
diff --git a/tests/unit/rbac/test_rbac_permissions.py b/tests/unit/rbac/test_rbac_permissions.py
index a3387f92..b40bebe3 100644
--- a/tests/unit/rbac/test_rbac_permissions.py
+++ b/tests/unit/rbac/test_rbac_permissions.py
@@ -94,7 +94,7 @@ class TestRbacPermissionResolution:
AccessRule(
roleLabel="user",
context=AccessRuleContext.DATA,
- item="data.system.UserInDB", # Specific rule with full objectKey
+ item="data.uam.UserInDB", # Specific rule with UAM namespace
view=True,
read=AccessLevel.MY,
create=AccessLevel.NONE,
@@ -107,11 +107,11 @@ class TestRbacPermissionResolution:
rbac._getRulesForRole = mockGetRulesForRole
# Get permissions for UserInDB table - should use specific rule
- # Using full objectKey format: data.system.UserInDB
+ # Using UAM namespace: data.uam.UserInDB
permissions = rbac.getUserPermissions(
user,
AccessRuleContext.DATA,
- "data.system.UserInDB"
+ "data.uam.UserInDB"
)
# Most specific rule should win
@@ -253,29 +253,29 @@ class TestRbacPermissionResolution:
AccessRule(
roleLabel="user",
context=AccessRuleContext.DATA,
- item="data.system.UserInDB", # Table-level with full objectKey
+ item="data.uam.UserInDB", # Table-level with UAM namespace
view=True,
read=AccessLevel.MY
),
AccessRule(
roleLabel="user",
context=AccessRuleContext.DATA,
- item="data.system.UserInDB.email", # Field-level - most specific
+ item="data.uam.UserInDB.email", # Field-level - most specific
view=True,
read=AccessLevel.NONE
)
]
# Test exact match
- rule = rbac.findMostSpecificRule(rules, "data.system.UserInDB.email")
+ rule = rbac.findMostSpecificRule(rules, "data.uam.UserInDB.email")
assert rule is not None
- assert rule.item == "data.system.UserInDB.email"
+ assert rule.item == "data.uam.UserInDB.email"
assert rule.read == AccessLevel.NONE
# Test table-level match
- rule = rbac.findMostSpecificRule(rules, "data.system.UserInDB")
+ rule = rbac.findMostSpecificRule(rules, "data.uam.UserInDB")
assert rule is not None
- assert rule.item == "data.system.UserInDB"
+ assert rule.item == "data.uam.UserInDB"
assert rule.read == AccessLevel.MY
# Test generic fallback
@@ -294,7 +294,7 @@ class TestRbacPermissionResolution:
rule1 = AccessRule(
roleLabel="user",
context=AccessRuleContext.DATA,
- item="data.system.UserInDB",
+ item="data.uam.UserInDB",
view=True,
read=AccessLevel.MY,
create=AccessLevel.MY,
@@ -307,7 +307,7 @@ class TestRbacPermissionResolution:
rule2 = AccessRule(
roleLabel="user",
context=AccessRuleContext.DATA,
- item="data.system.UserInDB",
+ item="data.uam.UserInDB",
view=True,
read=AccessLevel.MY,
create=AccessLevel.GROUP, # Not allowed
@@ -320,7 +320,7 @@ class TestRbacPermissionResolution:
rule3 = AccessRule(
roleLabel="admin",
context=AccessRuleContext.DATA,
- item="data.system.UserInDB",
+ item="data.uam.UserInDB",
view=True,
read=AccessLevel.GROUP,
create=AccessLevel.GROUP,
@@ -333,7 +333,7 @@ class TestRbacPermissionResolution:
rule4 = AccessRule(
roleLabel="user",
context=AccessRuleContext.DATA,
- item="data.system.UserInDB",
+ item="data.uam.UserInDB",
view=True,
read=AccessLevel.NONE,
create=AccessLevel.MY, # Not allowed without read
From 5c4813b10a1e4531d0ab070a34f0bb2d95abf046 Mon Sep 17 00:00:00 2001
From: ValueOn AG
Date: Mon, 26 Jan 2026 14:54:47 +0100
Subject: [PATCH 28/32] dynamic workflow tested
---
.../services/serviceAi/subStructureFilling.py | 30 ++++++++++++++++++-
.../serviceAi/subStructureGeneration.py | 13 ++++++--
.../workflows/processing/workflowProcessor.py | 2 +-
3 files changed, 40 insertions(+), 5 deletions(-)
diff --git a/modules/services/serviceAi/subStructureFilling.py b/modules/services/serviceAi/subStructureFilling.py
index 5145ad54..8f63277f 100644
--- a/modules/services/serviceAi/subStructureFilling.py
+++ b/modules/services/serviceAi/subStructureFilling.py
@@ -1797,6 +1797,13 @@ LANGUAGE: Generate all content in {language.upper()} language. All text, titles,
CHAPTER: {chapterTitle} (Level {chapterLevel}, ID: {chapterId})
GENERATION HINT: {generationHint}
+## CONTENT EFFICIENCY PRINCIPLES
+- Generate COMPACT sections: Focus on essential information only
+- AVOID creating too many sections - combine related content where possible
+- Each section should serve a clear purpose with meaningful data
+- If no relevant data exists for a topic, do NOT create a section for it
+- Prefer ONE comprehensive section over multiple sparse sections
+
**CRITICAL**: The chapter's generationHint above describes what content this chapter should generate. If the generationHint references documents/images/data, then EACH section that generates content for this chapter MUST assign the relevant ContentParts from AVAILABLE CONTENT PARTS below.
NOTE: Chapter already has a heading section. Do NOT generate a heading for the chapter title.
@@ -2024,6 +2031,13 @@ LANGUAGE: Generate all content in {language.upper()} language. All text, titles,
- Content Type: {contentType}
- Generation Hint: {generationHint}
+## CONTENT EFFICIENCY PRINCIPLES
+- Generate COMPACT content: Focus on essential facts only
+- AVOID verbose text, filler phrases, or redundant explanations
+- Be CONCISE and direct - every word should add value
+- NO introductory phrases like "This section describes..." or "Here we present..."
+- Minimize output size for efficient processing
+
## INSTRUCTIONS
1. Extract all data from the context provided. Do not skip or omit any data.
2. Extract data only from the provided context. Never invent, create, or generate data that is not in the context.
@@ -2076,6 +2090,13 @@ LANGUAGE: Generate all content in {language.upper()} language. All text, titles,
- Content Type: {contentType}
- Generation Hint: {generationHint}
+## CONTENT EFFICIENCY PRINCIPLES
+- Generate COMPACT content: Focus on essential facts only
+- AVOID verbose text, filler phrases, or redundant explanations
+- Be CONCISE and direct - every word should add value
+- NO introductory phrases like "This section describes..." or "Here we present..."
+- Minimize output size for efficient processing
+
## AVAILABLE CONTENT FOR THIS SECTION
{contentPartsText}
@@ -2124,13 +2145,20 @@ LANGUAGE: Generate all content in {language.upper()} language. All text, titles,
- Content Type: {contentType}
- Generation Hint: {generationHint}
+## CONTENT EFFICIENCY PRINCIPLES
+- Generate COMPACT content: Focus on essential facts only
+- AVOID verbose text, filler phrases, or redundant explanations
+- Be CONCISE and direct - every word should add value
+- NO introductory phrases like "This section describes..." or "Here we present..."
+- Minimize output size for efficient processing
+
## INSTRUCTIONS
1. Generate content based on the Generation Hint above.
2. Create appropriate content that matches the content_type ({contentType}).
3. The content should be relevant to the USER REQUEST and fit the context of surrounding sections.
4. Return only valid JSON with "elements" array.
5. No HTML/styling: Plain text only, no markup.
-6. CONTINUE UNTIL COMPLETE: Extract ALL data from the provided context. Do NOT stop early because you think the response might be too long. Do NOT truncate or abbreviate. Do not impose artificial limits on yourself.
+6. Keep content CONCISE - focus on substance, not length.
## OUTPUT FORMAT
Return a JSON object with this structure:
diff --git a/modules/services/serviceAi/subStructureGeneration.py b/modules/services/serviceAi/subStructureGeneration.py
index 64624b84..67b045b3 100644
--- a/modules/services/serviceAi/subStructureGeneration.py
+++ b/modules/services/serviceAi/subStructureGeneration.py
@@ -420,8 +420,16 @@ CRITICAL RULE: If the user request mentions BOTH:
b) Generic content types (article text, main content, body text, etc.)
Then chapters that generate those generic content types MUST assign the relevant ContentParts, because the content should relate to or be based on the provided documents/images/data.
+## CONTENT EFFICIENCY PRINCIPLES
+- Generate COMPACT content: Focus on essential information only
+- AVOID verbose, lengthy, or repetitive text - be concise and direct
+- Prioritize FACTS over filler text - no introductions like "In this chapter..."
+- Minimize system resources: shorter content = faster processing
+- Quality over quantity: precise, meaningful content rather than padding
+
## CHAPTER STRUCTURE REQUIREMENTS
- Generate chapters based on USER REQUEST - analyze what structure the user wants
+- Create ONLY the minimum chapters needed to cover the user's request - avoid over-structuring
- IMPORTANT: Each chapter MUST have ALL these fields:
- id: Unique identifier (e.g., "chapter_1")
- level: Heading level (1, 2, 3, etc.)
@@ -431,9 +439,8 @@ Then chapters that generate those generic content types MUST assign the relevant
- sections: Empty array [] (REQUIRED - sections are generated in next phase)
- contentParts: {{"partId": {{"instruction": "..."}} or {{"caption": "..."}} or both}} - Assign ContentParts as required by CONTENT ASSIGNMENT RULE above
- The "instruction" field for each ContentPart MUST contain ALL relevant details from the USER REQUEST that apply to content extraction for this specific chapter. Include all formatting rules, data requirements, constraints, and specifications mentioned in the user request that are relevant for processing this ContentPart in this chapter.
-- generationHint: Description of what content to generate for this chapter
- The generationHint MUST contain ALL relevant details from the USER REQUEST that apply to this specific chapter. Include all formatting rules, data requirements, constraints, column specifications, validation rules, and any other specifications mentioned in the user request that are relevant for generating content for this chapter. Do NOT use generic descriptions - include specific details from the user request.
-- The number of chapters depends on the user request - create only what is requested
+- generationHint: Keep CONCISE but include relevant details from the USER REQUEST. Focus on WHAT to generate, not HOW to phrase it verbosely.
+- The number of chapters depends on the user request - create only what is requested. Do NOT create chapters for topics without available data.
CRITICAL: Only create chapters for CONTENT sections, not for formatting/styling requirements. Formatting/styling requirements to be included in each generationHint if needed.
diff --git a/modules/workflows/processing/workflowProcessor.py b/modules/workflows/processing/workflowProcessor.py
index 11879e9d..38763f51 100644
--- a/modules/workflows/processing/workflowProcessor.py
+++ b/modules/workflows/processing/workflowProcessor.py
@@ -13,7 +13,7 @@ from modules.workflows.processing.modes.modeBase import BaseMode
from modules.workflows.processing.modes.modeDynamic import DynamicMode
from modules.workflows.processing.modes.modeAutomation import AutomationMode
from modules.workflows.processing.shared.stateTools import checkWorkflowStopped
-from modules.datamodels.datamodelAi import OperationTypeEnum, PriorityEnum, ProcessingModeEnum, AiCallOptions
+from modules.datamodels.datamodelAi import OperationTypeEnum, PriorityEnum, ProcessingModeEnum, AiCallOptions, AiCallRequest
from modules.shared.jsonUtils import extractJsonString, repairBrokenJson
if TYPE_CHECKING:
From 97cbda0ef23eda9ef87399ec9b736a29f2197636 Mon Sep 17 00:00:00 2001
From: ValueOn AG
Date: Mon, 26 Jan 2026 23:26:30 +0100
Subject: [PATCH 29/32] fixed AI call end-to-end with SaaS multi-mandate
---
modules/aicore/aicoreModelSelector.py | 14 +-
modules/datamodels/datamodelChat.py | 7 +-
.../automation/subAutomationTemplates.py | 2 +-
.../trustee/datamodelFeatureTrustee.py | 33 +-
.../trustee/interfaceFeatureTrustee.py | 73 ++---
modules/routes/routeChat.py | 6 +-
modules/services/serviceAi/mainServiceAi.py | 39 ++-
.../services/serviceAi/subStructureFilling.py | 131 ++++----
modules/workflows/automation/mainWorkflow.py | 11 +-
.../actions/getExpensesFromPdf.py | 299 +++++++++++-------
.../methodSharepoint/helpers/apiClient.py | 12 +
11 files changed, 395 insertions(+), 232 deletions(-)
diff --git a/modules/aicore/aicoreModelSelector.py b/modules/aicore/aicoreModelSelector.py
index eeda64d9..8bebb2d7 100644
--- a/modules/aicore/aicoreModelSelector.py
+++ b/modules/aicore/aicoreModelSelector.py
@@ -72,10 +72,16 @@ class ModelSelector:
promptSize = len(prompt.encode("utf-8"))
contextSize = len(context.encode("utf-8"))
totalSize = promptSize + contextSize
- # Convert bytes to approximate tokens (1 token ≈ 4 bytes)
- promptTokens = promptSize / 4
- contextTokens = contextSize / 4
- totalTokens = totalSize / 4
+ # Convert bytes to approximate tokens
+ # Conservative estimate: 1 token ≈ 2 bytes (for safety margin)
+ # Note: Actual tokenization varies by content type and model
+ # - English text: ~4 bytes/token
+ # - Structured data/JSON: ~2-3 bytes/token
+ # - Base64/encoded data: ~1.5-2 bytes/token
+ bytesPerToken = 2 # Conservative estimate for mixed content
+ promptTokens = promptSize / bytesPerToken
+ contextTokens = contextSize / bytesPerToken
+ totalTokens = totalSize / bytesPerToken
logger.debug(f"Request sizes - Prompt: {promptTokens:.0f} tokens ({promptSize} bytes), Context: {contextTokens:.0f} tokens ({contextSize} bytes), Total: {totalTokens:.0f} tokens ({totalSize} bytes)")
diff --git a/modules/datamodels/datamodelChat.py b/modules/datamodels/datamodelChat.py
index 3d71bf63..22c07aa2 100644
--- a/modules/datamodels/datamodelChat.py
+++ b/modules/datamodels/datamodelChat.py
@@ -220,11 +220,12 @@ class ChatMessage(BaseModel):
)
role: str = Field(description="Role of the message sender")
status: str = Field(description="Status of the message (first, step, last)")
- sequenceNr: int = Field(
+ sequenceNr: Optional[int] = Field(
+ default=0,
description="Sequence number of the message (set automatically)"
)
- publishedAt: float = Field(
- default_factory=getUtcTimestamp,
+ publishedAt: Optional[float] = Field(
+ default=None,
description="When the message was published (UTC timestamp in seconds)",
)
success: Optional[bool] = Field(
diff --git a/modules/features/automation/subAutomationTemplates.py b/modules/features/automation/subAutomationTemplates.py
index e95ca04d..420203ec 100644
--- a/modules/features/automation/subAutomationTemplates.py
+++ b/modules/features/automation/subAutomationTemplates.py
@@ -399,7 +399,7 @@ AUTOMATION_TEMPLATES: Dict[str, Any] = {
"connectionName": "",
"sharepointFolder": "",
"featureInstanceId": "",
-                "extractionPrompt": "Du bist ein Spezialist für die Extraktion von Spesendaten aus PDF-Dokumenten.\n\nAUFGABE:\nExtrahiere alle Speseneinträge aus dem bereitgestellten PDF-Dokument und gib sie im CSV-Format zurück.\n\nWICHTIGE REGELN:\n1. Pro MwSt-Prozentsatz einen separaten Datensatz erstellen\n2. Alle Datensätze zusammen müssen den Gesamtbetrag des Dokuments ergeben\n3. Der gesamte extrahierte Text des Dokuments muss im Feld \"desc\" erfasst werden\n4. Feld \"company\" enthält den Lieferanten/Verkäufer der Buchung\n5. Tags müssen aus dieser Liste gewählt werden: customer, meeting, license, subscription, fuel, food, material\n   - Mehrere zutreffende Tags mit Komma trennen\n\nCSV-SPALTEN (in dieser Reihenfolge):\nvaluta,transactionDateTime,company,desc,tags,bookingCurrency,bookingAmount,originalCurrency,originalAmount,vatPercentage,vatAmount\n\nDATENFORMAT:\n- valuta: YYYY-MM-DD (Valutadatum)\n- transactionDateTime: Unix-Timestamp in Sekunden (Transaktionszeitpunkt)\n- company: Lieferant/Verkäufer Name\n- desc: Vollständiger extrahierter Text des Dokuments\n- tags: Komma-getrennte Tags aus der erlaubten Liste\n- bookingCurrency: Währungscode (CHF, EUR, USD, GBP)\n- bookingAmount: Buchungsbetrag als Dezimalzahl\n- originalCurrency: Original-Währungscode\n- originalAmount: Original-Betrag als Dezimalzahl\n- vatPercentage: MwSt-Prozentsatz (z.B. 8.1 für 8.1%)\n- vatAmount: MwSt-Betrag als Dezimalzahl\n\nBEISPIEL OUTPUT:\nvaluta,transactionDateTime,company,desc,tags,bookingCurrency,bookingAmount,originalCurrency,originalAmount,vatPercentage,vatAmount\n2026-01-15,1736953200,Migros AG,\"Einkauf Migros Zürich...\",food,CHF,45.50,CHF,45.50,2.6,1.15\n2026-01-15,1736953200,Migros AG,\"Einkauf Migros Zürich...\",material,CHF,12.30,CHF,12.30,8.1,0.92\n\nHINWEISE:\n- Wenn nur ein MwSt-Satz vorhanden ist, einen Datensatz erstellen\n- Wenn mehrere MwSt-Sätze vorhanden sind (z.B. Lebensmittel 2.6% und Non-Food 8.1%), separate Datensätze erstellen\n- Bei fehlenden Informationen: leeres Feld oder Standardwert\n- Keine Anführungszeichen um numerische Werte"
+ "extractionPrompt": "Du bist ein Spezialist für die Extraktion von Belegdaten aus PDF-Dokumenten.\n\nAUFGABE:\nExtrahiere die Daten aus dem bereitgestellten Zahlungsbeleg und erstelle EINE EINZIGE CSV-Tabelle mit allen Datensätzen.\n\nOUTPUT-STRUKTUR:\nErstelle genau EINE Tabelle mit den folgenden Spalten. Alle extrahierten Datensätze kommen in diese eine Tabelle als Zeilen.\n\nWICHTIGE REGELN:\n1. Pro MwSt-Prozentsatz einen separaten Datensatz (= Zeile) erstellen\n2. Alle Datensätze zusammen müssen den Gesamtbetrag des Dokuments ergeben\n3. Der gesamte extrahierte Text des Dokuments muss im Feld \"desc\" erfasst werden\n4. Feld \"company\" enthält den Lieferanten/Verkäufer der Buchung\n5. Tags müssen aus dieser Liste gewählt werden: customer, meeting, license, subscription, fuel, food, material\n - Mehrere zutreffende Tags mit Komma trennen\n\nCSV-SPALTEN (in dieser Reihenfolge):\nvaluta,transactionDateTime,company,desc,tags,bookingCurrency,bookingAmount,originalCurrency,originalAmount,vatPercentage,vatAmount\n\nDATENFORMAT:\n- valuta: YYYY-MM-DD (Valutadatum)\n- transactionDateTime: Unix-Timestamp in Sekunden (Transaktionszeitpunkt)\n- company: Lieferant/Verkäufer Name\n- desc: Vollständiger extrahierter Text des Dokuments\n- tags: Komma-getrennte Tags aus der erlaubten Liste\n- bookingCurrency: Währungscode (CHF, EUR, USD, GBP)\n- bookingAmount: Buchungsbetrag als Dezimalzahl\n- originalCurrency: Original-Währungscode\n- originalAmount: Original-Betrag als Dezimalzahl\n- vatPercentage: MwSt-Prozentsatz (z.B. 8.1 für 8.1%)\n- vatAmount: MwSt-Betrag als Dezimalzahl\n\nHINWEISE:\n- Wenn nur ein MwSt-Satz vorhanden ist, einen Datensatz erstellen\n- Wenn mehrere MwSt-Sätze vorhanden sind (z.B. Lebensmittel 2.6% und Non-Food 8.1%), separate Datensätze erstellen\n- Bei fehlenden Informationen: leeres Feld oder Standardwert"
}
}
]
diff --git a/modules/features/trustee/datamodelFeatureTrustee.py b/modules/features/trustee/datamodelFeatureTrustee.py
index 8b13dff1..d729c1e5 100644
--- a/modules/features/trustee/datamodelFeatureTrustee.py
+++ b/modules/features/trustee/datamodelFeatureTrustee.py
@@ -279,7 +279,10 @@ registerModelLabels(
class TrusteeDocument(BaseModel):
- """Contains document references and receipts for bookings.
+ """Contains document references for bookings.
+
+ Documents reference files in the central Files table via fileId.
+ This allows file content to be stored once and referenced by multiple features.
Note: organisationId and contractId removed as per architecture decision:
- The feature instance IS the organisation
@@ -294,11 +297,11 @@ class TrusteeDocument(BaseModel):
"frontend_required": False
}
)
- documentData: Optional[bytes] = Field(
+ fileId: Optional[str] = Field(
default=None,
- description="The file content (binary)",
+ description="Reference to central Files table (Files.id)",
json_schema_extra={
- "frontend_type": "file",
+ "frontend_type": "file_reference",
"frontend_readonly": False,
"frontend_required": False
}
@@ -321,6 +324,24 @@ class TrusteeDocument(BaseModel):
"frontend_options": "/api/trustee/mime-types/options"
}
)
+ sourceType: Optional[str] = Field(
+ default=None,
+ description="Source type (e.g., 'sharepoint', 'upload', 'email')",
+ json_schema_extra={
+ "frontend_type": "text",
+ "frontend_readonly": True,
+ "frontend_required": False
+ }
+ )
+ sourceLocation: Optional[str] = Field(
+ default=None,
+ description="Original source location (e.g., SharePoint path)",
+ json_schema_extra={
+ "frontend_type": "text",
+ "frontend_readonly": True,
+ "frontend_required": False
+ }
+ )
mandateId: Optional[str] = Field(
default=None,
description="Mandate ID (auto-set from context)",
@@ -349,9 +370,11 @@ registerModelLabels(
{"en": "Document", "fr": "Document", "de": "Dokument"},
{
"id": {"en": "ID", "fr": "ID", "de": "ID"},
- "documentData": {"en": "Document Data", "fr": "Données du document", "de": "Dokumentdaten"},
+ "fileId": {"en": "File Reference", "fr": "Référence du fichier", "de": "Datei-Referenz"},
"documentName": {"en": "Document Name", "fr": "Nom du document", "de": "Dokumentname"},
"documentMimeType": {"en": "MIME Type", "fr": "Type MIME", "de": "MIME-Typ"},
+ "sourceType": {"en": "Source Type", "fr": "Type de source", "de": "Quelltyp"},
+ "sourceLocation": {"en": "Source Location", "fr": "Emplacement source", "de": "Quellort"},
"mandateId": {"en": "Mandate", "fr": "Mandat", "de": "Mandat"},
"featureInstanceId": {"en": "Feature Instance", "fr": "Instance de fonctionnalité", "de": "Feature-Instanz"},
},
diff --git a/modules/features/trustee/interfaceFeatureTrustee.py b/modules/features/trustee/interfaceFeatureTrustee.py
index df8038f9..c3400752 100644
--- a/modules/features/trustee/interfaceFeatureTrustee.py
+++ b/modules/features/trustee/interfaceFeatureTrustee.py
@@ -177,8 +177,7 @@ class TrusteeObjects:
AccessRuleContext.DATA,
tableName,
mandateId=self.mandateId,
- featureInstanceId=self.featureInstanceId,
- featureCode=self.FEATURE_CODE
+ featureInstanceId=self.featureInstanceId
)
if not permissions.view:
@@ -205,8 +204,7 @@ class TrusteeObjects:
AccessRuleContext.DATA,
tableName,
mandateId=self.mandateId,
- featureInstanceId=self.featureInstanceId,
- featureCode=self.FEATURE_CODE
+ featureInstanceId=self.featureInstanceId
)
if not permissions.view:
@@ -270,8 +268,7 @@ class TrusteeObjects:
recordFilter=None,
orderBy="id",
mandateId=self.mandateId,
- featureInstanceId=self.featureInstanceId,
- featureCode=self.FEATURE_CODE
+ featureInstanceId=self.featureInstanceId
)
logger.debug(f"getAllOrganisations: getRecordsetWithRBAC returned {len(records)} records")
@@ -364,8 +361,7 @@ class TrusteeObjects:
recordFilter=None,
orderBy="id",
mandateId=self.mandateId,
- featureInstanceId=self.featureInstanceId,
- featureCode=self.FEATURE_CODE
+ featureInstanceId=self.featureInstanceId
)
# Users with ALL access level (from system RBAC) see all roles
@@ -475,8 +471,7 @@ class TrusteeObjects:
recordFilter=None,
orderBy="id",
mandateId=self.mandateId,
- featureInstanceId=self.featureInstanceId,
- featureCode=self.FEATURE_CODE
+ featureInstanceId=self.featureInstanceId
)
# Users with ALL access level (from system RBAC) see all records
@@ -535,8 +530,7 @@ class TrusteeObjects:
recordFilter={"organisationId": organisationId},
orderBy="id",
mandateId=self.mandateId,
- featureInstanceId=self.featureInstanceId,
- featureCode=self.FEATURE_CODE
+ featureInstanceId=self.featureInstanceId
)
return [TrusteeAccess(**{k: v for k, v in r.items() if not k.startswith("_")}) for r in records]
@@ -553,8 +547,7 @@ class TrusteeObjects:
recordFilter={"userId": userId},
orderBy="id",
mandateId=self.mandateId,
- featureInstanceId=self.featureInstanceId,
- featureCode=self.FEATURE_CODE
+ featureInstanceId=self.featureInstanceId
)
# Users with ALL access level (from system RBAC) see all records
@@ -671,8 +664,7 @@ class TrusteeObjects:
recordFilter=None,
orderBy="id",
mandateId=self.mandateId,
- featureInstanceId=self.featureInstanceId,
- featureCode=self.FEATURE_CODE
+ featureInstanceId=self.featureInstanceId
)
totalItems = len(records)
@@ -705,8 +697,7 @@ class TrusteeObjects:
recordFilter={"organisationId": organisationId},
orderBy="label",
mandateId=self.mandateId,
- featureInstanceId=self.featureInstanceId,
- featureCode=self.FEATURE_CODE
+ featureInstanceId=self.featureInstanceId
)
return [TrusteeContract(**{k: v for k, v in r.items() if not k.startswith("_")}) for r in records]
@@ -780,8 +771,8 @@ class TrusteeObjects:
createdRecord = self.db.recordCreate(TrusteeDocument, data)
if createdRecord and createdRecord.get("id"):
- # Remove binary data and metadata from Pydantic model
- cleanedRecord = {k: v for k, v in createdRecord.items() if not k.startswith("_") and k != "documentData"}
+ # Remove metadata from Pydantic model
+ cleanedRecord = {k: v for k, v in createdRecord.items() if not k.startswith("_")}
return TrusteeDocument(**cleanedRecord)
return None
@@ -795,12 +786,25 @@ class TrusteeObjects:
return TrusteeDocument(**cleanedRecord)
def getDocumentData(self, documentId: str) -> Optional[bytes]:
- """Get document binary data."""
+ """Get document binary data via fileId reference to central Files table."""
records = self.db.getRecordset(TrusteeDocument, recordFilter={"id": documentId})
record = records[0] if records else None
- if record:
- return record.get("documentData")
- return None
+ if not record:
+ return None
+
+ # New model: fileId references central Files table
+ fileId = record.get("fileId")
+ if fileId:
+ from modules.interfaces.interfaceDbManagement import getInterface as getDbInterface
+ dbInterface = getDbInterface(self.currentUser, mandateId=self.mandateId, featureInstanceId=self.featureInstanceId)
+ fileData = dbInterface.getFileData(fileId)
+ if fileData:
+ return fileData
+ logger.warning(f"File data not found for fileId {fileId}")
+ return None
+
+ # Legacy fallback: documentData was stored directly (for migration)
+ return record.get("documentData")
def getAllDocuments(self, params: Optional[PaginationParams] = None) -> PaginatedResult:
"""Get all documents with RBAC filtering + feature-level access filtering (metadata only)."""
@@ -812,8 +816,7 @@ class TrusteeObjects:
recordFilter=None,
orderBy="documentName",
mandateId=self.mandateId,
- featureInstanceId=self.featureInstanceId,
- featureCode=self.FEATURE_CODE
+ featureInstanceId=self.featureInstanceId
)
# Convert dicts to Pydantic objects (remove binary data and internal fields)
@@ -852,8 +855,7 @@ class TrusteeObjects:
recordFilter={"contractId": contractId},
orderBy="documentName",
mandateId=self.mandateId,
- featureInstanceId=self.featureInstanceId,
- featureCode=self.FEATURE_CODE
+ featureInstanceId=self.featureInstanceId
)
result = []
@@ -960,8 +962,7 @@ class TrusteeObjects:
recordFilter=None,
orderBy="valuta",
mandateId=self.mandateId,
- featureInstanceId=self.featureInstanceId,
- featureCode=self.FEATURE_CODE
+ featureInstanceId=self.featureInstanceId
)
# Convert dicts to Pydantic objects (remove internal fields)
@@ -1000,8 +1001,7 @@ class TrusteeObjects:
recordFilter={"contractId": contractId},
orderBy="valuta",
mandateId=self.mandateId,
- featureInstanceId=self.featureInstanceId,
- featureCode=self.FEATURE_CODE
+ featureInstanceId=self.featureInstanceId
)
return [TrusteePosition(**{k: v for k, v in r.items() if not k.startswith("_")}) for r in records]
@@ -1015,8 +1015,7 @@ class TrusteeObjects:
recordFilter={"organisationId": organisationId},
orderBy="valuta",
mandateId=self.mandateId,
- featureInstanceId=self.featureInstanceId,
- featureCode=self.FEATURE_CODE
+ featureInstanceId=self.featureInstanceId
)
return [TrusteePosition(**{k: v for k, v in r.items() if not k.startswith("_")}) for r in records]
@@ -1173,8 +1172,7 @@ class TrusteeObjects:
recordFilter={"positionId": positionId},
orderBy="id",
mandateId=self.mandateId,
- featureInstanceId=self.featureInstanceId,
- featureCode=self.FEATURE_CODE
+ featureInstanceId=self.featureInstanceId
)
return [TrusteePositionDocument(**{k: v for k, v in r.items() if not k.startswith("_")}) for r in links]
@@ -1188,8 +1186,7 @@ class TrusteeObjects:
recordFilter={"documentId": documentId},
orderBy="id",
mandateId=self.mandateId,
- featureInstanceId=self.featureInstanceId,
- featureCode=self.FEATURE_CODE
+ featureInstanceId=self.featureInstanceId
)
return [TrusteePositionDocument(**{k: v for k, v in r.items() if not k.startswith("_")}) for r in links]
diff --git a/modules/routes/routeChat.py b/modules/routes/routeChat.py
index 22aa764e..137b4a99 100644
--- a/modules/routes/routeChat.py
+++ b/modules/routes/routeChat.py
@@ -53,7 +53,8 @@ async def start_workflow(
"""
try:
# Start or continue workflow using playground controller
- workflow = await chatStart(context.user, userInput, workflowMode, workflowId)
+ mandateId = str(context.mandateId) if context.mandateId else None
+ workflow = await chatStart(context.user, userInput, workflowMode, workflowId, mandateId=mandateId)
return workflow
@@ -75,7 +76,8 @@ async def stop_workflow(
"""Stops a running workflow."""
try:
# Stop workflow using playground controller
- workflow = await chatStop(context.user, workflowId)
+ mandateId = str(context.mandateId) if context.mandateId else None
+ workflow = await chatStop(context.user, workflowId, mandateId=mandateId)
return workflow
diff --git a/modules/services/serviceAi/mainServiceAi.py b/modules/services/serviceAi/mainServiceAi.py
index 296a8032..a728bafc 100644
--- a/modules/services/serviceAi/mainServiceAi.py
+++ b/modules/services/serviceAi/mainServiceAi.py
@@ -785,14 +785,39 @@ Respond with ONLY a JSON object in this exact format:
if part.data
])
- # Call AI with extracted content
- aiRequest = AiCallRequest(
- prompt=f"{prompt}\n\nExtracted Content:\n{contentText}",
- context="",
- options=options
- )
+ # Check content size and use chunking if needed
+ # Conservative estimate: 2 bytes per token, 80% of model limit for safety
+ contentSizeBytes = len(contentText.encode('utf-8'))
+ promptSizeBytes = len(prompt.encode('utf-8'))
+ totalSizeBytes = contentSizeBytes + promptSizeBytes
+    estimatedTokens = totalSizeBytes / 2  # Deliberate overestimate: assume 2 bytes/token (models average ~4), erring high for safety
- aiResponse = await self.callAi(aiRequest)
+ # Get max model context (use Claude's 200k as reference, 80% = 160k tokens)
+ maxSafeTokens = 160000
+
+ if estimatedTokens > maxSafeTokens:
+ # Content too large - use chunking via ExtractionService
+ logger.warning(f"Content too large for single AI call: ~{estimatedTokens:.0f} tokens (limit: {maxSafeTokens}). Using chunked processing.")
+
+ # Use ExtractionService for chunked processing
+ extractionService = self.services.extraction
+ aiResponse = await extractionService.processContentPartsWithPrompt(
+ contentParts=contentParts,
+ prompt=prompt,
+ aiObjects=self.aiObjects,
+ options=options,
+ operationId=extractOperationId,
+ parentOperationId=parentOperationId
+ )
+ else:
+ # Content fits - use single AI call
+ aiRequest = AiCallRequest(
+ prompt=f"{prompt}\n\nExtracted Content:\n{contentText}",
+ context="",
+ options=options
+ )
+
+ aiResponse = await self.callAi(aiRequest)
# Create response document
resultDocument = DocumentData(
diff --git a/modules/services/serviceAi/subStructureFilling.py b/modules/services/serviceAi/subStructureFilling.py
index 8f63277f..9b503567 100644
--- a/modules/services/serviceAi/subStructureFilling.py
+++ b/modules/services/serviceAi/subStructureFilling.py
@@ -567,7 +567,8 @@ class StructureFiller:
userPrompt: str,
all_sections_list: List[Dict[str, Any]],
language: str,
- calculateOverallProgress: callable
+ outputFormat: str = "txt",
+ calculateOverallProgress: callable = None
) -> List[Dict[str, Any]]:
"""
Process a single section and return its elements.
@@ -761,7 +762,8 @@ class StructureFiller:
allSections=all_sections_list,
sectionIndex=sectionIndex,
isAggregation=isAggregation,
- language=language
+ language=language,
+ outputFormat=outputFormat
)
sectionOperationId = f"{fillOperationId}_section_{sectionId}"
@@ -949,7 +951,8 @@ class StructureFiller:
allSections=all_sections_list,
sectionIndex=sectionIndex,
isAggregation=False,
- language=language
+ language=language,
+ outputFormat=outputFormat
)
sectionOperationId = f"{fillOperationId}_section_{sectionId}"
@@ -1214,7 +1217,8 @@ class StructureFiller:
allSections=all_sections_list,
sectionIndex=sectionIndex,
isAggregation=False,
- language=language
+ language=language,
+ outputFormat=outputFormat
)
sectionOperationId = f"{fillOperationId}_section_{sectionId}"
@@ -1540,6 +1544,7 @@ class StructureFiller:
for doc in chapterStructure.get("documents", []):
docId = doc.get("id", "unknown")
docLanguage = self._getDocumentLanguage(chapterStructure, docId)
+ docFormat = doc.get("outputFormat", "txt") # Get output format for this document
for chapter in doc.get("chapters", []):
chapterId = chapter.get("id", "unknown")
@@ -1555,7 +1560,8 @@ class StructureFiller:
"sectionIndex": sectionIndex,
"chapterSectionCount": chapterSectionCount,
"section": section,
- "docLanguage": docLanguage
+ "docLanguage": docLanguage,
+ "docFormat": docFormat # Include output format
})
logger.info(f"Starting FULLY PARALLEL section generation: {totalSections} sections across {totalChapters} chapters")
@@ -1577,6 +1583,7 @@ class StructureFiller:
userPrompt=userPrompt,
all_sections_list=all_sections_list,
language=taskInfo["docLanguage"],
+ outputFormat=taskInfo.get("docFormat", "txt"), # Pass output format
calculateOverallProgress=lambda *args: completedSections[0] / totalSections if totalSections > 0 else 1.0
)
@@ -1826,23 +1833,11 @@ If AVAILABLE CONTENT PARTS are listed above, then EVERY section that generates c
- If chapter's generationHint references documents/images/data AND section generates content for that chapter → section MUST assign relevant ContentParts
- Empty contentPartIds [] are only allowed if section generates content WITHOUT referencing any available ContentParts AND WITHOUT relating to chapter's generationHint
-## CONTENT TYPES
-Available content types for sections: table, bullet_list, heading, paragraph, code_block, image
+## ACCEPTED CONTENT TYPES FOR THIS FORMAT
+The document output format ({outputFormat}) accepts only the following content types:
+{', '.join(acceptedSectionTypes)}
-## ACCEPTED SECTION TYPES FOR THIS FORMAT
-The document output format ({outputFormat}) accepts only the following section types:
-{', '.join(acceptedSectionTypes) if acceptedSectionTypes else 'All section types'}
-
-**IMPORTANT**: Only create sections with content types from the accepted list above. Do not create sections with types that are not accepted by this format.
-
-## FORMAT-APPROPRIATE SECTION STRUCTURE
-When determining which sections to create for this chapter, consider the document's output format ({outputFormat}) and ensure sections are structured appropriately for that format:
-- Different formats have different capabilities and constraints
-- Structure sections to match what the format can effectively represent
-- Consider what content types work best for each format
-- Ensure the section structure aligns with the format's strengths and limitations
-- Select content types that are well-suited for the target format
-- **CRITICAL**: Only use section types from the ACCEPTED SECTION TYPES list above
+**CRITICAL**: Only create sections with content types from this list. Other types will fail.
useAiCall RULE (simple):
- useAiCall: true → Content needs AI processing (extract, transform, generate, filter, summarize)
@@ -1853,7 +1848,7 @@ RETURN JSON:
"sections": [
{{
"id": "section_1",
- "content_type": "paragraph",
+ "content_type": "{acceptedSectionTypes[0]}",
"contentPartIds": ["extracted_part_id"],
"generationHint": "Description of what to extract or generate",
"useAiCall": true,
@@ -1897,7 +1892,8 @@ Return only valid JSON. Do not include any explanatory text outside the JSON.
allSections: Optional[List[Dict[str, Any]]] = None,
sectionIndex: Optional[int] = None,
isAggregation: bool = False,
- language: str = "en"
+ language: str = "en",
+ outputFormat: str = "txt"
) -> tuple[str, str]:
"""Baue Prompt für Section-Generierung mit vollständigem Kontext."""
# Filtere None-Werte
@@ -2005,14 +2001,29 @@ Return only valid JSON. Do not include any explanatory text outside the JSON.
for next in nextSections:
contextText += f"- {next['id']} ({next['content_type']}): {next['generation_hint']}\n"
- contentStructureExample = self._getContentStructureExample(contentType)
+ # Get accepted section types for the output format
+ acceptedTypesAggr = self._getAcceptedSectionTypesForFormat(outputFormat)
+
+ # CRITICAL: If the section's content_type is not supported by the output format,
+ # use the first accepted type instead. E.g., CSV only supports 'table', so
+ # even if section says 'code_block', we must output as 'table'.
+ effectiveContentType = contentType
+ if contentType not in acceptedTypesAggr and acceptedTypesAggr:
+ effectiveContentType = acceptedTypesAggr[0]
+ logger.debug(f"Section {sectionId}: Content type '{contentType}' not supported by format '{outputFormat}', using '{effectiveContentType}' instead")
+
+ contentStructureExample = self._getContentStructureExample(effectiveContentType)
+
+ # Build format note for the prompt - purely dynamic from renderer
+ # Always show what types are accepted for this format
+ formatNoteAggr = f"\n- Target Output Format: {outputFormat.upper()} (accepted content types: {', '.join(acceptedTypesAggr)})"
# Create template structure explicitly (not extracted from prompt)
# This ensures exact identity between initial and continuation prompts
templateStructure = f"""{{
"elements": [
{{
- "type": "{contentType}",
+ "type": "{effectiveContentType}",
"content": {contentStructureExample}
}}
]
@@ -2022,14 +2033,14 @@ Return only valid JSON. Do not include any explanatory text outside the JSON.
prompt = f"""# TASK: Generate Section Content (Aggregation)
Return only valid JSON. No explanatory text, no comments, no markdown formatting outside JSON.
-If ContentParts have no data, return: {{"elements": [{{"type": "{contentType}", "content": {{"headers": [], "rows": []}}}}]}}
+If ContentParts have no data, return: {{"elements": [{{"type": "{effectiveContentType}", "content": {{"headers": [], "rows": []}}}}]}}
LANGUAGE: Generate all content in {language.upper()} language. All text, titles, headings, paragraphs, and content must be written in {language.upper()}.
## SECTION METADATA
- Section ID: {sectionId}
-- Content Type: {contentType}
-- Generation Hint: {generationHint}
+- Content Type: {effectiveContentType}
+- Generation Hint: {generationHint}{formatNoteAggr}
## CONTENT EFFICIENCY PRINCIPLES
- Generate COMPACT content: Focus on essential facts only
@@ -2044,7 +2055,7 @@ LANGUAGE: Generate all content in {language.upper()} language. All text, titles,
3. If the context contains no data, return empty structures (empty rows array for tables).
4. Aggregate all data into one element (e.g., one table).
5. For table: Extract all rows from the context. Return {{"headers": [...], "rows": []}} only if no data exists.
-6. Format based on content_type ({contentType}).
+6. Format based on content_type ({effectiveContentType}).
7. No HTML/styling: Plain text only, no markup.
8. CONTINUE UNTIL COMPLETE: Extract ALL data from the provided context. Do NOT stop early because you think the response might be too long. Do NOT truncate or abbreviate. Do not impose artificial limits on yourself.
@@ -2055,7 +2066,7 @@ Return a JSON object with this structure:
{{
"elements": [
{{
- "type": "{contentType}",
+ "type": "{effectiveContentType}",
"content": {contentStructureExample}
}}
]
@@ -2087,8 +2098,8 @@ LANGUAGE: Generate all content in {language.upper()} language. All text, titles,
## SECTION METADATA
- Section ID: {sectionId}
-- Content Type: {contentType}
-- Generation Hint: {generationHint}
+- Content Type: {effectiveContentType}
+- Generation Hint: {generationHint}{formatNoteAggr}
## CONTENT EFFICIENCY PRINCIPLES
- Generate COMPACT content: Focus on essential facts only
@@ -2103,7 +2114,7 @@ LANGUAGE: Generate all content in {language.upper()} language. All text, titles,
## INSTRUCTIONS
1. Extract data only from provided ContentParts. Never invent or generate data.
2. If ContentParts contain no data, return empty structures (empty rows array for tables).
-3. Format based on content_type ({contentType}).
+3. Format based on content_type ({effectiveContentType}).
4. Return only valid JSON with "elements" array.
5. No HTML/styling: Plain text only, no markup.
6. CONTINUE UNTIL COMPLETE: Extract ALL data from the provided context. Do NOT stop early because you think the response might be too long. Do NOT truncate or abbreviate. Do not impose artificial limits on yourself.
@@ -2114,7 +2125,7 @@ Return a JSON object with this structure:
{{
"elements": [
{{
- "type": "{contentType}",
+ "type": "{effectiveContentType}",
"content": {contentStructureExample}
}}
]
@@ -2142,8 +2153,8 @@ LANGUAGE: Generate all content in {language.upper()} language. All text, titles,
## SECTION METADATA
- Section ID: {sectionId}
-- Content Type: {contentType}
-- Generation Hint: {generationHint}
+- Content Type: {effectiveContentType}
+- Generation Hint: {generationHint}{formatNoteAggr}
## CONTENT EFFICIENCY PRINCIPLES
- Generate COMPACT content: Focus on essential facts only
@@ -2154,7 +2165,7 @@ LANGUAGE: Generate all content in {language.upper()} language. All text, titles,
## INSTRUCTIONS
1. Generate content based on the Generation Hint above.
-2. Create appropriate content that matches the content_type ({contentType}).
+2. Create appropriate content that matches the content_type ({effectiveContentType}).
3. The content should be relevant to the USER REQUEST and fit the context of surrounding sections.
4. Return only valid JSON with "elements" array.
5. No HTML/styling: Plain text only, no markup.
@@ -2166,7 +2177,7 @@ Return a JSON object with this structure:
{{
"elements": [
{{
- "type": "{contentType}",
+ "type": "{effectiveContentType}",
"content": {contentStructureExample}
}}
]
@@ -2557,28 +2568,26 @@ CRITICAL:
Returns:
List of accepted section content types (e.g., ["table", "code_block"])
+
+ Raises:
+ ValueError: If renderer not found or doesn't provide accepted types
"""
- try:
- from modules.services.serviceGeneration.renderers.registry import getRenderer
-
- # Get renderer for this format
- renderer = getRenderer(outputFormat, self.services)
-
- if renderer and hasattr(renderer, 'getAcceptedSectionTypes'):
- # Query renderer for accepted types
- acceptedTypes = renderer.getAcceptedSectionTypes(outputFormat)
- if acceptedTypes:
- logger.debug(f"Renderer for format '{outputFormat}' accepts section types: {acceptedTypes}")
- return acceptedTypes
-
- # Fallback: if no renderer or method not found, return all types
- from modules.datamodels.datamodelJson import supportedSectionTypes
- logger.debug(f"No renderer found for format '{outputFormat}' or method not available, using all section types")
- return list(supportedSectionTypes)
-
- except Exception as e:
- logger.warning(f"Error querying renderer for accepted section types for format '{outputFormat}': {str(e)}")
- # Fallback: return all types
- from modules.datamodels.datamodelJson import supportedSectionTypes
- return list(supportedSectionTypes)
+ from modules.services.serviceGeneration.renderers.registry import getRenderer
+
+ # Get renderer for this format - NO FALLBACK
+ renderer = getRenderer(outputFormat, self.services)
+
+ if not renderer:
+ raise ValueError(f"No renderer found for output format '{outputFormat}'. Check renderer registry.")
+
+ if not hasattr(renderer, 'getAcceptedSectionTypes'):
+ raise ValueError(f"Renderer for '{outputFormat}' does not implement getAcceptedSectionTypes(). Add this method to the renderer.")
+
+ acceptedTypes = renderer.getAcceptedSectionTypes(outputFormat)
+
+ if not acceptedTypes:
+ raise ValueError(f"Renderer for '{outputFormat}' returned empty accepted types. Fix getAcceptedSectionTypes() in the renderer.")
+
+ logger.debug(f"Renderer for '{outputFormat}' accepts: {acceptedTypes}")
+ return acceptedTypes
diff --git a/modules/workflows/automation/mainWorkflow.py b/modules/workflows/automation/mainWorkflow.py
index 503d1d13..19cd1004 100644
--- a/modules/workflows/automation/mainWorkflow.py
+++ b/modules/workflows/automation/mainWorkflow.py
@@ -24,7 +24,7 @@ from .subAutomationUtils import parseScheduleToCron, planToPrompt, replacePlaceh
logger = logging.getLogger(__name__)
-async def chatStart(currentUser: User, userInput: UserInputRequest, workflowMode: WorkflowModeEnum, workflowId: Optional[str] = None) -> ChatWorkflow:
+async def chatStart(currentUser: User, userInput: UserInputRequest, workflowMode: WorkflowModeEnum, workflowId: Optional[str] = None, mandateId: Optional[str] = None) -> ChatWorkflow:
"""
Starts a new chat or continues an existing one, then launches processing asynchronously.
@@ -33,12 +33,13 @@ async def chatStart(currentUser: User, userInput: UserInputRequest, workflowMode
userInput: User input request
workflowId: Optional workflow ID to continue existing workflow
workflowMode: "Dynamic" for iterative dynamic-style processing, "Automation" for automated workflow execution
+ mandateId: Mandate ID from request context (required for proper data isolation)
Example usage for Dynamic mode:
- workflow = await chatStart(currentUser, userInput, workflowMode=WorkflowModeEnum.WORKFLOW_DYNAMIC)
+ workflow = await chatStart(currentUser, userInput, workflowMode=WorkflowModeEnum.WORKFLOW_DYNAMIC, mandateId=mandateId)
"""
try:
- services = getServices(currentUser, None)
+ services = getServices(currentUser, mandateId=mandateId)
workflowManager = WorkflowManager(services)
workflow = await workflowManager.workflowStart(userInput, workflowMode, workflowId)
return workflow
@@ -46,10 +47,10 @@ async def chatStart(currentUser: User, userInput: UserInputRequest, workflowMode
logger.error(f"Error starting chat: {str(e)}")
raise
-async def chatStop(currentUser: User, workflowId: str) -> ChatWorkflow:
+async def chatStop(currentUser: User, workflowId: str, mandateId: Optional[str] = None) -> ChatWorkflow:
"""Stops a running chat."""
try:
- services = getServices(currentUser, None)
+ services = getServices(currentUser, mandateId=mandateId)
workflowManager = WorkflowManager(services)
return await workflowManager.workflowStop(workflowId)
except Exception as e:
diff --git a/modules/workflows/methods/methodSharepoint/actions/getExpensesFromPdf.py b/modules/workflows/methods/methodSharepoint/actions/getExpensesFromPdf.py
index e583d8bf..21de7537 100644
--- a/modules/workflows/methods/methodSharepoint/actions/getExpensesFromPdf.py
+++ b/modules/workflows/methods/methodSharepoint/actions/getExpensesFromPdf.py
@@ -29,6 +29,7 @@ logger = logging.getLogger(__name__)
# Configuration
MAX_FILES_PER_EXECUTION = 50
+MAX_CONCURRENT_AI_TASKS = 10 # Limit concurrent AI calls to avoid rate limits
ALLOWED_TAGS = ["customer", "meeting", "license", "subscription", "fuel", "food", "material"]
RATE_LIMIT_WAIT_SECONDS = 60
@@ -92,6 +93,11 @@ async def getExpensesFromPdf(self, parameters: Dict[str, Any]) -> ActionResult:
self.services.chat.progressLogFinish(operationId, False)
return ActionResult.isFailure(error="No valid Microsoft connection found")
+ # Set access token for SharePoint service
+ if not self.services.sharepoint.setAccessTokenFromConnection(connection):
+ self.services.chat.progressLogFinish(operationId, False)
+ return ActionResult.isFailure(error="Failed to set SharePoint access token")
+
# Find site and folder info
self.services.chat.progressLogUpdate(operationId, 0.1, "Resolving SharePoint site")
siteInfo, folderPath = await _resolveSiteAndFolder(self, sharepointFolder)
@@ -137,90 +143,104 @@ async def getExpensesFromPdf(self, parameters: Dict[str, Any]) -> ActionResult:
featureInstanceId=featureInstanceId
)
- # Process each PDF
- for idx, pdfFile in enumerate(pdfFiles):
- currentProgress = 0.2 + (idx * progressPerFile)
+ # Process PDFs in parallel with semaphore to limit concurrent AI calls
+ semaphore = asyncio.Semaphore(MAX_CONCURRENT_AI_TASKS)
+    completedCount = [0]  # One-element list so the nested coroutine can mutate it; counts files STARTED, not finished
+
+ async def processSinglePdf(idx: int, pdfFile: Dict[str, Any]) -> Dict[str, Any]:
+ """Process a single PDF document. Returns result dict."""
fileName = pdfFile.get("name", f"file_{idx}")
fileId = pdfFile.get("id")
- self.services.chat.progressLogUpdate(
- operationId,
- currentProgress,
- f"Processing {idx + 1}/{totalFiles}: {fileName}"
- )
-
- try:
- # Download PDF content
- fileContent = await self.services.sharepoint.downloadFile(siteId, fileId)
- if not fileContent:
- await _moveToErrorFolder(self, siteId, folderPath, fileName)
- errorDocuments.append({
+ async with semaphore:
+            # Count this file as started and report progress (no lock needed: all tasks run on one asyncio event loop)
+ completedCount[0] += 1
+ currentProgress = 0.2 + (completedCount[0] * progressPerFile)
+ self.services.chat.progressLogUpdate(
+ operationId,
+ min(currentProgress, 0.9),
+ f"Processing {completedCount[0]}/{totalFiles}: {fileName}"
+ )
+
+ try:
+ # Download PDF content
+ fileContent = await self.services.sharepoint.downloadFile(siteId, fileId)
+ if not fileContent:
+ await _moveToErrorFolder(self, siteId, folderPath, fileName)
+ return {"type": "error", "file": fileName, "error": "Failed to download", "movedTo": "error/"}
+
+ # AI call to extract expense data (this is the bottleneck - parallelized)
+ aiResult = await _extractExpensesWithAi(self.services, fileContent, fileName, prompt, featureInstanceId)
+
+ if not aiResult.get("success"):
+ await _moveToErrorFolder(self, siteId, folderPath, fileName)
+ return {"type": "error", "file": fileName, "error": aiResult.get("error", "AI extraction failed"), "movedTo": "error/"}
+
+ records = aiResult.get("records", [])
+ fileId = aiResult.get("fileId")
+
+ # Check for empty records
+ if not records:
+ logger.warning(f"Document {fileName}: No records extracted, moving to error folder")
+ await _moveToErrorFolder(self, siteId, folderPath, fileName)
+ return {"type": "skipped", "file": fileName, "reason": "No expense records extracted", "movedTo": "error/"}
+
+ # Validate and enrich records
+ validatedRecords = _validateAndEnrichRecords(records, fileName)
+
+ # Save to TrusteePosition and create Document + Position-Document links
+ savedCount = _saveToTrusteePosition(
+ trusteeInterface,
+ validatedRecords,
+ featureInstanceId,
+ self.services.mandateId,
+ fileId=fileId,
+ fileName=fileName,
+ sourceLocation=sharepointFolder
+ )
+
+ # Move document to "processed" subfolder
+ timestamp = datetime.now(UTC).strftime("%Y%m%d-%H%M%S")
+ newFileName = f"{timestamp}_{fileName}"
+
+ moveSuccess = await _moveToProcessedFolder(self, siteId, folderPath, fileName, newFileName)
+
+ return {
+ "type": "processed",
"file": fileName,
- "error": "Failed to download",
- "movedTo": "error/"
- })
- continue
-
- # AI call to extract expense data
- aiResult = await _extractExpensesWithAi(self.services, fileContent, fileName, prompt, featureInstanceId)
-
- if not aiResult.get("success"):
+ "newLocation": f"processed/{newFileName}" if moveSuccess else "move_failed",
+ "recordsExtracted": len(validatedRecords),
+ "recordsSaved": savedCount
+ }
+
+ except Exception as e:
+ errorMsg = str(e)
+ logger.error(f"Error processing {fileName}: {errorMsg}")
+
+ # Handle rate limit
+ if "429" in errorMsg or "throttl" in errorMsg.lower():
+ logger.warning(f"Rate limit hit, waiting {RATE_LIMIT_WAIT_SECONDS} seconds")
+ await asyncio.sleep(RATE_LIMIT_WAIT_SECONDS)
+
await _moveToErrorFolder(self, siteId, folderPath, fileName)
- errorDocuments.append({
- "file": fileName,
- "error": aiResult.get("error", "AI extraction failed"),
- "movedTo": "error/"
- })
- continue
-
- records = aiResult.get("records", [])
-
- # Check for empty records
- if not records:
- logger.warning(f"Document {fileName}: No records extracted, moving to error folder")
- await _moveToErrorFolder(self, siteId, folderPath, fileName)
- skippedDocuments.append({
- "file": fileName,
- "reason": "No expense records extracted",
- "movedTo": "error/"
- })
- continue
-
- # Validate and enrich records
- validatedRecords = _validateAndEnrichRecords(records, fileName)
-
- # Save to TrusteePosition
- savedCount = _saveToTrusteePosition(trusteeInterface, validatedRecords, featureInstanceId, self.services.mandateId)
- totalPositions += savedCount
-
- # Move document to "processed" subfolder
- timestamp = datetime.now(UTC).strftime("%Y%m%d-%H%M%S")
- newFileName = f"{timestamp}_{fileName}"
-
- moveSuccess = await _moveToProcessedFolder(self, siteId, folderPath, fileName, newFileName)
-
- processedDocuments.append({
- "file": fileName,
- "newLocation": f"processed/{newFileName}" if moveSuccess else "move_failed",
- "recordsExtracted": len(validatedRecords),
- "recordsSaved": savedCount
- })
-
- except Exception as e:
- errorMsg = str(e)
- logger.error(f"Error processing {fileName}: {errorMsg}")
-
- # Handle rate limit
- if "429" in errorMsg or "throttl" in errorMsg.lower():
- logger.warning(f"Rate limit hit, waiting {RATE_LIMIT_WAIT_SECONDS} seconds")
- await asyncio.sleep(RATE_LIMIT_WAIT_SECONDS)
-
- await _moveToErrorFolder(self, siteId, folderPath, fileName)
- errorDocuments.append({
- "file": fileName,
- "error": errorMsg,
- "movedTo": "error/"
- })
+ return {"type": "error", "file": fileName, "error": errorMsg, "movedTo": "error/"}
+
+ # Execute all PDF processing tasks in parallel (limited by semaphore)
+ logger.info(f"Starting parallel processing of {totalFiles} PDFs (max {MAX_CONCURRENT_AI_TASKS} concurrent)")
+ tasks = [processSinglePdf(idx, pdfFile) for idx, pdfFile in enumerate(pdfFiles)]
+ results = await asyncio.gather(*tasks, return_exceptions=True)
+
+ # Collect results
+ for result in results:
+ if isinstance(result, Exception):
+ errorDocuments.append({"file": "unknown", "error": str(result), "movedTo": "error/"})
+ elif result.get("type") == "processed":
+ processedDocuments.append(result)
+ totalPositions += result.get("recordsSaved", 0)
+ elif result.get("type") == "skipped":
+ skippedDocuments.append(result)
+ elif result.get("type") == "error":
+ errorDocuments.append(result)
# Create result summary
self.services.chat.progressLogUpdate(operationId, 0.95, "Creating result summary")
@@ -423,9 +443,10 @@ async def _extractExpensesWithAi(services, fileContent: bytes, fileName: str, pr
# Step 5: Call AI with documentList - let AI service handle everything
# (extraction, intent analysis, chunking, image processing)
+ # Use DATA_GENERATE (same path as ai.process) which handles chunking correctly
options = AiCallOptions(
resultFormat="csv",
- operationType=OperationTypeEnum.DATA_EXTRACT
+ operationType=OperationTypeEnum.DATA_GENERATE
)
aiResponse = await services.ai.callAiContent(
@@ -433,17 +454,31 @@ async def _extractExpensesWithAi(services, fileContent: bytes, fileName: str, pr
options=options,
documentList=documentList,
contentParts=None, # Let AI service extract from documents
- outputFormat="csv"
+ outputFormat="csv",
+ generationIntent="extract" # Signal this is extraction, not document generation
)
- if not aiResponse or not aiResponse.content:
+ if not aiResponse:
return {"success": False, "error": "AI returned empty response"}
- # Parse CSV response
- csvContent = aiResponse.content
+ # Get CSV from rendered documents (not from content - that's the internal structure)
+ if not aiResponse.documents or len(aiResponse.documents) == 0:
+ return {"success": False, "error": "AI returned no documents"}
+
+ # Get the CSV content from the first document
+ csvDocument = aiResponse.documents[0]
+ csvContent = csvDocument.documentData
+
+ # documentData is bytes, decode to string
+ if isinstance(csvContent, bytes):
+ csvContent = csvContent.decode('utf-8')
+
+ logger.info(f"Retrieved CSV content ({len(csvContent)} chars) from rendered document: {csvDocument.documentName}")
+
records = _parseCsvToRecords(csvContent)
- return {"success": True, "records": records}
+ # Return fileId so it can be used to create TrusteeDocument reference
+ return {"success": True, "records": records, "fileId": fileItem.id}
except Exception as e:
logger.error(f"AI extraction error for {fileName}: {str(e)}")
@@ -454,8 +489,9 @@ def _parseCsvToRecords(csvContent: str) -> List[Dict[str, Any]]:
"""Parse CSV content to list of expense records."""
records = []
try:
- # Clean up CSV content - remove markdown code blocks if present
content = csvContent.strip()
+
+ # Clean up CSV content - remove markdown code blocks if present
if content.startswith("```"):
lines = content.split('\n')
# Remove first and last line if they're code block markers
@@ -470,6 +506,8 @@ def _parseCsvToRecords(csvContent: str) -> List[Dict[str, Any]]:
# Clean up keys (remove whitespace)
cleanedRow = {k.strip(): v.strip() if isinstance(v, str) else v for k, v in row.items()}
records.append(cleanedRow)
+
+ logger.info(f"Parsed {len(records)} records from CSV content")
except Exception as e:
logger.error(f"Error parsing CSV: {str(e)}")
@@ -548,10 +586,54 @@ def _parseFloat(value) -> float:
return 0.0
-def _saveToTrusteePosition(trusteeInterface, records: List[Dict[str, Any]], featureInstanceId: str, mandateId: str) -> int:
- """Save validated records to TrusteePosition table."""
- savedCount = 0
+def _saveToTrusteePosition(
+ trusteeInterface,
+ records: List[Dict[str, Any]],
+ featureInstanceId: str,
+ mandateId: str,
+ fileId: Optional[str] = None,
+ fileName: Optional[str] = None,
+ sourceLocation: Optional[str] = None
+) -> int:
+ """
+ Save validated records to TrusteePosition table.
+ Also creates TrusteeDocument (referencing the source file) and links positions to it.
+ Args:
+ trusteeInterface: Trustee interface instance
+ records: List of expense records to save
+ featureInstanceId: Feature instance ID
+ mandateId: Mandate ID
+ fileId: Optional file ID from central Files table (source PDF)
+ fileName: Optional file name
+ sourceLocation: Optional source location (e.g., SharePoint path)
+
+ Returns:
+ Number of positions saved
+ """
+ savedCount = 0
+ savedPositionIds = []
+
+ # Step 1: Create TrusteeDocument referencing the source file
+ documentId = None
+ if fileId and fileName:
+ try:
+ document = trusteeInterface.createDocument({
+ "fileId": fileId,
+ "documentName": fileName,
+ "documentMimeType": "application/pdf",
+ "sourceType": "sharepoint",
+ "sourceLocation": sourceLocation
+ })
+ if document:
+ documentId = document.id
+ logger.info(f"Created TrusteeDocument {documentId} referencing file {fileId}")
+ else:
+ logger.warning(f"Failed to create TrusteeDocument for file {fileId}")
+ except Exception as e:
+ logger.error(f"Error creating TrusteeDocument: {str(e)}")
+
+ # Step 2: Save positions
for record in records:
try:
position = {
@@ -573,11 +655,27 @@ def _saveToTrusteePosition(trusteeInterface, records: List[Dict[str, Any]], feat
result = trusteeInterface.createPosition(position)
if result:
savedCount += 1
+ savedPositionIds.append(result.id)
logger.debug(f"Saved position: {position.get('company')} - {position.get('bookingAmount')}")
except Exception as e:
logger.error(f"Failed to save position: {str(e)}")
+ # Step 3: Create Position-Document links
+ if documentId and savedPositionIds:
+ for positionId in savedPositionIds:
+ try:
+ link = trusteeInterface.createPositionDocument({
+ "documentId": documentId,
+ "positionId": positionId
+ })
+ if link:
+ logger.debug(f"Created position-document link: {positionId} -> {documentId}")
+ else:
+ logger.warning(f"Failed to create position-document link: {positionId} -> {documentId}")
+ except Exception as e:
+ logger.error(f"Error creating position-document link: {str(e)}")
+
return savedCount
@@ -718,27 +816,16 @@ async def _deleteFile(self, siteId: str, folderPath: str, fileName: str) -> bool
if not fileId:
return False
- # Delete by ID
+ # Delete by ID using apiClient
deleteEndpoint = f"sites/{siteId}/drive/items/{fileId}"
+ result = await self.apiClient.makeGraphApiCall(deleteEndpoint, method="DELETE")
- # Make DELETE request
- if self.services.sharepoint.accessToken is None:
- logger.error("Access token not set for delete")
+ if "error" in result:
+ logger.warning(f"Delete failed: {result['error']}")
return False
- import aiohttp
- headers = {"Authorization": f"Bearer {self.services.sharepoint.accessToken}"}
- url = f"https://graph.microsoft.com/v1.0/{deleteEndpoint}"
-
- async with aiohttp.ClientSession() as session:
- async with session.delete(url, headers=headers) as response:
- if response.status in [200, 204]:
- logger.debug(f"Deleted file: {filePath}")
- return True
- else:
- errorText = await response.text()
- logger.warning(f"Delete failed: {response.status} - {errorText}")
- return False
+ logger.debug(f"Deleted file: {filePath}")
+ return True
except Exception as e:
logger.error(f"Failed to delete file: {str(e)}")
diff --git a/modules/workflows/methods/methodSharepoint/helpers/apiClient.py b/modules/workflows/methods/methodSharepoint/helpers/apiClient.py
index 7cead7ef..542e6dde 100644
--- a/modules/workflows/methods/methodSharepoint/helpers/apiClient.py
+++ b/modules/workflows/methods/methodSharepoint/helpers/apiClient.py
@@ -92,6 +92,18 @@ class ApiClientHelper:
errorText = await response.text()
logger.error(f"Graph API call failed: {response.status} - {errorText}")
return {"error": f"API call failed: {response.status} - {errorText}"}
+
+ elif method == "DELETE":
+ logger.debug(f"Starting DELETE request to {url}")
+ async with session.delete(url, headers=headers) as response:
+ logger.info(f"Graph API response: {response.status}")
+ if response.status in [200, 204]:
+ logger.debug(f"Graph API DELETE success")
+ return {"success": True}
+ else:
+ errorText = await response.text()
+ logger.error(f"Graph API call failed: {response.status} - {errorText}")
+ return {"error": f"API call failed: {response.status} - {errorText}"}
except asyncio.TimeoutError:
logger.error(f"Graph API call timed out after 30 seconds: {endpoint}")
From 7ca957f664d7c0d4574a9c014b2e881b5b71cc1b Mon Sep 17 00:00:00 2001
From: ValueOn AG
Date: Mon, 26 Jan 2026 23:40:12 +0100
Subject: [PATCH 30/32] fixed filter/sort for trustee
---
.../trustee/interfaceFeatureTrustee.py | 223 +++++++++++++++++-
1 file changed, 217 insertions(+), 6 deletions(-)
diff --git a/modules/features/trustee/interfaceFeatureTrustee.py b/modules/features/trustee/interfaceFeatureTrustee.py
index c3400752..99553108 100644
--- a/modules/features/trustee/interfaceFeatureTrustee.py
+++ b/modules/features/trustee/interfaceFeatureTrustee.py
@@ -212,6 +212,197 @@ class TrusteeObjects:
return getattr(permissions, operation, AccessLevel.NONE)
+ # ===== Pagination Helper Functions =====
+
+ def _applyFilters(self, records: List[Dict[str, Any]], params: Optional[PaginationParams]) -> List[Dict[str, Any]]:
+ """
+ Apply filter criteria to records.
+
+ Supports:
+ - General search: params.filters["search"] - searches across all text fields
+ - Field-specific filters: params.filters
+ - Simple: {"status": "running"} - equals match
+ - With operator: {"status": {"operator": "equals", "value": "running"}}
+ - Operators: equals, contains, gt, gte, lt, lte, in, notIn, startsWith, endsWith
+
+ Args:
+ records: List of record dictionaries to filter
+ params: PaginationParams with filters (search is inside filters)
+
+ Returns:
+ Filtered list of records
+ """
+ if not params or not records:
+ return records
+
+ # Get filters safely (may be None)
+ filters = getattr(params, 'filters', None)
+ if not filters:
+ return records
+
+ filtered = records
+
+ # Handle general search across text fields (search is inside filters)
+ searchTerm = filters.get("search") if isinstance(filters, dict) else None
+ if searchTerm:
+ searchTerm = str(searchTerm).lower()
+ if searchTerm:
+ searchFiltered = []
+ for record in filtered:
+ found = False
+ for key, value in record.items():
+ if isinstance(value, str) and searchTerm in value.lower():
+ found = True
+ break
+ elif isinstance(value, (int, float)) and searchTerm in str(value):
+ found = True
+ break
+ if found:
+ searchFiltered.append(record)
+ filtered = searchFiltered
+
+ # Handle field-specific filters
+ if filters:
+ for fieldName, filterValue in filters.items():
+ if fieldName == "search":
+ continue # Already handled above
+
+ fieldFiltered = []
+ for record in filtered:
+ if fieldName not in record:
+ continue
+
+ recordValue = record.get(fieldName)
+
+ # Handle simple value (equals operator)
+ if not isinstance(filterValue, dict):
+ if recordValue == filterValue:
+ fieldFiltered.append(record)
+ continue
+
+ # Handle filter with operator
+ operator = filterValue.get("operator", "equals")
+ filterVal = filterValue.get("value")
+
+ matches = False
+ if operator in ["equals", "eq"]:
+ matches = recordValue == filterVal
+
+ elif operator == "contains":
+ recordStr = str(recordValue).lower() if recordValue is not None else ""
+ filterStr = str(filterVal).lower() if filterVal is not None else ""
+ matches = filterStr in recordStr
+
+ elif operator == "startsWith":
+ recordStr = str(recordValue).lower() if recordValue is not None else ""
+ filterStr = str(filterVal).lower() if filterVal is not None else ""
+ matches = recordStr.startswith(filterStr)
+
+ elif operator == "endsWith":
+ recordStr = str(recordValue).lower() if recordValue is not None else ""
+ filterStr = str(filterVal).lower() if filterVal is not None else ""
+ matches = recordStr.endswith(filterStr)
+
+ elif operator == "gt":
+ try:
+ recordNum = float(recordValue) if recordValue is not None else float('-inf')
+ filterNum = float(filterVal) if filterVal is not None else float('-inf')
+ matches = recordNum > filterNum
+ except (ValueError, TypeError):
+ matches = False
+
+ elif operator == "gte":
+ try:
+ recordNum = float(recordValue) if recordValue is not None else float('-inf')
+ filterNum = float(filterVal) if filterVal is not None else float('-inf')
+ matches = recordNum >= filterNum
+ except (ValueError, TypeError):
+ matches = False
+
+ elif operator == "lt":
+ try:
+ recordNum = float(recordValue) if recordValue is not None else float('inf')
+ filterNum = float(filterVal) if filterVal is not None else float('inf')
+ matches = recordNum < filterNum
+ except (ValueError, TypeError):
+ matches = False
+
+ elif operator == "lte":
+ try:
+ recordNum = float(recordValue) if recordValue is not None else float('inf')
+ filterNum = float(filterVal) if filterVal is not None else float('inf')
+ matches = recordNum <= filterNum
+ except (ValueError, TypeError):
+ matches = False
+
+ elif operator == "in":
+ if isinstance(filterVal, list):
+ matches = recordValue in filterVal
+ else:
+ matches = False
+
+ elif operator == "notIn":
+ if isinstance(filterVal, list):
+ matches = recordValue not in filterVal
+ else:
+ matches = False
+
+ if matches:
+ fieldFiltered.append(record)
+
+ filtered = fieldFiltered
+
+ return filtered
+
+ def _applySorting(self, records: List[Dict[str, Any]], params: Optional[PaginationParams]) -> List[Dict[str, Any]]:
+ """Apply multi-level sorting to records using stable sort."""
+ if not params:
+ return records
+
+ # Get sort safely (may be None or empty list)
+ sortFields = getattr(params, 'sort', None)
+ if not sortFields:
+ return records
+
+ sortedRecords = list(records)
+
+ # Sort from least significant to most significant field (reverse order)
+ # Python's sort is stable, so this creates proper multi-level sorting
+ for sortField in reversed(sortFields):
+ # Handle both dict and object formats
+ if isinstance(sortField, dict):
+ fieldName = sortField.get("field")
+ direction = sortField.get("direction", "asc")
+ else:
+ fieldName = getattr(sortField, "field", None)
+ direction = getattr(sortField, "direction", "asc")
+
+ if not fieldName:
+ continue
+
+ isDesc = (direction == "desc")
+
+ def makeSortKey(fName):
+ def sortKey(record):
+ value = record.get(fName)
+                    # None values get key (1, ""): they sort last in ascending order, but first when direction is desc (reverse=True)
+ if value is None:
+ return (1, "") # sorts after (0, ...)
+ else:
+ if isinstance(value, (int, float)):
+ return (0, value)
+ elif isinstance(value, str):
+ return (0, value.lower())
+ elif isinstance(value, bool):
+ return (0, value)
+ else:
+ return (0, str(value))
+ return sortKey
+
+ sortedRecords.sort(key=makeSortKey(fieldName), reverse=isDesc)
+
+ return sortedRecords
+
# ===== Organisation CRUD =====
def createOrganisation(self, data: Dict[str, Any]) -> Optional[TrusteeOrganisation]:
@@ -819,12 +1010,22 @@ class TrusteeObjects:
featureInstanceId=self.featureInstanceId
)
- # Convert dicts to Pydantic objects (remove binary data and internal fields)
- pydanticItems = []
+ # Clean records (remove binary data and internal fields) - keep as dicts for filtering/sorting
+ cleanedRecords = []
for record in records:
cleanedRecord = {k: v for k, v in record.items() if not k.startswith("_") and k != "documentData"}
- pydanticItems.append(TrusteeDocument(**cleanedRecord))
+ cleanedRecords.append(cleanedRecord)
+ # Step 2: Apply filters (search and field filters)
+ filteredRecords = self._applyFilters(cleanedRecords, params)
+
+ # Step 3: Apply sorting
+ sortedRecords = self._applySorting(filteredRecords, params)
+
+ # Step 4: Convert to Pydantic objects
+ pydanticItems = [TrusteeDocument(**r) for r in sortedRecords]
+
+ # Step 5: Apply pagination
totalItems = len(pydanticItems)
if params:
pageSize = params.pageSize or 20
@@ -965,12 +1166,22 @@ class TrusteeObjects:
featureInstanceId=self.featureInstanceId
)
- # Convert dicts to Pydantic objects (remove internal fields)
- pydanticItems = []
+ # Clean records (remove internal fields) - keep as dicts for filtering/sorting
+ cleanedRecords = []
for record in records:
cleanedRecord = {k: v for k, v in record.items() if not k.startswith("_")}
- pydanticItems.append(TrusteePosition(**cleanedRecord))
+ cleanedRecords.append(cleanedRecord)
+ # Step 2: Apply filters (search and field filters)
+ filteredRecords = self._applyFilters(cleanedRecords, params)
+
+ # Step 3: Apply sorting
+ sortedRecords = self._applySorting(filteredRecords, params)
+
+ # Step 4: Convert to Pydantic objects
+ pydanticItems = [TrusteePosition(**r) for r in sortedRecords]
+
+ # Step 5: Apply pagination
totalItems = len(pydanticItems)
if params:
pageSize = params.pageSize or 20
From ee15fd64b07bb9b21228d1f6152355cf701740ea Mon Sep 17 00:00:00 2001
From: ValueOn AG
Date: Mon, 26 Jan 2026 23:48:19 +0100
Subject: [PATCH 31/32] fix crosstable trustee
---
.../trustee/interfaceFeatureTrustee.py | 24 +++++++++++++++++--
modules/routes/routeAdminFeatures.py | 24 ++++++++++++++-----
2 files changed, 40 insertions(+), 8 deletions(-)
diff --git a/modules/features/trustee/interfaceFeatureTrustee.py b/modules/features/trustee/interfaceFeatureTrustee.py
index 99553108..b5e08e2b 100644
--- a/modules/features/trustee/interfaceFeatureTrustee.py
+++ b/modules/features/trustee/interfaceFeatureTrustee.py
@@ -1095,7 +1095,8 @@ class TrusteeObjects:
def deleteDocument(self, documentId: str) -> bool:
"""Delete a document.
- Note: organisationId and contractId removed - feature instance IS the organisation.
+ All position-document cross-table entries (TrusteePositionDocument) referencing
+ this document are deleted first, then the document.
"""
# Get existing document to check creator
existingRecords = self.db.getRecordset(TrusteeDocument, recordFilter={"id": documentId})
@@ -1112,6 +1113,7 @@ class TrusteeObjects:
logger.warning(f"User {self.userId} lacks permission to delete document")
return False
+ self._deletePositionDocumentLinksForDocument(documentId)
return self.db.recordDelete(TrusteeDocument, documentId)
# ===== Position CRUD =====
@@ -1259,7 +1261,8 @@ class TrusteeObjects:
def deletePosition(self, positionId: str) -> bool:
"""Delete a position.
- Note: organisationId and contractId removed - feature instance IS the organisation.
+ All position-document cross-table entries (TrusteePositionDocument) referencing
+ this position are deleted first, then the position.
"""
# Get existing position to check creator
existingRecords = self.db.getRecordset(TrusteePosition, recordFilter={"id": positionId})
@@ -1276,6 +1279,7 @@ class TrusteeObjects:
logger.warning(f"User {self.userId} lacks permission to delete position")
return False
+ self._deletePositionDocumentLinksForPosition(positionId)
return self.db.recordDelete(TrusteePosition, positionId)
# ===== Position-Document Link CRUD =====
@@ -1423,6 +1427,22 @@ class TrusteeObjects:
return self.db.recordDelete(TrusteePositionDocument, linkId)
+ def _deletePositionDocumentLinksForDocument(self, documentId: str) -> None:
+ """Delete all position-document cross-table entries referencing this document."""
+ links = self.db.getRecordset(TrusteePositionDocument, recordFilter={"documentId": documentId})
+ for link in links:
+ linkId = link.get("id")
+ if linkId:
+ self.db.recordDelete(TrusteePositionDocument, linkId)
+
+ def _deletePositionDocumentLinksForPosition(self, positionId: str) -> None:
+ """Delete all position-document cross-table entries referencing this position."""
+ links = self.db.getRecordset(TrusteePositionDocument, recordFilter={"positionId": positionId})
+ for link in links:
+ linkId = link.get("id")
+ if linkId:
+ self.db.recordDelete(TrusteePositionDocument, linkId)
+
# ===== Trustee-specific Access Check =====
def getUserAccessForOrganisation(self, userId: str, organisationId: str) -> List[Dict[str, Any]]:
diff --git a/modules/routes/routeAdminFeatures.py b/modules/routes/routeAdminFeatures.py
index 82b796c1..56a79741 100644
--- a/modules/routes/routeAdminFeatures.py
+++ b/modules/routes/routeAdminFeatures.py
@@ -878,6 +878,12 @@ class FeatureInstanceUserResponse(BaseModel):
enabled: bool
+class FeatureInstanceUserUpdate(BaseModel):
+ """Request model for updating a feature instance user (roles and active flag)"""
+ roleIds: List[str] = Field(..., description="Role IDs to assign")
+ enabled: Optional[bool] = Field(None, description="Whether this user's access is active (omit to leave unchanged)")
+
+
@router.get("/instances/{instanceId}/users", response_model=List[FeatureInstanceUserResponse])
@limiter.limit("60/minute")
async def list_feature_instance_users(
@@ -1161,18 +1167,19 @@ async def update_feature_instance_user_roles(
request: Request,
instanceId: str,
userId: str,
- roleIds: List[str],
+ data: FeatureInstanceUserUpdate,
context: RequestContext = Depends(getRequestContext)
) -> Dict[str, Any]:
"""
- Update a user's roles in a feature instance.
+ Update a user's roles and active flag in a feature instance.
Replaces all existing FeatureAccessRole records with new ones.
+ If enabled is provided, updates the FeatureAccess.enabled flag.
Args:
instanceId: FeatureInstance ID
userId: User ID to update
- roleIds: New list of role IDs
+ data: roleIds and optional enabled
"""
try:
rootInterface = getRootInterface()
@@ -1215,6 +1222,10 @@ async def update_feature_instance_user_roles(
featureAccessId = existingAccess[0].get("id")
+ # Update enabled flag if provided
+ if data.enabled is not None:
+ rootInterface.db.recordModify(FeatureAccess, featureAccessId, {"enabled": data.enabled})
+
# Delete existing FeatureAccessRole records
existingRoles = rootInterface.db.getRecordset(
FeatureAccessRole,
@@ -1224,7 +1235,7 @@ async def update_feature_instance_user_roles(
rootInterface.db.recordDelete(FeatureAccessRole, role.get("id"))
# Create new FeatureAccessRole records
- for roleId in roleIds:
+ for roleId in data.roleIds:
featureAccessRole = FeatureAccessRole(
featureAccessId=featureAccessId,
roleId=roleId
@@ -1232,14 +1243,15 @@ async def update_feature_instance_user_roles(
rootInterface.db.recordCreate(FeatureAccessRole, featureAccessRole.model_dump())
logger.info(
- f"User {context.user.id} updated roles for user {userId} in feature instance {instanceId}: {roleIds}"
+ f"User {context.user.id} updated roles for user {userId} in feature instance {instanceId}: {data.roleIds}"
)
return {
"featureAccessId": featureAccessId,
"userId": userId,
"featureInstanceId": instanceId,
- "roleIds": roleIds
+ "roleIds": data.roleIds,
+ "enabled": data.enabled if data.enabled is not None else existingAccess[0].get("enabled", True)
}
except HTTPException:
From 4c91bd76077f786495066b7f12db87a17fbfaf6b Mon Sep 17 00:00:00 2001
From: ValueOn AG
Date: Tue, 27 Jan 2026 00:28:31 +0100
Subject: [PATCH 32/32] fixes
---
modules/interfaces/interfaceDbApp.py | 20 ++++++++++++++++++--
1 file changed, 18 insertions(+), 2 deletions(-)
diff --git a/modules/interfaces/interfaceDbApp.py b/modules/interfaces/interfaceDbApp.py
index 250b2a38..1f1d1e53 100644
--- a/modules/interfaces/interfaceDbApp.py
+++ b/modules/interfaces/interfaceDbApp.py
@@ -742,8 +742,16 @@ class AppObjects:
logger.error(f"Unexpected error creating user: {str(e)}")
raise ValueError(f"Failed to create user: {str(e)}")
- def updateUser(self, userId: str, updateData: Union[Dict[str, Any], User]) -> User:
- """Update a user's information"""
+ def updateUser(self, userId: str, updateData: Union[Dict[str, Any], User], allowSysAdminChange: bool = False) -> User:
+ """Update a user's information.
+
+ Args:
+ userId: ID of the user to update
+ updateData: User data to update (dict or User model)
+ allowSysAdminChange: If True, allows changing isSysAdmin field.
+ Only set to True when called by a SysAdmin explicitly
+ changing another user's admin status.
+ """
try:
# Get user
user = self.getUser(userId)
@@ -758,6 +766,14 @@ class AppObjects:
# Remove id field from updateDict if present - we'll use userId from parameter
updateDict.pop("id", None)
+
+ # SECURITY: Protect sensitive fields from being overwritten by profile updates.
+ # These fields should only be changed explicitly by admins, not through
+ # profile forms where they might be sent as default values (e.g., isSysAdmin=False).
+ protectedFields = ["isSysAdmin"]
+ if not allowSysAdminChange:
+ for field in protectedFields:
+ updateDict.pop(field, None)
# Update user data using model
updatedData = user.model_dump()