From 24f0c3e2ebec7dd27d40dfb732a40de13079c6d3 Mon Sep 17 00:00:00 2001 From: ValueOn AG
]` so the
+# DataPicker / RequiredAttributePicker can filter compatible upstream paths.
+# - frontendType "featureInstance" is rendered by FeatureInstancePicker which
+# loads /options/feature.instance?featureCode=trustee for the current mandate.
+_TRUSTEE_INSTANCE_PARAM = {
+ "name": "featureInstanceId",
+ "type": "FeatureInstanceRef[trustee]",
+ "required": True,
+ "frontendType": "featureInstance",
+ "frontendOptions": {"featureCode": "trustee"},
+ "description": t("Trustee-Mandant"),
+}
+
TRUSTEE_NODES = [
{
"id": "trustee.refreshAccountingData",
@@ -10,8 +24,7 @@ TRUSTEE_NODES = [
"label": t("Buchhaltungsdaten aktualisieren"),
"description": t("Buchhaltungsdaten aus externem System importieren/aktualisieren."),
"parameters": [
- {"name": "featureInstanceId", "type": "string", "required": True, "frontendType": "hidden",
- "description": t("Trustee Feature-Instanz-ID")},
+ dict(_TRUSTEE_INSTANCE_PARAM),
{"name": "forceRefresh", "type": "boolean", "required": False, "frontendType": "checkbox",
"description": t("Import erzwingen"), "default": False},
{"name": "dateFrom", "type": "string", "required": False, "frontendType": "date",
@@ -39,8 +52,7 @@ TRUSTEE_NODES = [
{"name": "sharepointFolder", "type": "string", "required": False, "frontendType": "sharepointFolder",
"frontendOptions": {"dependsOn": "connectionReference"},
"description": t("SharePoint-Ordnerpfad"), "default": ""},
- {"name": "featureInstanceId", "type": "string", "required": True, "frontendType": "hidden",
- "description": t("Trustee Feature-Instanz-ID")},
+ dict(_TRUSTEE_INSTANCE_PARAM),
{"name": "prompt", "type": "string", "required": False, "frontendType": "textarea",
"description": t("AI-Prompt für Extraktion"), "default": ""},
],
@@ -62,12 +74,11 @@ TRUSTEE_NODES = [
"description": t("TrusteeDocument + TrusteePosition aus Extraktionsergebnis erstellen."),
"parameters": [
# Type matches what producers actually emit: ActionResult.documents
- # is `List[ActionDocument]` (see datamodelChat.ActionResult). The
+ # is List[ActionDocument] (see datamodelChat.ActionResult). The
# DataPicker uses this string to filter compatible upstream paths.
{"name": "documentList", "type": "List[ActionDocument]", "required": True, "frontendType": "dataRef",
- "description": t("Dokumentenliste eines Upstream-Producers (z.B. trustee.extractFromFiles → documents); via expliziten DataRef im Graph zu binden — Pick-not-Push, kein Auto-Wire")},
- {"name": "featureInstanceId", "type": "string", "required": True, "frontendType": "hidden",
- "description": t("Trustee Feature-Instanz-ID")},
+ "description": t("Dokumentenliste — gebunden via DataRef.")},
+ dict(_TRUSTEE_INSTANCE_PARAM),
],
"inputs": 1,
"outputs": 1,
@@ -83,13 +94,9 @@ TRUSTEE_NODES = [
"label": t("In Buchhaltung synchronisieren"),
"description": t("Trustee-Positionen in Buchhaltungssystem übertragen."),
"parameters": [
- # Type matches what producers actually emit: ActionResult.documents
- # is `List[ActionDocument]` (see datamodelChat.ActionResult). The
- # DataPicker uses this string to filter compatible upstream paths.
{"name": "documentList", "type": "List[ActionDocument]", "required": True, "frontendType": "dataRef",
- "description": t("Verarbeitete Dokumentenliste eines Upstream-Producers (z.B. trustee.processDocuments → documents); via expliziten DataRef im Graph zu binden — Pick-not-Push, kein Auto-Wire")},
- {"name": "featureInstanceId", "type": "string", "required": True, "frontendType": "hidden",
- "description": t("Trustee Feature-Instanz-ID")},
+ "description": t("Verarbeitete Dokumentenliste — gebunden via DataRef.")},
+ dict(_TRUSTEE_INSTANCE_PARAM),
],
"inputs": 1,
"outputs": 1,
@@ -105,8 +112,7 @@ TRUSTEE_NODES = [
"label": t("Treuhand-Daten abfragen"),
"description": t("Daten aus der Trustee-DB lesen (Lookup, Aggregation, Roh-Export). Pendant zu refreshAccountingData ohne externen Sync."),
"parameters": [
- {"name": "featureInstanceId", "type": "string", "required": True, "frontendType": "hidden",
- "description": t("Trustee Feature-Instanz-ID")},
+ dict(_TRUSTEE_INSTANCE_PARAM),
{"name": "mode", "type": "string", "required": True, "frontendType": "select",
"frontendOptions": {"options": ["lookup", "raw", "aggregate"]},
"description": t("Abfragemodus"), "default": "lookup"},
diff --git a/modules/features/graphicalEditor/nodeRegistry.py b/modules/features/graphicalEditor/nodeRegistry.py
index dd302282..632e98fc 100644
--- a/modules/features/graphicalEditor/nodeRegistry.py
+++ b/modules/features/graphicalEditor/nodeRegistry.py
@@ -9,7 +9,7 @@ import logging
from typing import Dict, List, Any, Optional
from modules.features.graphicalEditor.nodeDefinitions import STATIC_NODE_TYPES
-from modules.features.graphicalEditor.nodeAdapter import _bindsActionFromLegacy
+from modules.features.graphicalEditor.nodeAdapter import bindsActionFromLegacy
from modules.features.graphicalEditor.portTypes import PORT_TYPE_CATALOG, SYSTEM_VARIABLES
from modules.shared.i18nRegistry import normalizePrimaryLanguageTag, resolveText
@@ -50,7 +50,7 @@ def _localizeNode(node: Dict[str, Any], language: str) -> Dict[str, Any]:
fields.
"""
lang = normalizePrimaryLanguageTag(language, "en")
- bindsAction = _bindsActionFromLegacy(node)
+ bindsAction = bindsActionFromLegacy(node)
out = dict(node)
for key in list(out.keys()):
if key.startswith("_"):
diff --git a/modules/features/graphicalEditor/portTypes.py b/modules/features/graphicalEditor/portTypes.py
index b607316a..e8d5b48d 100644
--- a/modules/features/graphicalEditor/portTypes.py
+++ b/modules/features/graphicalEditor/portTypes.py
@@ -610,7 +610,7 @@ SYSTEM_VARIABLES: Dict[str, Dict[str, str]] = {
}
-def _resolveSystemVariable(variable: str, context: Dict[str, Any]) -> Any:
+def resolveSystemVariable(variable: str, context: Dict[str, Any]) -> Any:
"""Resolve a system variable name to its runtime value."""
from datetime import datetime, timezone
@@ -642,7 +642,7 @@ def _resolveSystemVariable(variable: str, context: Dict[str, Any]) -> Any:
# Output normalizers
# ---------------------------------------------------------------------------
-def _normalizeToSchema(raw: Any, schemaName: str) -> Dict[str, Any]:
+def normalizeToSchema(raw: Any, schemaName: str) -> Dict[str, Any]:
"""
Normalize raw executor output to match the declared port schema.
Ensures _success/_error meta-fields are always present.
@@ -696,12 +696,12 @@ def _normalizeError(error: Exception, schemaName: str) -> Dict[str, Any]:
# Transit helpers
# ---------------------------------------------------------------------------
-def _wrapTransit(data: Any, meta: Dict[str, Any]) -> Dict[str, Any]:
+def wrapTransit(data: Any, meta: Dict[str, Any]) -> Dict[str, Any]:
"""Wrap data in a Transit envelope."""
return {"_transit": True, "_meta": meta, "data": data}
-def _unwrapTransit(output: Any) -> Any:
+def unwrapTransit(output: Any) -> Any:
"""Unwrap a Transit envelope, returning the inner data."""
if isinstance(output, dict) and output.get("_transit"):
return output.get("data")
@@ -726,10 +726,10 @@ def _resolveTransitChain(
return out
sources = connectionMap.get(current, [])
if not sources:
- return _unwrapTransit(out)
+ return unwrapTransit(out)
srcId = sources[0][0] if sources else None
if not srcId:
- return _unwrapTransit(out)
+ return unwrapTransit(out)
current = srcId
return nodeOutputs.get(nodeId)
@@ -738,7 +738,7 @@ def _resolveTransitChain(
# Schema derivation for dynamic outputs
# ---------------------------------------------------------------------------
-def _derive_form_payload_schema_from_param(node: Dict[str, Any], param_key: str) -> Optional[PortSchema]:
+def deriveFormPayloadSchemaFromParam(node: Dict[str, Any], param_key: str) -> Optional[PortSchema]:
"""Derive output schema from a field-builder JSON list (``fields``, ``formFields``, …)."""
fields_param = (node.get("parameters") or {}).get(param_key)
if not fields_param or not isinstance(fields_param, list):
@@ -776,7 +776,7 @@ def _derive_form_payload_schema_from_param(node: Dict[str, Any], param_key: str)
def _deriveFormPayloadSchema(node: Dict[str, Any]) -> Optional[PortSchema]:
"""Derive output schema from form field definitions (``parameters.fields``)."""
- return _derive_form_payload_schema_from_param(node, "fields")
+ return deriveFormPayloadSchemaFromParam(node, "fields")
def parse_graph_defined_output_schema(
@@ -796,9 +796,9 @@ def parse_graph_defined_output_schema(
schema_spec = output_port.get("schema")
if isinstance(schema_spec, dict) and schema_spec.get("kind") == "fromGraph":
param_key = str(schema_spec.get("parameter") or "fields")
- return _derive_form_payload_schema_from_param(node, param_key)
+ return deriveFormPayloadSchemaFromParam(node, param_key)
if output_port.get("dynamic") and output_port.get("deriveFrom"):
- return _derive_form_payload_schema_from_param(node, str(output_port.get("deriveFrom")))
+ return deriveFormPayloadSchemaFromParam(node, str(output_port.get("deriveFrom")))
if isinstance(schema_spec, str) and schema_spec:
return PORT_TYPE_CATALOG.get(schema_spec)
return None
diff --git a/modules/features/graphicalEditor/routeFeatureGraphicalEditor.py b/modules/features/graphicalEditor/routeFeatureGraphicalEditor.py
index 4332df50..dc136395 100644
--- a/modules/features/graphicalEditor/routeFeatureGraphicalEditor.py
+++ b/modules/features/graphicalEditor/routeFeatureGraphicalEditor.py
@@ -14,7 +14,7 @@ from fastapi import APIRouter, Depends, Path, Query, Body, Request, HTTPExceptio
from fastapi.responses import JSONResponse, StreamingResponse, Response
from modules.auth import limiter, getRequestContext, RequestContext
from modules.datamodels.datamodelPagination import PaginationParams, PaginationMetadata, normalize_pagination_dict
-from modules.routes.routeHelpers import _applyFiltersAndSort
+from modules.routes.routeHelpers import applyFiltersAndSort
from modules.features.graphicalEditor.mainGraphicalEditor import getGraphicalEditorServices
from modules.features.graphicalEditor.nodeRegistry import getNodeTypesForApi
@@ -230,6 +230,65 @@ def get_user_connection_options(
return {"options": options}
+@router.get("/{instanceId}/options/feature.instance")
+@limiter.limit("60/minute")
+def get_feature_instance_options(
+ request: Request,
+ instanceId: str = Path(..., description="GraphicalEditor feature instance ID (workflow context)"),
+ featureCode: str = Query(..., description="Feature code to filter by (e.g. 'trustee', 'redmine', 'clickup')"),
+ enabledOnly: bool = Query(True, description="If true (default), only enabled feature instances are returned"),
+ context: RequestContext = Depends(getRequestContext),
+) -> dict:
+ """Return mandate-scoped FeatureInstances for the given featureCode.
+
+ Used by node parameters with frontendType='featureInstance' (e.g. Trustee
+ or Redmine nodes that need to bind to a specific tenant FeatureInstance).
+ Always restricted to the calling user's mandate (derived from the workflow
+ feature instance) so the picker never leaks foreign-mandate instances.
+
+    Response: { options: [ { value: "<featureInstanceId>", label: "<label> (<featureCode>)" } ] }
+ """
+ mandateId = _validateInstanceAccess(instanceId, context)
+ if not context.user:
+ raise HTTPException(status_code=401, detail=routeApiMsg("Authentication required"))
+ code = (featureCode or "").strip().lower()
+ if not code:
+ raise HTTPException(status_code=400, detail=routeApiMsg("featureCode query parameter is required"))
+ if not mandateId:
+ return {"options": []}
+
+ from modules.interfaces.interfaceDbApp import getRootInterface
+ rootInterface = getRootInterface()
+ try:
+ instances = rootInterface.getFeatureInstancesByMandate(
+ mandateId, enabledOnly=bool(enabledOnly)
+ ) or []
+ except Exception as e:
+ logger.error(
+ "get_feature_instance_options: failed to load instances mandateId=%s: %s",
+ mandateId, e, exc_info=True,
+ )
+ return {"options": []}
+
+ options: List[Dict[str, str]] = []
+ for fi in instances:
+ fiCode = (getattr(fi, "featureCode", "") or "").strip().lower()
+ if fiCode != code:
+ continue
+ fiId = str(getattr(fi, "id", "") or "")
+ if not fiId:
+ continue
+ rawLabel = getattr(fi, "label", None) or getattr(fi, "name", None) or fiId
+ options.append({"value": fiId, "label": f"{rawLabel} ({fiCode})"})
+
+ logger.info(
+ "graphicalEditor feature.instance options: instanceId=%s mandateId=%s "
+ "featureCode=%s enabledOnly=%s -> %d options",
+ instanceId, mandateId, code, enabledOnly, len(options),
+ )
+ return {"options": options}
+
+
@router.post("/{instanceId}/execute")
@limiter.limit("30/minute")
async def post_execute(
@@ -474,6 +533,10 @@ def get_templates(
iface = getGraphicalEditorInterface(context.user, mandateId, instanceId)
templates = iface.getTemplates(scope=scope)
+ from modules.routes.routeHelpers import enrichRowsWithFkLabels
+ from modules.features.graphicalEditor.datamodelFeatureGraphicalEditor import AutoWorkflow
+ enrichRowsWithFkLabels(templates, AutoWorkflow)
+
paginationParams = None
if pagination:
try:
@@ -485,7 +548,7 @@ def get_templates(
raise HTTPException(status_code=400, detail=f"Invalid pagination parameter: {str(e)}")
if paginationParams:
- filtered = _applyFiltersAndSort(templates, paginationParams)
+ filtered = applyFiltersAndSort(templates, paginationParams)
totalItems = len(filtered)
totalPages = math.ceil(totalItems / paginationParams.pageSize) if totalItems > 0 else 0
startIdx = (paginationParams.page - 1) * paginationParams.pageSize
@@ -906,15 +969,15 @@ async def _runEditorAgent(
enrichedPrompt = prompt
if dataSourceIds:
- from modules.features.workspace.routeFeatureWorkspace import _buildDataSourceContext
+ from modules.features.workspace.routeFeatureWorkspace import buildDataSourceContext
chatSvc = getService("chat", ctx)
- dsInfo = _buildDataSourceContext(chatSvc, dataSourceIds)
+ dsInfo = buildDataSourceContext(chatSvc, dataSourceIds)
if dsInfo:
enrichedPrompt = f"{prompt}\n\n[Active Data Sources]\n{dsInfo}"
if featureDataSourceIds:
- from modules.features.workspace.routeFeatureWorkspace import _buildFeatureDataSourceContext
- fdsInfo = _buildFeatureDataSourceContext(featureDataSourceIds)
+ from modules.features.workspace.routeFeatureWorkspace import buildFeatureDataSourceContext
+ fdsInfo = buildFeatureDataSourceContext(featureDataSourceIds)
if fdsInfo:
enrichedPrompt = f"{enrichedPrompt}\n\n[Attached Feature Data Sources]\n{fdsInfo}"
@@ -1224,7 +1287,7 @@ def get_workflows(
raise HTTPException(status_code=400, detail=f"Invalid pagination parameter: {str(e)}")
if paginationParams:
- filtered = _applyFiltersAndSort(enriched, paginationParams)
+ filtered = applyFiltersAndSort(enriched, paginationParams)
totalItems = len(filtered)
totalPages = math.ceil(totalItems / paginationParams.pageSize) if totalItems > 0 else 0
startIdx = (paginationParams.page - 1) * paginationParams.pageSize
diff --git a/modules/features/redmine/serviceRedmine.py b/modules/features/redmine/serviceRedmine.py
index e244bd84..f0cfbfb4 100644
--- a/modules/features/redmine/serviceRedmine.py
+++ b/modules/features/redmine/serviceRedmine.py
@@ -48,7 +48,7 @@ from modules.features.redmine.interfaceFeatureRedmine import (
RedmineObjects,
getInterface,
)
-from modules.features.redmine.serviceRedmineStatsCache import _getStatsCache
+from modules.features.redmine.serviceRedmineStatsCache import getStatsCache
logger = logging.getLogger(__name__)
@@ -334,7 +334,7 @@ def getTicket(
def _invalidateCache(featureInstanceId: str) -> None:
try:
- _getStatsCache().invalidateInstance(featureInstanceId)
+ getStatsCache().invalidateInstance(featureInstanceId)
except Exception as e:
logger.warning(f"Failed to invalidate stats cache for {featureInstanceId}: {e}")
diff --git a/modules/features/redmine/serviceRedmineStats.py b/modules/features/redmine/serviceRedmineStats.py
index 2cfed27c..33a83aa7 100644
--- a/modules/features/redmine/serviceRedmineStats.py
+++ b/modules/features/redmine/serviceRedmineStats.py
@@ -38,7 +38,7 @@ from modules.features.redmine.datamodelRedmine import (
RedmineThroughputBucket,
RedmineTicketDto,
)
-from modules.features.redmine.serviceRedmineStatsCache import _getStatsCache
+from modules.features.redmine.serviceRedmineStatsCache import getStatsCache
logger = logging.getLogger(__name__)
@@ -69,7 +69,7 @@ async def getStats(
if status_norm not in {"*", "open", "closed"}:
status_norm = "*"
- cache = _getStatsCache()
+ cache = getStatsCache()
# Cache key now includes the new dimensions so different filter combos
# don't collide. ``_freeze`` (in the cache module) hashes lists/sets
# for us, so we can pass them directly as extra dimensions.
diff --git a/modules/features/redmine/serviceRedmineStatsCache.py b/modules/features/redmine/serviceRedmineStatsCache.py
index 46ad9372..12176178 100644
--- a/modules/features/redmine/serviceRedmineStatsCache.py
+++ b/modules/features/redmine/serviceRedmineStatsCache.py
@@ -123,7 +123,7 @@ class RedmineStatsCache:
_globalCache: Optional[RedmineStatsCache] = None
-def _getStatsCache() -> RedmineStatsCache:
+def getStatsCache() -> RedmineStatsCache:
"""Process-wide singleton."""
global _globalCache
if _globalCache is None:
diff --git a/modules/features/redmine/serviceRedmineSync.py b/modules/features/redmine/serviceRedmineSync.py
index 2c631630..2fd269d1 100644
--- a/modules/features/redmine/serviceRedmineSync.py
+++ b/modules/features/redmine/serviceRedmineSync.py
@@ -38,7 +38,7 @@ from modules.features.redmine.datamodelRedmine import (
RedmineTicketMirror,
)
from modules.features.redmine.interfaceFeatureRedmine import getInterface
-from modules.features.redmine.serviceRedmineStatsCache import _getStatsCache
+from modules.features.redmine.serviceRedmineStatsCache import getStatsCache
logger = logging.getLogger(__name__)
@@ -134,7 +134,7 @@ async def runSync(
durationMs=duration_ms,
lastSyncAt=now_epoch,
)
- _getStatsCache().invalidateInstance(featureInstanceId)
+ getStatsCache().invalidateInstance(featureInstanceId)
return RedmineSyncResultDto(
instanceId=featureInstanceId,
@@ -188,7 +188,7 @@ async def upsertSingleTicket(
now_epoch = time.time()
_upsertTicket(iface, featureInstanceId, mandateId, issue, now_epoch)
relations_upserted = _replaceRelations(iface, featureInstanceId, issue, now_epoch)
- _getStatsCache().invalidateInstance(featureInstanceId)
+ getStatsCache().invalidateInstance(featureInstanceId)
return relations_upserted
@@ -202,7 +202,7 @@ def deleteMirroredTicket(
iface = getInterface(currentUser, mandateId=mandateId, featureInstanceId=featureInstanceId)
deleted = iface.deleteMirroredTicket(featureInstanceId, int(issueId))
iface.deleteMirroredRelationsForIssue(featureInstanceId, int(issueId))
- _getStatsCache().invalidateInstance(featureInstanceId)
+ getStatsCache().invalidateInstance(featureInstanceId)
return deleted
diff --git a/modules/features/teamsbot/routeFeatureTeamsbot.py b/modules/features/teamsbot/routeFeatureTeamsbot.py
index 37cb2d77..3368f9fc 100644
--- a/modules/features/teamsbot/routeFeatureTeamsbot.py
+++ b/modules/features/teamsbot/routeFeatureTeamsbot.py
@@ -383,7 +383,7 @@ async def streamSession(
async def _eventGenerator():
"""Generate SSE events from the session event queue."""
- from .service import _sessionEvents
+ from .service import sessionEvents
# Send initial session state
yield f"data: {json.dumps({'type': 'sessionState', 'data': session})}\n\n"
@@ -394,10 +394,10 @@ async def streamSession(
yield f"data: {json.dumps({'type': 'botConnectionState', 'data': {'connected': _getActiveService(sessionId) is not None}})}\n\n"
# Stream events
- eventQueue = _sessionEvents.get(sessionId)
+ eventQueue = sessionEvents.get(sessionId)
if not eventQueue:
- _sessionEvents[sessionId] = asyncio.Queue()
- eventQueue = _sessionEvents[sessionId]
+ sessionEvents[sessionId] = asyncio.Queue()
+ eventQueue = sessionEvents[sessionId]
try:
while True:
@@ -810,8 +810,8 @@ async def deleteUserAccount(
# MFA Code Submission (relayed to active bot session)
# =========================================================================
-_mfaCodeQueues: dict = {}
-_mfaWaitTasks: dict = {}
+mfaCodeQueues: dict = {}
+mfaWaitTasks: dict = {}
@router.post("/{instanceId}/sessions/{sessionId}/mfa")
@limiter.limit("10/minute")
@@ -834,7 +834,7 @@ async def submitMfaCode(
logger.info(f"MFA submission for session {sessionId}: action={mfaAction}, codeLen={len(mfaCode)}")
- queue = _mfaCodeQueues.get(sessionId)
+ queue = mfaCodeQueues.get(sessionId)
if queue:
await queue.put({"action": mfaAction, "code": mfaCode})
return {"submitted": True}
@@ -981,7 +981,7 @@ async def testVoice(
):
"""Test TTS voice with AI-generated sample text in the correct language."""
from modules.interfaces.interfaceVoiceObjects import getVoiceInterface
- from .service import _createAiService
+ from .service import createAiService
from modules.datamodels.datamodelAi import AiCallRequest, AiCallOptions, OperationTypeEnum, PriorityEnum
mandateId = _validateInstanceAccess(instanceId, context)
@@ -992,7 +992,7 @@ async def testVoice(
botName = body.get("botName", "AI Assistant")
try:
- aiService = _createAiService(context.user, mandateId, instanceId)
+ aiService = createAiService(context.user, mandateId, instanceId)
await aiService.ensureAiObjectsInitialized()
aiRequest = AiCallRequest(
diff --git a/modules/features/teamsbot/service.py b/modules/features/teamsbot/service.py
index 2067a7f2..6d9df074 100644
--- a/modules/features/teamsbot/service.py
+++ b/modules/features/teamsbot/service.py
@@ -532,7 +532,7 @@ def getActiveService(sessionId: str) -> Optional["TeamsbotService"]:
# AI Service Factory (for billing-aware AI calls)
# =========================================================================
-def _createAiService(user, mandateId, featureInstanceId=None):
+def createAiService(user, mandateId, featureInstanceId=None):
"""Create a properly wired AiService via the service center."""
ctx = ServiceCenterContext(
user=user,
@@ -546,15 +546,15 @@ def _createAiService(user, mandateId, featureInstanceId=None):
# =========================================================================
# Session Event Queues (for SSE streaming to frontend)
# =========================================================================
-_sessionEvents: Dict[str, asyncio.Queue] = {}
+sessionEvents: Dict[str, asyncio.Queue] = {}
async def _emitSessionEvent(sessionId: str, eventType: str, data: Any):
"""Emit an event to the session's SSE stream.
Creates the queue on-demand so events are never silently dropped."""
- if sessionId not in _sessionEvents:
- _sessionEvents[sessionId] = asyncio.Queue()
- await _sessionEvents[sessionId].put({"type": eventType, "data": data, "timestamp": getIsoTimestamp()})
+ if sessionId not in sessionEvents:
+ sessionEvents[sessionId] = asyncio.Queue()
+ await sessionEvents[sessionId].put({"type": eventType, "data": data, "timestamp": getIsoTimestamp()})
def _normalizeGatewayHostForBotWs(host: str) -> str:
@@ -709,7 +709,7 @@ class TeamsbotService:
interface = interfaceDb.getInterface(self.currentUser, self.mandateId, self.instanceId)
# Initialize SSE event queue
- _sessionEvents[sessionId] = asyncio.Queue()
+ sessionEvents[sessionId] = asyncio.Queue()
try:
# Update status to JOINING
@@ -798,7 +798,7 @@ class TeamsbotService:
})
# Cleanup event queue
- _sessionEvents.pop(sessionId, None)
+ sessionEvents.pop(sessionId, None)
# =========================================================================
# Browser Bot WebSocket Communication
@@ -1048,9 +1048,9 @@ class TeamsbotService:
"timestamp": getIsoTimestamp(),
})
- from .routeFeatureTeamsbot import _mfaCodeQueues, _mfaWaitTasks
+ from .routeFeatureTeamsbot import mfaCodeQueues, mfaWaitTasks
mfaQueue = asyncio.Queue()
- _mfaCodeQueues[sessionId] = mfaQueue
+ mfaCodeQueues[sessionId] = mfaQueue
async def _waitAndForwardMfa(sid, queue, ws):
try:
@@ -1075,10 +1075,10 @@ class TeamsbotService:
except asyncio.CancelledError:
logger.info(f"[WS] MFA wait cancelled for session {sid} (resolved via page)")
finally:
- _mfaCodeQueues.pop(sid, None)
- _mfaWaitTasks.pop(sid, None)
+ mfaCodeQueues.pop(sid, None)
+ mfaWaitTasks.pop(sid, None)
- _mfaWaitTasks[sessionId] = asyncio.create_task(
+ mfaWaitTasks[sessionId] = asyncio.create_task(
_waitAndForwardMfa(sessionId, mfaQueue, websocket)
)
@@ -1100,11 +1100,11 @@ class TeamsbotService:
elif msgType == "mfaResolved":
success = message.get("success", False)
logger.info(f"[WS] MFA resolved: success={success}")
- from .routeFeatureTeamsbot import _mfaCodeQueues, _mfaWaitTasks
- task = _mfaWaitTasks.pop(sessionId, None)
+ from .routeFeatureTeamsbot import mfaCodeQueues, mfaWaitTasks
+ task = mfaWaitTasks.pop(sessionId, None)
if task and not task.done():
task.cancel()
- _mfaCodeQueues.pop(sessionId, None)
+ mfaCodeQueues.pop(sessionId, None)
await _emitSessionEvent(sessionId, "mfaResolved", {
"success": success,
"timestamp": getIsoTimestamp(),
@@ -1844,7 +1844,7 @@ class TeamsbotService:
)
try:
- aiService = _createAiService(
+ aiService = createAiService(
self.currentUser, self.mandateId, self.instanceId
)
await aiService.ensureAiObjectsInitialized()
@@ -1976,7 +1976,7 @@ class TeamsbotService:
)
try:
- aiService = _createAiService(
+ aiService = createAiService(
self.currentUser, self.mandateId, self.instanceId
)
await aiService.ensureAiObjectsInitialized()
@@ -2195,7 +2195,7 @@ class TeamsbotService:
# Call SPEECH_TEAMS
try:
- aiService = _createAiService(self.currentUser, self.mandateId, self.instanceId)
+ aiService = createAiService(self.currentUser, self.mandateId, self.instanceId)
await aiService.ensureAiObjectsInitialized()
request = AiCallRequest(
@@ -3767,7 +3767,7 @@ class TeamsbotService:
)
try:
- aiService = _createAiService(
+ aiService = createAiService(
self.currentUser, self.mandateId, self.instanceId
)
await aiService.ensureAiObjectsInitialized()
@@ -3930,7 +3930,7 @@ class TeamsbotService:
"""Summarize a long user-provided session context to its essential points.
This reduces token usage in every subsequent AI call."""
try:
- aiService = _createAiService(self.currentUser, self.mandateId, self.instanceId)
+ aiService = createAiService(self.currentUser, self.mandateId, self.instanceId)
await aiService.ensureAiObjectsInitialized()
request = AiCallRequest(
@@ -3980,7 +3980,7 @@ class TeamsbotService:
lines.append(f"[{speaker}]: {text}")
textToSummarize = "\n".join(lines)
- aiService = _createAiService(self.currentUser, self.mandateId, self.instanceId)
+ aiService = createAiService(self.currentUser, self.mandateId, self.instanceId)
await aiService.ensureAiObjectsInitialized()
request = AiCallRequest(
@@ -4021,7 +4021,7 @@ class TeamsbotService:
for t in transcripts
)
- aiService = _createAiService(self.currentUser, self.mandateId, self.instanceId)
+ aiService = createAiService(self.currentUser, self.mandateId, self.instanceId)
await aiService.ensureAiObjectsInitialized()
request = AiCallRequest(
diff --git a/modules/features/trustee/accounting/accountingBridge.py b/modules/features/trustee/accounting/accountingBridge.py
index b91cd83e..2a267b73 100644
--- a/modules/features/trustee/accounting/accountingBridge.py
+++ b/modules/features/trustee/accounting/accountingBridge.py
@@ -16,7 +16,7 @@ from .accountingConnectorBase import (
AccountingChart,
SyncResult,
)
-from .accountingRegistry import _getAccountingRegistry
+from .accountingRegistry import getAccountingRegistry
logger = logging.getLogger(__name__)
@@ -26,7 +26,7 @@ class AccountingBridge:
def __init__(self, trusteeInterface):
self._trusteeInterface = trusteeInterface
- self._registry = _getAccountingRegistry()
+ self._registry = getAccountingRegistry()
async def getActiveConfig(self, featureInstanceId: str) -> Optional[Dict[str, Any]]:
"""Load the active TrusteeAccountingConfig for a feature instance."""
diff --git a/modules/features/trustee/accounting/accountingConnectorBase.py b/modules/features/trustee/accounting/accountingConnectorBase.py
index c5124184..5d76c997 100644
--- a/modules/features/trustee/accounting/accountingConnectorBase.py
+++ b/modules/features/trustee/accounting/accountingConnectorBase.py
@@ -39,6 +39,26 @@ class AccountingChart(BaseModel):
accountType: Optional[str] = None
+class AccountingPeriodBalance(BaseModel):
+ """Balance snapshot for one account in one period.
+
+ Mirrors the `TrusteeDataAccountBalance` table 1:1 so
+ `accountingDataSync._persistBalances` can persist connector output without
+ re-mapping. `closingBalance` is always the *cumulative* balance at the end
+ of the period (NOT the period's net movement). `periodMonth=0` denotes the
+ annual bucket (closing balance per fiscal year-end).
+ """
+ accountNumber: str
+ periodYear: int
+ periodMonth: int = 0
+ openingBalance: float = 0.0
+ debitTotal: float = 0.0
+ creditTotal: float = 0.0
+ closingBalance: float = 0.0
+ currency: str = "CHF"
+ asOfDate: Optional[str] = None
+
+
class SyncResult(BaseModel):
"""Result of a sync operation."""
success: bool
@@ -126,6 +146,31 @@ class BaseAccountingConnector(ABC):
accountNumbers: pre-fetched account numbers (avoids redundant API call). Override in connectors that support it."""
return []
+ async def getAccountBalances(
+ self,
+ config: Dict[str, Any],
+ years: List[int],
+ accountNumbers: Optional[List[str]] = None,
+ ) -> List[AccountingPeriodBalance]:
+ """Read closing balances per account and period from the external system.
+
+ Contract:
+ - One row per (accountNumber, periodYear, periodMonth).
+ - `periodMonth=0` => annual bucket (closing balance per fiscal year-end).
+ - `periodMonth=1..12` => closing balance per end of that calendar month.
+ - `closingBalance` MUST be the *cumulative* balance at period end,
+ including all prior-year carry-over and yearend bookings -- NOT the
+ period's net movement.
+ - `openingBalance` MUST be the cumulative balance at period start
+ (= previous period's closingBalance).
+
+ Default returns []; `AccountingDataSync` will then fall back to a
+ local cumulative aggregation from journal lines. Override in
+ connectors that can fetch authoritative balances from the source
+ system (e.g. RMA `/gl/saldo`).
+ """
+ return []
+
async def uploadDocument(
self,
config: Dict[str, Any],
diff --git a/modules/features/trustee/accounting/accountingDataSync.py b/modules/features/trustee/accounting/accountingDataSync.py
index ef8789ea..0770ead5 100644
--- a/modules/features/trustee/accounting/accountingDataSync.py
+++ b/modules/features/trustee/accounting/accountingDataSync.py
@@ -25,7 +25,7 @@ from pathlib import Path
from typing import Callable, Dict, Any, List, Optional, Type
from .accountingConnectorBase import BaseAccountingConnector
-from .accountingRegistry import _getAccountingRegistry
+from .accountingRegistry import getAccountingRegistry
logger = logging.getLogger(__name__)
@@ -33,6 +33,72 @@ logger = logging.getLogger(__name__)
_HEARTBEAT_EVERY = 500
+def _isIncomeStatementAccount(accountNumber: str) -> bool:
+ """Swiss KMU-Kontenrahmen heuristic: 1xxx + 2xxx -> balance sheet
+ (cumulative carry-over across years); 3xxx..9xxx -> income statement
+ (reset to 0 at fiscal-year start). Used by the local fallback only;
+ when a connector returns balances, those values are used verbatim.
+ """
+ a = (accountNumber or "").strip()
+ if not a or not a[0].isdigit():
+ return False
+ return a[0] not in ("1", "2")
+
+
+def _resolveBalanceYears(
+ dateFrom: Optional[str],
+ dateTo: Optional[str],
+ oldestBookingDate: Optional[str],
+ newestBookingDate: Optional[str],
+) -> List[int]:
+ """Derive the list of years for which the connector should compute balances.
+
+ Prefers the ``dateFrom``/``dateTo`` import window the user requested. Falls
+ back to the actual oldest/newest booking date observed in the imported
+ journal (so e.g. a `dateTo=None` import still produces balances for every
+ year that has data). If nothing is known, returns the current year as a
+ sensible default.
+ """
+ def _yearOf(s: Optional[str]) -> Optional[int]:
+ if not s:
+ return None
+ try:
+ return int(str(s)[:4])
+ except (TypeError, ValueError):
+ return None
+
+ fromYear = _yearOf(dateFrom) or _yearOf(oldestBookingDate)
+ toYear = _yearOf(dateTo) or _yearOf(newestBookingDate)
+ if fromYear is None and toYear is None:
+ return [time.gmtime().tm_year]
+ if fromYear is None:
+ fromYear = toYear
+ if toYear is None:
+ toYear = fromYear
+ if toYear < fromYear:
+ fromYear, toYear = toYear, fromYear
+ return list(range(fromYear, toYear + 1))
+
+
+def _balanceModelToRow(b: Any, scope: Dict[str, Any]) -> Dict[str, Any]:
+ """Map an ``AccountingPeriodBalance`` (or compatible dict) to a DB row."""
+ if isinstance(b, dict):
+ get = b.get
+ else:
+ get = lambda k, default=None: getattr(b, k, default)
+ return {
+ "accountNumber": str(get("accountNumber", "") or ""),
+ "periodYear": int(get("periodYear", 0) or 0),
+ "periodMonth": int(get("periodMonth", 0) or 0),
+ "openingBalance": round(float(get("openingBalance", 0) or 0), 2),
+ "debitTotal": round(float(get("debitTotal", 0) or 0), 2),
+ "creditTotal": round(float(get("creditTotal", 0) or 0), 2),
+ "closingBalance": round(float(get("closingBalance", 0) or 0), 2),
+ "currency": str(get("currency", "CHF") or "CHF"),
+ **scope,
+ }
+
+
def _isDebugDumpEnabled() -> bool:
"""Whether to write raw connector payloads to disk for offline inspection.
@@ -101,7 +167,7 @@ class AccountingDataSync:
def __init__(self, trusteeInterface):
self._if = trusteeInterface
- self._registry = _getAccountingRegistry()
+ self._registry = getAccountingRegistry()
async def importData(
self,
@@ -246,18 +312,39 @@ class AccountingDataSync:
logger.error(f"Import contacts failed: {e}", exc_info=True)
summary["errors"].append(f"Contacts: {e}")
- # ---- Phase 4: Compute account balances ----
- # Progress budget: 90-95 %. Pure DB aggregation, no external calls.
+ # ---- Phase 4: Account balances ----
+ # Progress budget: 88-95 %. Connector first (RMA -> /gl/saldo, Bexio
+ # & Abacus -> aggregated journal). On empty/failed connector output
+ # we fall back to a *correct* cumulative aggregation from the
+ # journal lines we just persisted.
+ connectorBalances: list = []
+ balanceSource = "local-fallback"
try:
- _progress(90, "Berechne Kontensaldi...")
+ _progress(88, "Lade Kontensaldi vom Buchhaltungssystem...")
+ balanceYears = _resolveBalanceYears(dateFrom, dateTo, summary.get("oldestBookingDate"), summary.get("newestBookingDate"))
+ connectorBalances = await connector.getAccountBalances(
+ connConfig,
+ years=balanceYears,
+ accountNumbers=fetchedAccountNumbers or None,
+ )
+ _dumpSyncData("accountBalances", connectorBalances)
+ if connectorBalances:
+ balanceSource = "connector"
+ except Exception as e:
+ logger.warning(f"Connector getAccountBalances failed, will use local fallback: {e}", exc_info=True)
+ summary["errors"].append(f"Balances connector: {e}")
+
+ try:
+ _progress(92, "Speichere Kontensaldi...")
balanceCount = await asyncio.to_thread(
self._persistBalances, featureInstanceId, mandateId,
TrusteeDataJournalEntry, TrusteeDataJournalLine, TrusteeDataAccountBalance,
+ connectorBalances, balanceSource,
)
summary["accountBalances"] = balanceCount
- _progress(95, f"{balanceCount} Saldi berechnet.")
+ _progress(95, f"{balanceCount} Saldi gespeichert (source={balanceSource}).")
except Exception as e:
- logger.error(f"Compute balances failed: {e}", exc_info=True)
+ logger.error(f"Persist balances failed: {e}", exc_info=True)
summary["errors"].append(f"Balances: {e}")
cfgId = cfgRecord.get("id")
@@ -401,12 +488,66 @@ class AccountingDataSync:
logger.info(f"Persisted {n} contacts for {featureInstanceId} in {time.time() - t0:.1f}s")
return n
- def _persistBalances(self, featureInstanceId: str, mandateId: str,
- modelEntry: Type, modelLine: Type, modelBalance: Type) -> int:
- """Re-aggregate journal lines into monthly + annual balances."""
def _persistBalances(
    self,
    featureInstanceId: str,
    mandateId: str,
    modelEntry: Type,
    modelLine: Type,
    modelBalance: Type,
    connectorBalances: list,
    source: str,
) -> int:
    """Persist account balances per (account, period) into ``TrusteeDataAccountBalance``.

    Source of truth (``source="connector"``): the list returned by
    ``BaseAccountingConnector.getAccountBalances`` is persisted 1:1.

    Fallback (``source="local-fallback"``): aggregate the just-persisted
    journal lines into cumulative balances via
    ``_buildLocalBalanceFallback`` (carry-over across years for
    balance-sheet accounts, per-fiscal-year reset for income-statement
    accounts, ``openingBalance`` = previous period's ``closingBalance``).
    The ``openingBalance`` of the very first imported period stays at 0 --
    no prior data is available by design.

    Returns the number of persisted balance rows.
    """
    t0 = time.time()
    # Full replace: clear old rows for this instance before re-inserting.
    self._bulkClear(modelBalance, featureInstanceId)
    scope = {"featureInstanceId": featureInstanceId, "mandateId": mandateId}
    # Both branches share the identical persist + log + return tail, so only
    # the row construction differs here (previously duplicated).
    if connectorBalances:
        rows = [_balanceModelToRow(b, scope) for b in connectorBalances]
    else:
        rows = self._buildLocalBalanceFallback(featureInstanceId, modelEntry, modelLine, scope)
    n = self._bulkCreate(modelBalance, rows)
    logger.info(
        f"Persisted {n} balances for {featureInstanceId} in {time.time() - t0:.1f}s "
        f"(source={source})"
    )
    return n
+
+ def _buildLocalBalanceFallback(
+ self,
+ featureInstanceId: str,
+ modelEntry: Type,
+ modelLine: Type,
+ scope: Dict[str, Any],
+ ) -> List[Dict[str, Any]]:
+ """Aggregate ``TrusteeDataJournalLine`` rows into cumulative period balances.
+
+ Returns rows ready for ``_bulkCreate``. Walks every account
+ chronologically through all years observed in the journal so the
+ cumulative balance and per-period opening are exact (within the
+ bounds of the imported window).
+ """
entries = self._if.db.getRecordset(
modelEntry, recordFilter={"featureInstanceId": featureInstanceId},
) or []
@@ -421,7 +562,9 @@ class AccountingDataSync:
modelLine, recordFilter={"featureInstanceId": featureInstanceId},
) or []
- buckets: Dict[tuple, Dict[str, float]] = defaultdict(lambda: {"debit": 0.0, "credit": 0.0})
+ movements: Dict[tuple, Dict[str, float]] = defaultdict(lambda: {"debit": 0.0, "credit": 0.0})
+ observedYears: set = set()
+ observedAccounts: set = set()
for ln in lines:
if isinstance(ln, dict):
jeid = ln.get("journalEntryId", "")
@@ -437,7 +580,7 @@ class AccountingDataSync:
bdate = entryDates.get(jeid, "")
if not accNo or not bdate:
continue
- parts = bdate.split("-")
+ parts = str(bdate).split("-")
if len(parts) < 2:
continue
try:
@@ -445,29 +588,56 @@ class AccountingDataSync:
month = int(parts[1])
except ValueError:
continue
+ movements[(accNo, year, month)]["debit"] += debit
+ movements[(accNo, year, month)]["credit"] += credit
+ observedYears.add(year)
+ observedAccounts.add(accNo)
- buckets[(accNo, year, month)]["debit"] += debit
- buckets[(accNo, year, month)]["credit"] += credit
- buckets[(accNo, year, 0)]["debit"] += debit
- buckets[(accNo, year, 0)]["credit"] += credit
+ if not observedYears or not observedAccounts:
+ return []
- scope = {"featureInstanceId": featureInstanceId, "mandateId": mandateId}
- rows = [{
- "accountNumber": accNo,
- "periodYear": year,
- "periodMonth": month,
- "openingBalance": 0.0,
- "debitTotal": round(totals["debit"], 2),
- "creditTotal": round(totals["credit"], 2),
- "closingBalance": round(totals["debit"] - totals["credit"], 2),
- "currency": "CHF",
- **scope,
- } for (accNo, year, month), totals in buckets.items()]
- n = self._bulkCreate(modelBalance, rows)
- logger.info(
- f"Persisted {n} balances for {featureInstanceId} in {time.time() - t0:.1f}s"
- )
- return n
+ sortedYears = sorted(observedYears)
+ rows: List[Dict[str, Any]] = []
+ for accNo in sorted(observedAccounts):
+ isER = _isIncomeStatementAccount(accNo)
+ cumulativeOpeningOfYear = 0.0
+ for year in sortedYears:
+ yearOpening = 0.0 if isER else cumulativeOpeningOfYear
+ running = yearOpening
+ yearDebit = 0.0
+ yearCredit = 0.0
+ for month in range(1, 13):
+ opening = running
+ mov = movements.get((accNo, year, month), {"debit": 0.0, "credit": 0.0})
+ running = opening + mov["debit"] - mov["credit"]
+ yearDebit += mov["debit"]
+ yearCredit += mov["credit"]
+ if mov["debit"] == 0 and mov["credit"] == 0 and opening == 0 and running == 0:
+ continue
+ rows.append({
+ "accountNumber": accNo,
+ "periodYear": year,
+ "periodMonth": month,
+ "openingBalance": round(opening, 2),
+ "debitTotal": round(mov["debit"], 2),
+ "creditTotal": round(mov["credit"], 2),
+ "closingBalance": round(running, 2),
+ "currency": "CHF",
+ **scope,
+ })
+ rows.append({
+ "accountNumber": accNo,
+ "periodYear": year,
+ "periodMonth": 0,
+ "openingBalance": round(yearOpening, 2),
+ "debitTotal": round(yearDebit, 2),
+ "creditTotal": round(yearCredit, 2),
+ "closingBalance": round(running, 2),
+ "currency": "CHF",
+ **scope,
+ })
+ cumulativeOpeningOfYear = running
+ return rows
# ===== Low-level bulk helpers =====
diff --git a/modules/features/trustee/accounting/accountingRegistry.py b/modules/features/trustee/accounting/accountingRegistry.py
index ca5e27d9..fe1b20d5 100644
--- a/modules/features/trustee/accounting/accountingRegistry.py
+++ b/modules/features/trustee/accounting/accountingRegistry.py
@@ -74,7 +74,7 @@ class AccountingRegistry:
_registryInstance: Optional[AccountingRegistry] = None
-def _getAccountingRegistry() -> AccountingRegistry:
+def getAccountingRegistry() -> AccountingRegistry:
"""Singleton access to the accounting registry."""
global _registryInstance
if _registryInstance is None:
diff --git a/modules/features/trustee/accounting/connectors/accountingConnectorAbacus.py b/modules/features/trustee/accounting/connectors/accountingConnectorAbacus.py
index 0269a654..e03e7df7 100644
--- a/modules/features/trustee/accounting/connectors/accountingConnectorAbacus.py
+++ b/modules/features/trustee/accounting/connectors/accountingConnectorAbacus.py
@@ -6,12 +6,22 @@ API docs: https://downloads.abacus.ch/fileadmin/ablage/abaconnect/htmlfiles/docs
Auth: OAuth 2.0 Client Credentials (Service User).
Each Abacus instance has its own host URL; there is no central cloud endpoint.
Entity API uses OData V4 format.
+
+Account balances:
+ Abacus exposes an ``AccountBalances`` entity (per fiscal year), but its
+ availability depends on the customer's Abacus license / Profile and is
+ NOT guaranteed for all instances. The robust default is therefore to
+ aggregate balances locally from ``GeneralJournalEntries`` (always
+ present). If a future iteration confirms the entity for a specific
+ instance, ``getAccountBalances`` can be extended to prefer that source
+ via a config flag (e.g. ``useAccountBalancesEntity: true``).
"""
import base64
+import calendar
import logging
import time
-from typing import List, Dict, Any, Optional
+from typing import List, Dict, Any, Optional, Tuple
import aiohttp
@@ -19,6 +29,7 @@ from ..accountingConnectorBase import (
BaseAccountingConnector,
AccountingBooking,
AccountingChart,
+ AccountingPeriodBalance,
ConnectorConfigField,
SyncResult,
)
@@ -27,6 +38,21 @@ from modules.shared.i18nRegistry import t
logger = logging.getLogger(__name__)
+def _formatLastDayOfMonth(year: int, month: int) -> str:
+ lastDay = calendar.monthrange(year, month)[1]
+ return f"{year:04d}-{month:02d}-{lastDay:02d}"
+
+
+def _isIncomeStatementAccount(accountNumber: str) -> bool:
+ """Swiss KMU-Kontenrahmen heuristic: 1xxx + 2xxx -> balance sheet (cumulative);
+ 3xxx..9xxx -> income statement (reset per fiscal year).
+ """
+ a = (accountNumber or "").strip()
+ if not a or not a[0].isdigit():
+ return False
+ return a[0] not in ("1", "2")
+
+
class AccountingConnectorAbacus(BaseAccountingConnector):
def __init__(self):
@@ -341,3 +367,158 @@ class AccountingConnectorAbacus(BaseAccountingConnector):
except Exception as e:
logger.error(f"Abacus getVendors error: {e}")
return []
+
async def getAccountBalances(
    self,
    config: Dict[str, Any],
    years: List[int],
    accountNumbers: Optional[List[str]] = None,
) -> List[AccountingPeriodBalance]:
    """Aggregate account balances from ``GeneralJournalEntries`` (OData V4).

    Strategy:
    1. Page through ``GET GeneralJournalEntries?$filter=JournalDate le YYYY-12-31``
       until ``@odata.nextLink`` is exhausted. Including ALL prior years
       is required to compute the carry-over for balance-sheet accounts.
    2. Per (account, year, month) accumulate ``DebitAmount``/``CreditAmount``
       from ``Lines``.
    3. Income-statement accounts (3xxx-9xxx) reset to 0 per fiscal year;
       balance-sheet accounts (1xxx-2xxx) carry their cumulative balance.

    Fixes over the previous walk: prior-year carry-over is pre-computed in
    one pass over the movements (instead of one rescan per account), and
    the carry-over walk covers every year from min(years) to max(years)
    even when ``years`` has gaps, so movements in unrequested in-between
    years still flow into the opening balance of later requested years.
    Rows are emitted only for the requested years.
    """
    if not years:
        return []
    requestedYears = sorted({int(y) for y in years if y})
    requestedSet = set(requestedYears)
    minYear = requestedYears[0]
    maxYear = requestedYears[-1]
    accountNumbersSet = set(accountNumbers) if accountNumbers else None

    headers = await self._buildAuthHeaders(config)
    if not headers:
        logger.warning("Abacus getAccountBalances: no access token, skipping")
        return []

    rawEntries = await self._fetchAllJournalEntries(config, headers, dateTo=f"{maxYear}-12-31")

    movements: Dict[Tuple[str, int, int], Dict[str, float]] = {}
    # Cumulative (debit - credit) per account over all years before minYear;
    # computed in this single pass instead of rescanning per account.
    priorBalances: Dict[str, float] = {}
    seenAccounts: set = set()
    for entry in rawEntries:
        dateRaw = str(entry.get("JournalDate") or "")[:10]
        if len(dateRaw) < 7:
            continue
        try:
            year = int(dateRaw[:4])
            month = int(dateRaw[5:7])
        except ValueError:
            continue
        for line in (entry.get("Lines") or []):
            accNo = str(line.get("AccountId") or "").strip()
            if not accNo:
                continue
            seenAccounts.add(accNo)
            try:
                debit = float(line.get("DebitAmount") or 0)
                credit = float(line.get("CreditAmount") or 0)
            except (TypeError, ValueError):
                continue
            if debit == 0 and credit == 0:
                continue
            if year < minYear:
                priorBalances[accNo] = priorBalances.get(accNo, 0.0) + debit - credit
            else:
                bucket = movements.setdefault((accNo, year, month), {"debit": 0.0, "credit": 0.0})
                bucket["debit"] += debit
                bucket["credit"] += credit

    results: List[AccountingPeriodBalance] = []
    for accNo in sorted(seenAccounts):
        if accountNumbersSet is not None and accNo not in accountNumbersSet:
            continue
        isER = _isIncomeStatementAccount(accNo)
        # ER accounts never carry a balance across fiscal years.
        cumulativeOpeningOfYear = 0.0 if isER else priorBalances.get(accNo, 0.0)
        for year in range(minYear, maxYear + 1):
            yearOpening = 0.0 if isER else cumulativeOpeningOfYear
            running = yearOpening
            yearDebit = 0.0
            yearCredit = 0.0
            emitRows = year in requestedSet
            for month in range(1, 13):
                opening = running
                mov = movements.get((accNo, year, month), {"debit": 0.0, "credit": 0.0})
                running = opening + mov["debit"] - mov["credit"]
                yearDebit += mov["debit"]
                yearCredit += mov["credit"]
                if emitRows:
                    results.append(AccountingPeriodBalance(
                        accountNumber=accNo,
                        periodYear=year,
                        periodMonth=month,
                        openingBalance=round(opening, 2),
                        debitTotal=round(mov["debit"], 2),
                        creditTotal=round(mov["credit"], 2),
                        closingBalance=round(running, 2),
                        currency="CHF",
                        asOfDate=_formatLastDayOfMonth(year, month),
                    ))
            if emitRows:
                # Annual bucket (periodMonth=0): fiscal year-end closing.
                results.append(AccountingPeriodBalance(
                    accountNumber=accNo,
                    periodYear=year,
                    periodMonth=0,
                    openingBalance=round(yearOpening, 2),
                    debitTotal=round(yearDebit, 2),
                    creditTotal=round(yearCredit, 2),
                    closingBalance=round(running, 2),
                    currency="CHF",
                    asOfDate=f"{year}-12-31",
                ))
            cumulativeOpeningOfYear = running

    logger.info(
        "Abacus getAccountBalances: %s rows from %s journal entries (years=%s)",
        len(results), len(rawEntries), requestedYears,
    )
    return results
+
async def _fetchAllJournalEntries(
    self,
    config: Dict[str, Any],
    headers: Dict[str, str],
    dateTo: str,
) -> List[Dict[str, Any]]:
    """Collect every ``GeneralJournalEntries`` page (OData V4) via ``@odata.nextLink``.

    Only an upper bound (``JournalDate le dateTo``) is applied; all prior
    years are deliberately included so cumulative balance-sheet carry-over
    stays correct. On an HTTP error, a request failure, or a malformed page
    the pagination stops and whatever was collected so far is returned.
    """
    collected: List[Dict[str, Any]] = []
    nextUrl: Optional[str] = self._buildEntityUrl(
        config, f"GeneralJournalEntries?$filter=JournalDate le {dateTo}"
    )
    async with aiohttp.ClientSession() as session:
        while nextUrl:
            try:
                async with session.get(
                    nextUrl, headers=headers, timeout=aiohttp.ClientTimeout(total=60)
                ) as resp:
                    if resp.status != 200:
                        body = await resp.text()
                        logger.warning("Abacus GeneralJournalEntries HTTP %s: %s", resp.status, body[:200])
                        break
                    payload = await resp.json()
            except Exception as ex:
                logger.warning("Abacus GeneralJournalEntries request failed: %s", ex)
                break
            page = payload.get("value") or []
            if not isinstance(page, list):
                break
            collected.extend(page)
            nextUrl = payload.get("@odata.nextLink")
    return collected
diff --git a/modules/features/trustee/accounting/connectors/accountingConnectorBexio.py b/modules/features/trustee/accounting/connectors/accountingConnectorBexio.py
index dcb3233d..28c2a334 100644
--- a/modules/features/trustee/accounting/connectors/accountingConnectorBexio.py
+++ b/modules/features/trustee/accounting/connectors/accountingConnectorBexio.py
@@ -7,10 +7,20 @@ Auth: Personal Access Token (PAT) as Bearer token.
Base URL: https://api.bexio.com/
Note: Bexio uses internal account IDs (int), not account numbers.
The connector caches the chart of accounts to resolve accountNumber -> account_id.
+
+Account balances:
+ Bexio does NOT expose a dedicated saldo endpoint (no equivalent to RMA's
+ ``/gl/saldo``). ``getAccountBalances`` therefore aggregates balances
+ locally by paginating ``GET /3.0/accounting/journal`` (max 2000 rows per
+ page) and computing cumulative balances per (account, period). Income-
+ statement accounts (3xxx-9xxx in the Swiss KMU-Kontenrahmen) are reset
+ at the start of each fiscal year; balance-sheet accounts (1xxx-2xxx)
+ carry their cumulative balance across years.
"""
+import calendar
import logging
-from typing import List, Dict, Any, Optional
+from typing import List, Dict, Any, Optional, Tuple
import aiohttp
@@ -18,6 +28,7 @@ from ..accountingConnectorBase import (
BaseAccountingConnector,
AccountingBooking,
AccountingChart,
+ AccountingPeriodBalance,
ConnectorConfigField,
SyncResult,
)
@@ -26,6 +37,23 @@ from modules.shared.i18nRegistry import t
logger = logging.getLogger(__name__)
_DEFAULT_API_BASE_URL = "https://api.bexio.com/"
+_JOURNAL_PAGE_SIZE = 2000
+
+
+def _formatLastDayOfMonth(year: int, month: int) -> str:
+ lastDay = calendar.monthrange(year, month)[1]
+ return f"{year:04d}-{month:02d}-{lastDay:02d}"
+
+
+def _isIncomeStatementAccount(accountNumber: str) -> bool:
+ """Swiss KMU-Kontenrahmen: 1xxx Aktiven + 2xxx Passiven -> balance sheet
+ (cumulative balance carried across years); 3xxx..9xxx -> income statement
+ (reset to 0 at fiscal-year start).
+ """
+ a = (accountNumber or "").strip()
+ if not a or not a[0].isdigit():
+ return False
+ return a[0] not in ("1", "2")
class AccountingConnectorBexio(BaseAccountingConnector):
@@ -260,3 +288,148 @@ class AccountingConnectorBexio(BaseAccountingConnector):
except Exception as e:
logger.error(f"Bexio getCustomers error: {e}")
return []
+
async def getAccountBalances(
    self,
    config: Dict[str, Any],
    years: List[int],
    accountNumbers: Optional[List[str]] = None,
) -> List[AccountingPeriodBalance]:
    """Aggregate account balances locally from ``/3.0/accounting/journal``.

    Bexio offers no per-account saldo endpoint, so the full journal is
    paginated up to the latest requested fiscal year-end and opening /
    debit / credit / closing are computed per (account, period). Balance-
    sheet accounts include the cumulative carry-over from prior years;
    income-statement accounts reset at the start of every fiscal year (per
    Swiss accounting principles).

    Fixes over the previous walk: prior-year carry-over is pre-computed in
    a single pass over the journal (instead of one rescan per account), and
    the walk covers every year from min(years) to max(years) even when
    ``years`` has gaps, so unrequested in-between years still feed the
    opening balance of later requested years. Rows are emitted only for
    the requested years.
    """
    if not years:
        return []
    requestedYears = sorted({int(y) for y in years if y})
    requestedSet = set(requestedYears)
    minYear = requestedYears[0]
    maxYear = requestedYears[-1]
    accountNumbersSet = set(accountNumbers) if accountNumbers else None

    accounts = await self._loadRawAccounts(config)
    # Bexio journal rows reference internal account IDs, not account numbers.
    accIdToNumber: Dict[int, str] = {
        acc.get("id"): str(acc.get("account_no", ""))
        for acc in accounts
        if acc.get("id") is not None and acc.get("account_no") is not None
    }
    if not accIdToNumber:
        logger.warning("Bexio getAccountBalances: chart of accounts is empty -- cannot derive balances")
        return []

    rawEntries = await self._fetchAllJournalRows(config, dateTo=f"{maxYear}-12-31")

    movements: Dict[Tuple[str, int, int], Dict[str, float]] = {}
    # Cumulative (debit - credit) per account before minYear, one pass.
    priorBalances: Dict[str, float] = {}
    for e in rawEntries:
        dateRaw = str(e.get("date") or "")[:10]
        if len(dateRaw) < 7:
            continue
        try:
            year = int(dateRaw[:4])
            month = int(dateRaw[5:7])
        except ValueError:
            continue
        try:
            amount = float(e.get("amount") or 0)
        except (TypeError, ValueError):
            continue
        if amount == 0:
            continue
        debitAcc = accIdToNumber.get(e.get("debit_account_id"))
        creditAcc = accIdToNumber.get(e.get("credit_account_id"))
        if year < minYear:
            if debitAcc:
                priorBalances[debitAcc] = priorBalances.get(debitAcc, 0.0) + amount
            if creditAcc:
                priorBalances[creditAcc] = priorBalances.get(creditAcc, 0.0) - amount
            continue
        if debitAcc:
            bucket = movements.setdefault((debitAcc, year, month), {"debit": 0.0, "credit": 0.0})
            bucket["debit"] += amount
        if creditAcc:
            bucket = movements.setdefault((creditAcc, year, month), {"debit": 0.0, "credit": 0.0})
            bucket["credit"] += amount

    results: List[AccountingPeriodBalance] = []
    # Every account in the chart gets rows (zero rows included), as before.
    for accNo in sorted({n for n in accIdToNumber.values() if n}):
        if accountNumbersSet is not None and accNo not in accountNumbersSet:
            continue
        isER = _isIncomeStatementAccount(accNo)
        cumulativeOpeningOfYear = 0.0 if isER else priorBalances.get(accNo, 0.0)
        for year in range(minYear, maxYear + 1):
            yearOpening = 0.0 if isER else cumulativeOpeningOfYear
            running = yearOpening
            yearDebit = 0.0
            yearCredit = 0.0
            emitRows = year in requestedSet
            for month in range(1, 13):
                opening = running
                mov = movements.get((accNo, year, month), {"debit": 0.0, "credit": 0.0})
                running = opening + mov["debit"] - mov["credit"]
                yearDebit += mov["debit"]
                yearCredit += mov["credit"]
                if emitRows:
                    results.append(AccountingPeriodBalance(
                        accountNumber=accNo,
                        periodYear=year,
                        periodMonth=month,
                        openingBalance=round(opening, 2),
                        debitTotal=round(mov["debit"], 2),
                        creditTotal=round(mov["credit"], 2),
                        closingBalance=round(running, 2),
                        currency="CHF",
                        asOfDate=_formatLastDayOfMonth(year, month),
                    ))
            if emitRows:
                # Annual bucket (periodMonth=0): fiscal year-end closing.
                results.append(AccountingPeriodBalance(
                    accountNumber=accNo,
                    periodYear=year,
                    periodMonth=0,
                    openingBalance=round(yearOpening, 2),
                    debitTotal=round(yearDebit, 2),
                    creditTotal=round(yearCredit, 2),
                    closingBalance=round(running, 2),
                    currency="CHF",
                    asOfDate=f"{year}-12-31",
                ))
            cumulativeOpeningOfYear = running

    logger.info("Bexio getAccountBalances: %s rows from %s journal entries (years=%s)", len(results), len(rawEntries), requestedYears)
    return results
+
async def _fetchAllJournalRows(self, config: Dict[str, Any], dateTo: str) -> List[Dict[str, Any]]:
    """Paginate ``GET /3.0/accounting/journal?to=YYYY-12-31`` and return all rows.

    Bexio caps the page size at 2000, so pages are fetched until a short
    (or empty) page signals the end. Failures abort early and return the
    rows collected so far -- the caller logs the row count, which makes
    partial data visible.
    """
    collected: List[Dict[str, Any]] = []
    endpoint = self._buildUrl(config, "3.0/accounting/journal")
    offset = 0
    async with aiohttp.ClientSession() as session:
        while True:
            query = {"to": dateTo, "limit": str(_JOURNAL_PAGE_SIZE), "offset": str(offset)}
            try:
                async with session.get(
                    endpoint,
                    headers=self._buildHeaders(config),
                    params=query,
                    timeout=aiohttp.ClientTimeout(total=60),
                ) as resp:
                    if resp.status != 200:
                        body = await resp.text()
                        logger.warning("Bexio /accounting/journal HTTP %s offset=%s: %s", resp.status, offset, body[:200])
                        break
                    page = await resp.json()
            except Exception as ex:
                logger.warning("Bexio /accounting/journal request failed offset=%s: %s", offset, ex)
                break
            if not isinstance(page, list) or not page:
                break
            collected.extend(page)
            if len(page) < _JOURNAL_PAGE_SIZE:
                break
            offset += _JOURNAL_PAGE_SIZE
    return collected
diff --git a/modules/features/trustee/accounting/connectors/accountingConnectorRma.py b/modules/features/trustee/accounting/connectors/accountingConnectorRma.py
index 9e372099..98634127 100644
--- a/modules/features/trustee/accounting/connectors/accountingConnectorRma.py
+++ b/modules/features/trustee/accounting/connectors/accountingConnectorRma.py
@@ -9,6 +9,7 @@ Base URL: https://service.runmyaccounts.com/api/latest/clients/{clientName}/
"""
import asyncio
+import calendar
import json
import logging
import re
@@ -21,6 +22,7 @@ from ..accountingConnectorBase import (
BaseAccountingConnector,
AccountingBooking,
AccountingChart,
+ AccountingPeriodBalance,
ConnectorConfigField,
SyncResult,
)
@@ -31,6 +33,73 @@ logger = logging.getLogger(__name__)
_DEFAULT_API_BASE_URL = "https://service.runmyaccounts.com/api/latest/clients/"
+def _formatLastDayOfMonth(year: int, month: int) -> str:
+ """Return ``YYYY-MM-DD`` of the last day of a calendar month."""
+ lastDay = calendar.monthrange(year, month)[1]
+ return f"{year:04d}-{month:02d}-{lastDay:02d}"
+
+
+def _isIncomeStatementAccount(accountNumber: str) -> bool:
+ """Decide whether an account is part of the income statement (Erfolgsrechnung).
+
+ Swiss KMU-Kontenrahmen: 1xxx Aktiven, 2xxx Passiven (incl. 28xx
+ Eigenkapital) -> balance sheet; 3xxx..9xxx -> income statement.
+ Used by the RMA connector to choose between the two `/gl/saldo` query
+ variants (with vs. without ``from`` parameter).
+ """
+ a = (accountNumber or "").strip()
+ if not a or not a[0].isdigit():
+ return False
+ return a[0] not in ("1", "2")
+
+
+def _parseSaldoBody(body: str) -> List[tuple]:
+ """Parse the response body of ``GET /gl/saldo`` (JSON or XML).
+
+ Returns a list of ``(accountNumber, saldo)`` tuples. The endpoint
+ delivers ``{"row": [{"column": [accno, label, saldo]}, ...]}`` (JSON) or
+ ``accno label saldo
...``
+ (XML). Rows that cannot be parsed are silently skipped to keep one bad row
+ from poisoning the whole sync.
+ """
+ if not body or not body.strip():
+ return []
+ rows: List[tuple] = []
+ try:
+ data = json.loads(body)
+ items = data.get("row") if isinstance(data, dict) else data
+ if isinstance(items, dict):
+ items = [items]
+ if isinstance(items, list):
+ for item in items:
+ if not isinstance(item, dict):
+ continue
+ cols = item.get("column") or []
+ if isinstance(cols, list) and len(cols) >= 3:
+ accno = str(cols[0]).strip()
+ try:
+ saldo = float(cols[2])
+ except (TypeError, ValueError):
+ continue
+ if accno:
+ rows.append((accno, saldo))
+ return rows
+ except (json.JSONDecodeError, ValueError):
+ pass
+ rowMatches = re.findall(r"(.*?)
", body, re.DOTALL)
+ for raw in rowMatches:
+ cols = re.findall(r"([^<]*) ", raw)
+ if len(cols) >= 3:
+ accno = cols[0].strip()
+ try:
+ saldo = float(cols[2])
+ except (TypeError, ValueError):
+ continue
+ if accno:
+ rows.append((accno, saldo))
+ return rows
+
+
class AccountingConnectorRma(BaseAccountingConnector):
def getConnectorType(self) -> str:
@@ -447,6 +516,191 @@ class AccountingConnectorRma(BaseAccountingConnector):
logger.error(f"RMA getJournalEntries error: {e}", exc_info=True)
return []
+ async def getAccountBalances(
+ self,
+ config: Dict[str, Any],
+ years: List[int],
+ accountNumbers: Optional[List[str]] = None,
+ ) -> List[AccountingPeriodBalance]:
+ """Fetch authoritative closing balances per account and period via RMA's
+ ``GET /gl/saldo`` endpoint.
+
+ For each requested year we issue 13 API calls (one per month-end + one
+ for the prior fiscal year-end as opening reference). The endpoint
+ returns the cumulative balance per account at the requested ``to`` date,
+ already including prior-year carry-over and yearend bookings -- which
+ is exactly the value the local journal-line aggregation cannot
+ reconstruct when the import window covers only part of the history.
+
+ ``accno`` is mandatory; we use a digit-length-grouped wildcard
+ (``xxxx`` matches all 4-digit accounts, ``xxxxx`` all 5-digit, etc.)
+ derived from the chart of accounts, so 1-2 calls cover every account
+ per period.
+ """
+ if not years:
+ return []
+
+ accountNumbersSet: Optional[set] = set(accountNumbers) if accountNumbers else None
+ wildcardPatterns = await self._resolveWildcardPatterns(config)
+ if not wildcardPatterns:
+ logger.warning("RMA getAccountBalances: chart of accounts is empty, no wildcards derivable")
+ return []
+
+ results: List[AccountingPeriodBalance] = []
+ sortedYears = sorted({int(y) for y in years if y})
+
+ for year in sortedYears:
+ priorYearEnd = f"{year - 1}-12-31"
+ priorSaldosRaw = await self._fetchSaldoMapForDate(config, wildcardPatterns, priorYearEnd)
+ # ER (income statement) accounts reset to 0 at the start of each
+ # fiscal year -- prior-year YTD must NOT carry forward as opening.
+ priorSaldos = {a: (0.0 if _isIncomeStatementAccount(a) else v) for a, v in priorSaldosRaw.items()}
+
+ runningOpening: Dict[str, float] = dict(priorSaldos)
+ decSaldos: Dict[str, float] = {}
+
+ for month in range(1, 13):
+ lastDay = _formatLastDayOfMonth(year, month)
+ saldos = await self._fetchSaldoMapForDate(config, wildcardPatterns, lastDay)
+
+ accountKeys = set(saldos.keys()) | set(runningOpening.keys())
+ for accno in accountKeys:
+ if accountNumbersSet is not None and accno not in accountNumbersSet:
+ continue
+ closing = saldos.get(accno, runningOpening.get(accno, 0.0))
+ opening = runningOpening.get(accno, 0.0)
+ results.append(AccountingPeriodBalance(
+ accountNumber=accno,
+ periodYear=year,
+ periodMonth=month,
+ openingBalance=round(opening, 2),
+ closingBalance=round(closing, 2),
+ currency="CHF",
+ asOfDate=lastDay,
+ ))
+ runningOpening = {**runningOpening, **saldos}
+ if month == 12:
+ decSaldos = dict(saldos)
+
+ annualKeys = set(decSaldos.keys()) | set(priorSaldos.keys())
+ for accno in annualKeys:
+ if accountNumbersSet is not None and accno not in accountNumbersSet:
+ continue
+ closing = decSaldos.get(accno, priorSaldos.get(accno, 0.0))
+ opening = priorSaldos.get(accno, 0.0)
+ results.append(AccountingPeriodBalance(
+ accountNumber=accno,
+ periodYear=year,
+ periodMonth=0,
+ openingBalance=round(opening, 2),
+ closingBalance=round(closing, 2),
+ currency="CHF",
+ asOfDate=f"{year}-12-31",
+ ))
+
+ logger.info(
+ "RMA getAccountBalances: %s rows for years=%s, wildcards=%s",
+ len(results), sortedYears, wildcardPatterns,
+ )
+ return results
+
+ async def _resolveWildcardPatterns(self, config: Dict[str, Any]) -> List[str]:
+ """Derive `accno` wildcard patterns from the chart of accounts.
+
+ RMA's `/gl/saldo` requires `accno`; using digit-length-grouped
+ wildcards (`xxxx`, `xxxxx`, ...) lets us cover every account in 1-2
+ calls per period instead of one call per account number.
+ """
+ try:
+ charts = await self.getChartOfAccounts(config)
+ except Exception as ex:
+ logger.warning("RMA _resolveWildcardPatterns: getChartOfAccounts failed: %s", ex)
+ return []
+ lengths = set()
+ for c in charts:
+ accno = (c.accountNumber or "").strip()
+ if accno.isdigit():
+ lengths.add(len(accno))
+ return [("x" * n) for n in sorted(lengths)]
+
+ async def _fetchSaldoMapForDate(
+ self,
+ config: Dict[str, Any],
+ wildcardPatterns: List[str],
+ toDate: str,
+ ) -> Dict[str, float]:
+ """Call `/gl/saldo` and return ``{accountNumber: cumulativeSaldo}``.
+
+ Per RMA docs ("Warning: Chart of the balance sheet do not need a from
+ date. Charts of the income statement need from and to parameter."),
+ we issue **two** calls per pattern:
+
+ * No ``from`` -> correct cumulative saldo for balance-sheet accounts
+ (1xxx, 2xxx in Swiss KMU-Kontenrahmen).
+ * ``from=YYYY-01-01`` (year of ``toDate``) -> correct YTD result for
+ income-statement accounts (3xxx..9xxx, which reset annually).
+
+ Per account number we keep the value from the appropriate call.
+ Empty / failed responses are logged at DEBUG and skipped to avoid
+ aborting the whole sync.
+ """
+ yearStart = f"{toDate[:4]}-01-01"
+ bsRows: Dict[str, float] = {}
+ erRows: Dict[str, float] = {}
+ for pattern in wildcardPatterns:
+ try:
+ bs = await self._fetchSaldoRows(config, accno=pattern, fromDate=None, toDate=toDate)
+ except Exception as ex:
+ logger.debug("RMA _fetchSaldoMapForDate(BS, pattern=%s, to=%s) failed: %s", pattern, toDate, ex)
+ bs = []
+ try:
+ er = await self._fetchSaldoRows(config, accno=pattern, fromDate=yearStart, toDate=toDate)
+ except Exception as ex:
+ logger.debug("RMA _fetchSaldoMapForDate(ER, pattern=%s, %s..%s) failed: %s", pattern, yearStart, toDate, ex)
+ er = []
+ for accno, saldo in bs:
+ bsRows[accno] = saldo
+ for accno, saldo in er:
+ erRows[accno] = saldo
+
+ merged: Dict[str, float] = {}
+ for accno in set(bsRows) | set(erRows):
+ if _isIncomeStatementAccount(accno):
+ merged[accno] = erRows.get(accno, bsRows.get(accno, 0.0))
+ else:
+ merged[accno] = bsRows.get(accno, erRows.get(accno, 0.0))
+ return merged
+
+ async def _fetchSaldoRows(
+ self,
+ config: Dict[str, Any],
+ accno: str,
+ fromDate: Optional[str],
+ toDate: str,
+ ) -> List[tuple]:
+ """Single `/gl/saldo` call. Returns list of ``(accountNumber, saldo)`` tuples."""
+ url = self._buildUrl(config, "gl/saldo")
+ params: Dict[str, str] = {
+ "accno": accno,
+ "to": toDate,
+ "bookkeeping_main_curr": "true",
+ }
+ if fromDate:
+ params["from"] = fromDate
+ async with aiohttp.ClientSession() as session:
+ async with session.get(
+ url,
+ headers=self._buildHeaders(config),
+ params=params,
+ timeout=aiohttp.ClientTimeout(total=20),
+ ) as resp:
+ if resp.status != 200:
+ body = await resp.text()
+ logger.debug("RMA /gl/saldo accno=%s from=%s to=%s -> HTTP %s: %s", accno, fromDate, toDate, resp.status, body[:200])
+ return []
+ body = await resp.text()
+ return _parseSaldoBody(body)
+
async def _fetchGlBulk(self, config: Dict[str, Any], params: Dict[str, str]) -> List[Dict[str, Any]]:
"""Try GET /gl to fetch journal entries in bulk (not all RMA versions support this)."""
try:
diff --git a/modules/features/trustee/interfaceFeatureTrustee.py b/modules/features/trustee/interfaceFeatureTrustee.py
index b1a6aab6..9f1c911a 100644
--- a/modules/features/trustee/interfaceFeatureTrustee.py
+++ b/modules/features/trustee/interfaceFeatureTrustee.py
@@ -1109,10 +1109,15 @@ class TrusteeObjects:
)
def _cleanDocumentRecords(records):
- return [
- TrusteeDocument(**{k: v for k, v in r.items() if not k.startswith("_") and k != "documentData"})
- for r in records
- ]
+ cleaned = []
+ for r in records:
+ labelCols = {k: v for k, v in r.items() if k.endswith("Label")}
+ filteredFields = {k: v for k, v in r.items() if not k.startswith("_") and k != "documentData"}
+ doc = TrusteeDocument(**filteredFields)
+ d = doc.model_dump()
+ d.update(labelCols)
+ cleaned.append(d)
+ return cleaned
if isinstance(result, PaginatedResult):
result.items = _cleanDocumentRecords(result.items)
@@ -1133,10 +1138,15 @@ class TrusteeObjects:
)
def _cleanDocumentRecords(records):
- return [
- TrusteeDocument(**{k: v for k, v in r.items() if not k.startswith("_") and k != "documentData"})
- for r in records
- ]
+ cleaned = []
+ for r in records:
+ labelCols = {k: v for k, v in r.items() if k.endswith("Label")}
+ filteredFields = {k: v for k, v in r.items() if not k.startswith("_") and k != "documentData"}
+ doc = TrusteeDocument(**filteredFields)
+ d = doc.model_dump()
+ d.update(labelCols)
+ cleaned.append(d)
+ return cleaned
if isinstance(result, PaginatedResult):
result.items = _cleanDocumentRecords(result.items)
@@ -1297,10 +1307,13 @@ class TrusteeObjects:
def _cleanAndValidate(records):
items = []
for record in records:
+ labelCols = {k: v for k, v in record.items() if k.endswith("Label")}
cleanedRecord = {k: v for k, v in record.items() if not k.startswith("_") or k in keepFields}
position = self._toTrusteePositionOrDelete(cleanedRecord, deleteCorrupt=True)
if position is not None:
- items.append(position)
+ d = position.model_dump()
+ d.update(labelCols)
+ items.append(d)
return items
if isinstance(result, PaginatedResult):
diff --git a/modules/features/trustee/mainTrustee.py b/modules/features/trustee/mainTrustee.py
index 0799fa1c..020aeda5 100644
--- a/modules/features/trustee/mainTrustee.py
+++ b/modules/features/trustee/mainTrustee.py
@@ -394,9 +394,15 @@ TEMPLATE_WORKFLOWS = [
{"id": "extract", "type": "trustee.extractFromFiles", "label": "Dokumente extrahieren", "_method": "trustee", "_action": "extractFromFiles",
"parameters": {"featureInstanceId": "{{featureInstanceId}}", "prompt": ""}, "position": {"x": 250, "y": 0}},
{"id": "process", "type": "trustee.processDocuments", "label": "Verarbeiten", "_method": "trustee", "_action": "processDocuments",
- "parameters": {"documentList": [], "featureInstanceId": "{{featureInstanceId}}"}, "position": {"x": 500, "y": 0}},
+ "parameters": {
+ "documentList": {"type": "ref", "nodeId": "extract", "path": ["documents"]},
+ "featureInstanceId": "{{featureInstanceId}}",
+ }, "position": {"x": 500, "y": 0}},
{"id": "sync", "type": "trustee.syncToAccounting", "label": "Synchronisieren", "_method": "trustee", "_action": "syncToAccounting",
- "parameters": {"documentList": [], "featureInstanceId": "{{featureInstanceId}}"}, "position": {"x": 750, "y": 0}},
+ "parameters": {
+ "documentList": {"type": "ref", "nodeId": "process", "path": ["documents"]},
+ "featureInstanceId": "{{featureInstanceId}}",
+ }, "position": {"x": 750, "y": 0}},
],
"connections": [
{"source": "trigger", "sourcePort": 0, "target": "extract", "targetPort": 0},
diff --git a/modules/features/trustee/routeFeatureTrustee.py b/modules/features/trustee/routeFeatureTrustee.py
index fbdd0966..021251fc 100644
--- a/modules/features/trustee/routeFeatureTrustee.py
+++ b/modules/features/trustee/routeFeatureTrustee.py
@@ -412,34 +412,41 @@ def get_position_options(
# ===== Organisation Routes =====
-@router.get("/{instanceId}/organisations", response_model=PaginatedResponse[TrusteeOrganisation])
+@router.get("/{instanceId}/organisations")
@limiter.limit("30/minute")
def get_organisations(
request: Request,
instanceId: str = Path(..., description="Feature Instance ID"),
pagination: Optional[str] = Query(None, description="JSON-encoded PaginationParams"),
context: RequestContext = Depends(getRequestContext)
-) -> PaginatedResponse[TrusteeOrganisation]:
+):
"""Get all organisations for a feature instance with optional pagination."""
+ from modules.routes.routeHelpers import enrichRowsWithFkLabels
mandateId = _validateInstanceAccess(instanceId, context)
paginationParams = _parsePagination(pagination)
interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
result = interface.getAllOrganisations(paginationParams)
+ def _toDicts(items):
+ return [r.model_dump() if hasattr(r, "model_dump") else r for r in items]
+
if paginationParams and hasattr(result, 'items'):
- return PaginatedResponse(
- items=result.items,
- pagination=PaginationMetadata(
+ enriched = enrichRowsWithFkLabels(_toDicts(result.items), TrusteeOrganisation)
+ return {
+ "items": enriched,
+ "pagination": PaginationMetadata(
currentPage=paginationParams.page or 1,
pageSize=paginationParams.pageSize or 20,
totalItems=result.totalItems,
totalPages=result.totalPages,
sort=paginationParams.sort if paginationParams else [],
filters=paginationParams.filters if paginationParams else None
- )
- )
- return PaginatedResponse(items=result if isinstance(result, list) else result.items, pagination=None)
+ ).model_dump(),
+ }
+ items = result if isinstance(result, list) else result.items
+ enriched = enrichRowsWithFkLabels(_toDicts(items), TrusteeOrganisation)
+ return {"items": enriched, "pagination": None}
@router.get("/{instanceId}/organisations/{orgId}", response_model=TrusteeOrganisation)
@@ -525,34 +532,41 @@ def delete_organisation(
# ===== Role Routes =====
-@router.get("/{instanceId}/roles", response_model=PaginatedResponse[TrusteeRole])
+@router.get("/{instanceId}/roles")
@limiter.limit("30/minute")
def get_roles(
request: Request,
instanceId: str = Path(..., description="Feature Instance ID"),
pagination: Optional[str] = Query(None),
context: RequestContext = Depends(getRequestContext)
-) -> PaginatedResponse[TrusteeRole]:
+):
"""Get all roles with optional pagination."""
+ from modules.routes.routeHelpers import enrichRowsWithFkLabels
mandateId = _validateInstanceAccess(instanceId, context)
paginationParams = _parsePagination(pagination)
interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
result = interface.getAllRoles(paginationParams)
+ def _toDicts(items):
+ return [r.model_dump() if hasattr(r, "model_dump") else r for r in items]
+
if paginationParams and hasattr(result, 'items'):
- return PaginatedResponse(
- items=result.items,
- pagination=PaginationMetadata(
+ enriched = enrichRowsWithFkLabels(_toDicts(result.items), TrusteeRole)
+ return {
+ "items": enriched,
+ "pagination": PaginationMetadata(
currentPage=paginationParams.page or 1,
pageSize=paginationParams.pageSize or 20,
totalItems=result.totalItems,
totalPages=result.totalPages,
sort=paginationParams.sort if paginationParams else [],
filters=paginationParams.filters if paginationParams else None
- )
- )
- return PaginatedResponse(items=result if isinstance(result, list) else result.items, pagination=None)
+ ).model_dump(),
+ }
+ items = result if isinstance(result, list) else result.items
+ enriched = enrichRowsWithFkLabels(_toDicts(items), TrusteeRole)
+ return {"items": enriched, "pagination": None}
@router.get("/{instanceId}/roles/{roleId}", response_model=TrusteeRole)
@@ -638,34 +652,41 @@ def delete_role(
# ===== Access Routes =====
-@router.get("/{instanceId}/access", response_model=PaginatedResponse[TrusteeAccess])
+@router.get("/{instanceId}/access")
@limiter.limit("30/minute")
def get_all_access(
request: Request,
instanceId: str = Path(..., description="Feature Instance ID"),
pagination: Optional[str] = Query(None),
context: RequestContext = Depends(getRequestContext)
-) -> PaginatedResponse[TrusteeAccess]:
+):
"""Get all access records with optional pagination."""
+ from modules.routes.routeHelpers import enrichRowsWithFkLabels
mandateId = _validateInstanceAccess(instanceId, context)
paginationParams = _parsePagination(pagination)
interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
result = interface.getAllAccess(paginationParams)
+ def _toDicts(items):
+ return [r.model_dump() if hasattr(r, "model_dump") else r for r in items]
+
if paginationParams and hasattr(result, 'items'):
- return PaginatedResponse(
- items=result.items,
- pagination=PaginationMetadata(
+ enriched = enrichRowsWithFkLabels(_toDicts(result.items), TrusteeAccess)
+ return {
+ "items": enriched,
+ "pagination": PaginationMetadata(
currentPage=paginationParams.page or 1,
pageSize=paginationParams.pageSize or 20,
totalItems=result.totalItems,
totalPages=result.totalPages,
sort=paginationParams.sort if paginationParams else [],
filters=paginationParams.filters if paginationParams else None
- )
- )
- return PaginatedResponse(items=result if isinstance(result, list) else result.items, pagination=None)
+ ).model_dump(),
+ }
+ items = result if isinstance(result, list) else result.items
+ enriched = enrichRowsWithFkLabels(_toDicts(items), TrusteeAccess)
+ return {"items": enriched, "pagination": None}
@router.get("/{instanceId}/access/{accessId}", response_model=TrusteeAccess)
@@ -781,34 +802,41 @@ def delete_access(
# ===== Contract Routes =====
-@router.get("/{instanceId}/contracts", response_model=PaginatedResponse[TrusteeContract])
+@router.get("/{instanceId}/contracts")
@limiter.limit("30/minute")
def get_contracts(
request: Request,
instanceId: str = Path(..., description="Feature Instance ID"),
pagination: Optional[str] = Query(None),
context: RequestContext = Depends(getRequestContext)
-) -> PaginatedResponse[TrusteeContract]:
+):
"""Get all contracts with optional pagination."""
+ from modules.routes.routeHelpers import enrichRowsWithFkLabels
mandateId = _validateInstanceAccess(instanceId, context)
paginationParams = _parsePagination(pagination)
interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
result = interface.getAllContracts(paginationParams)
+ def _toDicts(items):
+ return [r.model_dump() if hasattr(r, "model_dump") else r for r in items]
+
if paginationParams and hasattr(result, 'items'):
- return PaginatedResponse(
- items=result.items,
- pagination=PaginationMetadata(
+ enriched = enrichRowsWithFkLabels(_toDicts(result.items), TrusteeContract)
+ return {
+ "items": enriched,
+ "pagination": PaginationMetadata(
currentPage=paginationParams.page or 1,
pageSize=paginationParams.pageSize or 20,
totalItems=result.totalItems,
totalPages=result.totalPages,
sort=paginationParams.sort if paginationParams else [],
filters=paginationParams.filters if paginationParams else None
- )
- )
- return PaginatedResponse(items=result if isinstance(result, list) else result.items, pagination=None)
+ ).model_dump(),
+ }
+ items = result if isinstance(result, list) else result.items
+ enriched = enrichRowsWithFkLabels(_toDicts(items), TrusteeContract)
+ return {"items": enriched, "pagination": None}
@router.get("/{instanceId}/contracts/{contractId}", response_model=TrusteeContract)
@@ -909,7 +937,7 @@ def delete_contract(
# ===== Document Routes =====
-@router.get("/{instanceId}/documents", response_model=PaginatedResponse[TrusteeDocument])
+@router.get("/{instanceId}/documents")
@limiter.limit("30/minute")
def get_documents(
request: Request,
@@ -918,7 +946,7 @@ def get_documents(
mode: Optional[str] = Query(None, description="'filterValues' for distinct column values, 'ids' for all filtered IDs"),
column: Optional[str] = Query(None, description="Column key (required when mode=filterValues)"),
context: RequestContext = Depends(getRequestContext)
-) -> PaginatedResponse[TrusteeDocument]:
+):
"""Get all documents (metadata only) with optional pagination."""
mandateId = _validateInstanceAccess(instanceId, context)
@@ -929,19 +957,23 @@ def get_documents(
interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
result = interface.getAllDocuments(paginationParams)
+ def _itemsToDicts(items):
+ return [r.model_dump() if hasattr(r, 'model_dump') else r for r in items]
+
if paginationParams and hasattr(result, 'items'):
- return PaginatedResponse(
- items=result.items,
- pagination=PaginationMetadata(
+ return {
+ "items": _itemsToDicts(result.items),
+ "pagination": PaginationMetadata(
currentPage=paginationParams.page or 1,
pageSize=paginationParams.pageSize or 20,
totalItems=result.totalItems,
totalPages=result.totalPages,
sort=paginationParams.sort if paginationParams else [],
filters=paginationParams.filters if paginationParams else None
- )
- )
- return PaginatedResponse(items=result if isinstance(result, list) else result.items, pagination=None)
+ ).model_dump(),
+ }
+ items = result if isinstance(result, list) else result.items
+ return {"items": _itemsToDicts(items), "pagination": None}
def _handleDocumentMode(instanceId, mandateId, mode, column, pagination, context):
@@ -1154,7 +1186,7 @@ def delete_document(
# ===== Position Routes =====
-@router.get("/{instanceId}/positions", response_model=PaginatedResponse[TrusteePosition])
+@router.get("/{instanceId}/positions")
@limiter.limit("30/minute")
def get_positions(
request: Request,
@@ -1163,7 +1195,7 @@ def get_positions(
mode: Optional[str] = Query(None, description="'filterValues' for distinct column values, 'ids' for all filtered IDs"),
column: Optional[str] = Query(None, description="Column key (required when mode=filterValues)"),
context: RequestContext = Depends(getRequestContext)
-) -> PaginatedResponse[TrusteePosition]:
+):
"""Get all positions with optional pagination."""
mandateId = _validateInstanceAccess(instanceId, context)
@@ -1174,19 +1206,23 @@ def get_positions(
interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
result = interface.getAllPositions(paginationParams)
+ def _itemsToDicts(items):
+ return [r.model_dump() if hasattr(r, 'model_dump') else r for r in items]
+
if paginationParams and hasattr(result, 'items'):
- return PaginatedResponse(
- items=result.items,
- pagination=PaginationMetadata(
+ return {
+ "items": _itemsToDicts(result.items),
+ "pagination": PaginationMetadata(
currentPage=paginationParams.page or 1,
pageSize=paginationParams.pageSize or 20,
totalItems=result.totalItems,
totalPages=result.totalPages,
sort=paginationParams.sort if paginationParams else [],
filters=paginationParams.filters if paginationParams else None
- )
- )
- return PaginatedResponse(items=result if isinstance(result, list) else result.items, pagination=None)
+ ).model_dump(),
+ }
+ items = result if isinstance(result, list) else result.items
+ return {"items": _itemsToDicts(items), "pagination": None}
def _handlePositionMode(instanceId, mandateId, mode, column, pagination, context):
@@ -1347,8 +1383,8 @@ def get_available_accounting_connectors(
) -> List[Dict[str, Any]]:
"""List all available accounting system connectors with their config fields."""
_validateInstanceAccess(instanceId, context)
- from .accounting.accountingRegistry import _getAccountingRegistry
- return _getAccountingRegistry().getAvailableConnectors()
+ from .accounting.accountingRegistry import getAccountingRegistry
+ return getAccountingRegistry().getAvailableConnectors()
# Placeholder returned for secret config fields so frontend can prefill form without sending real secrets.
@@ -1357,8 +1393,8 @@ _CONFIG_PLACEHOLDER = "***"
def _getConfigMasked(connectorType: str, plainConfig: Dict[str, Any]) -> Dict[str, str]:
"""Build config with secret values replaced by placeholder for GET response."""
- from .accounting.accountingRegistry import _getAccountingRegistry
- connector = _getAccountingRegistry().getConnector(connectorType)
+ from .accounting.accountingRegistry import getAccountingRegistry
+ connector = getAccountingRegistry().getConnector(connectorType)
if not connector:
return {k: (v if isinstance(v, str) else str(v)) for k, v in (plainConfig or {}).items()}
secretKeys = {f.key for f in connector.getRequiredConfigFields() if f.secret}
@@ -2081,13 +2117,13 @@ def _serializeRoleForApi(role) -> Dict[str, Any]:
return payload
-@router.get("/{instanceId}/instance-roles", response_model=PaginatedResponse)
+@router.get("/{instanceId}/instance-roles")
@limiter.limit("30/minute")
def get_instance_roles(
request: Request,
instanceId: str = Path(..., description="Feature Instance ID"),
context: RequestContext = Depends(getRequestContext)
-) -> PaginatedResponse:
+):
"""
Get all roles for this feature instance.
Requires feature admin permission.
@@ -2095,14 +2131,9 @@ def get_instance_roles(
mandateId = _validateInstanceAdmin(instanceId, context)
rootInterface = getRootInterface()
-
- # Get instance-specific roles (Pydantic models)
roles = rootInterface.getRolesByFeatureCode("trustee", featureInstanceId=instanceId)
- return PaginatedResponse(
- items=[_serializeRoleForApi(r) for r in roles],
- pagination=None
- )
+ return {"items": [_serializeRoleForApi(r) for r in roles], "pagination": None}
@router.get("/{instanceId}/instance-roles/{roleId}", response_model=Dict[str, Any])
@@ -2129,14 +2160,14 @@ def get_instance_role(
return _serializeRoleForApi(role)
-@router.get("/{instanceId}/instance-roles/{roleId}/rules", response_model=PaginatedResponse)
+@router.get("/{instanceId}/instance-roles/{roleId}/rules")
@limiter.limit("30/minute")
def get_instance_role_rules(
request: Request,
instanceId: str = Path(..., description="Feature Instance ID"),
roleId: str = Path(..., description="Role ID"),
context: RequestContext = Depends(getRequestContext)
-) -> PaginatedResponse:
+):
"""
Get all AccessRules for a specific instance role.
Requires feature admin permission.
@@ -2145,18 +2176,13 @@ def get_instance_role_rules(
rootInterface = getRootInterface()
- # Verify role belongs to this instance (Pydantic model)
role = rootInterface.getRole(roleId)
if not role or str(role.featureInstanceId) != instanceId:
raise HTTPException(status_code=404, detail=f"Role {roleId} not found in this instance")
- # Get AccessRules for this role (Pydantic models)
rules = rootInterface.getAccessRulesByRole(roleId)
- return PaginatedResponse(
- items=[r.model_dump() for r in rules],
- pagination=None
- )
+ return {"items": [r.model_dump() for r in rules], "pagination": None}
@router.post("/{instanceId}/instance-roles/{roleId}/rules", response_model=Dict[str, Any], status_code=201)
@@ -2336,6 +2362,7 @@ def _paginatedReadEndpoint(
handleFilterValuesInMemory,
handleIdsInMemory,
parseCrossFilterPagination,
+ enrichRowsWithFkLabels,
)
from fastapi.responses import JSONResponse
@@ -2401,23 +2428,28 @@ def _paginatedReadEndpoint(
featureCode=interface.FEATURE_CODE,
)
+ def _itemsToDicts(rawItems):
+ return [r.model_dump() if hasattr(r, "model_dump") else r for r in rawItems]
+
if paginationParams and hasattr(result, "items"):
- return PaginatedResponse(
- items=result.items,
- pagination=PaginationMetadata(
+ enriched = enrichRowsWithFkLabels(_itemsToDicts(result.items), modelClass)
+ return {
+ "items": enriched,
+ "pagination": PaginationMetadata(
currentPage=paginationParams.page or 1,
pageSize=paginationParams.pageSize or 20,
totalItems=result.totalItems,
totalPages=result.totalPages,
sort=paginationParams.sort if paginationParams else [],
filters=paginationParams.filters if paginationParams else None,
- ),
- )
+ ).model_dump(),
+ }
items = result.items if hasattr(result, "items") else result
- return PaginatedResponse(items=items, pagination=None)
+ enriched = enrichRowsWithFkLabels(_itemsToDicts(items), modelClass)
+ return {"items": enriched, "pagination": None}
-@router.get("/{instanceId}/data/accounts", response_model=PaginatedResponse[TrusteeDataAccount])
+@router.get("/{instanceId}/data/accounts")
@limiter.limit("30/minute")
def get_data_accounts(
request: Request,
@@ -2438,7 +2470,7 @@ def get_data_accounts(
)
-@router.get("/{instanceId}/data/journal-entries", response_model=PaginatedResponse[TrusteeDataJournalEntry])
+@router.get("/{instanceId}/data/journal-entries")
@limiter.limit("30/minute")
def get_data_journal_entries(
request: Request,
@@ -2459,7 +2491,7 @@ def get_data_journal_entries(
)
-@router.get("/{instanceId}/data/journal-lines", response_model=PaginatedResponse[TrusteeDataJournalLine])
+@router.get("/{instanceId}/data/journal-lines")
@limiter.limit("30/minute")
def get_data_journal_lines(
request: Request,
@@ -2480,7 +2512,7 @@ def get_data_journal_lines(
)
-@router.get("/{instanceId}/data/contacts", response_model=PaginatedResponse[TrusteeDataContact])
+@router.get("/{instanceId}/data/contacts")
@limiter.limit("30/minute")
def get_data_contacts(
request: Request,
@@ -2501,7 +2533,7 @@ def get_data_contacts(
)
-@router.get("/{instanceId}/data/account-balances", response_model=PaginatedResponse[TrusteeDataAccountBalance])
+@router.get("/{instanceId}/data/account-balances")
@limiter.limit("30/minute")
def get_data_account_balances(
request: Request,
@@ -2522,7 +2554,7 @@ def get_data_account_balances(
)
-@router.get("/{instanceId}/accounting/configs", response_model=PaginatedResponse[TrusteeAccountingConfig])
+@router.get("/{instanceId}/accounting/configs")
@limiter.limit("30/minute")
def get_accounting_configs(
request: Request,
@@ -2548,7 +2580,7 @@ def get_accounting_configs(
)
-@router.get("/{instanceId}/accounting/syncs", response_model=PaginatedResponse[TrusteeAccountingSync])
+@router.get("/{instanceId}/accounting/syncs")
@limiter.limit("30/minute")
def get_accounting_syncs(
request: Request,
diff --git a/modules/features/workspace/routeFeatureWorkspace.py b/modules/features/workspace/routeFeatureWorkspace.py
index 1c44d54d..96313293 100644
--- a/modules/features/workspace/routeFeatureWorkspace.py
+++ b/modules/features/workspace/routeFeatureWorkspace.py
@@ -191,7 +191,7 @@ _SOURCE_TYPE_TO_SERVICE = {
}
-def _buildDataSourceContext(chatService, dataSourceIds: List[str]) -> str:
+def buildDataSourceContext(chatService, dataSourceIds: List[str]) -> str:
"""Build a description of active data sources for the agent prompt."""
parts = [
"The user has attached the following external data sources to this prompt.",
@@ -229,7 +229,7 @@ def _buildDataSourceContext(chatService, dataSourceIds: List[str]) -> str:
return "\n".join(parts) if found else ""
-def _buildFeatureDataSourceContext(featureDataSourceIds: List[str]) -> str:
+def buildFeatureDataSourceContext(featureDataSourceIds: List[str]) -> str:
"""Build a description of attached feature data sources for the agent prompt."""
from modules.datamodels.datamodelFeatureDataSource import FeatureDataSource
from modules.security.rbacCatalog import getCatalogService
@@ -735,12 +735,12 @@ async def _runWorkspaceAgent(
enrichedPrompt = prompt
if dataSourceIds:
- dsInfo = _buildDataSourceContext(chatService, dataSourceIds)
+ dsInfo = buildDataSourceContext(chatService, dataSourceIds)
if dsInfo:
enrichedPrompt = f"{prompt}\n\n[Active Data Sources]\n{dsInfo}"
if featureDataSourceIds:
- fdsInfo = _buildFeatureDataSourceContext(featureDataSourceIds)
+ fdsInfo = buildFeatureDataSourceContext(featureDataSourceIds)
if fdsInfo:
enrichedPrompt = f"{enrichedPrompt}\n\n[Attached Feature Data Sources]\n{fdsInfo}"
diff --git a/modules/interfaces/interfaceBootstrap.py b/modules/interfaces/interfaceBootstrap.py
index 3e8bf4ea..a6ae0052 100644
--- a/modules/interfaces/interfaceBootstrap.py
+++ b/modules/interfaces/interfaceBootstrap.py
@@ -139,7 +139,7 @@ def _bootstrapBilling() -> None:
Idempotent: only creates missing settings/accounts.
"""
try:
- from modules.interfaces.interfaceDbBilling import _getRootInterface as getBillingRootInterface
+ from modules.interfaces.interfaceDbBilling import getRootInterface as getBillingRootInterface
billingInterface = getBillingRootInterface()
@@ -1968,11 +1968,11 @@ def initRootMandateBilling(mandateId: str) -> None:
Creates mandate pool account and user audit accounts.
"""
try:
- from modules.interfaces.interfaceDbBilling import _getRootInterface
+ from modules.interfaces.interfaceDbBilling import getRootInterface as getBillingRootInterface
from modules.interfaces.interfaceDbApp import getRootInterface as getAppRootInterface
from modules.datamodels.datamodelBilling import BillingSettings
- billingInterface = _getRootInterface()
+ billingInterface = getBillingRootInterface()
appInterface = getAppRootInterface()
existingSettings = billingInterface.getSettings(mandateId)
@@ -2012,7 +2012,7 @@ def _initRootMandateSubscription(mandateId: str) -> None:
Called during bootstrap after billing init.
"""
try:
- from modules.interfaces.interfaceDbSubscription import _getRootInterface as getSubRootInterface
+ from modules.interfaces.interfaceDbSubscription import getRootInterface as getSubRootInterface
from modules.datamodels.datamodelSubscription import (
MandateSubscription,
SubscriptionStatusEnum,
diff --git a/modules/interfaces/interfaceDbApp.py b/modules/interfaces/interfaceDbApp.py
index c754684f..d1593473 100644
--- a/modules/interfaces/interfaceDbApp.py
+++ b/modules/interfaces/interfaceDbApp.py
@@ -15,7 +15,7 @@ from typing import Dict, Any, List, Optional, Union
from passlib.context import CryptContext
import uuid
-from modules.connectors.connectorDbPostgre import DatabaseConnector, _get_cached_connector
+from modules.connectors.connectorDbPostgre import DatabaseConnector, getCachedConnector
from modules.shared.configuration import APP_CONFIG
from modules.shared.dbRegistry import registerDatabase
from modules.shared.timeUtils import getUtcTimestamp, parseTimestamp
@@ -143,7 +143,7 @@ class AppObjects:
dbPassword = APP_CONFIG.get("DB_PASSWORD_SECRET")
dbPort = int(APP_CONFIG.get("DB_PORT", 5432))
- self.db = _get_cached_connector(
+ self.db = getCachedConnector(
dbHost=dbHost,
dbDatabase=dbDatabase,
dbUser=dbUser,
@@ -1594,8 +1594,8 @@ class AppObjects:
if not adminRoleId:
raise ValueError(f"No admin role found for mandate {mandateId} — cannot assign user without role")
- from modules.interfaces.interfaceDbSubscription import _getRootInterface as _getSubRoot
- from modules.interfaces.interfaceDbBilling import _getRootInterface as _getBillingRoot
+ from modules.interfaces.interfaceDbSubscription import getRootInterface as _getSubRoot
+ from modules.interfaces.interfaceDbBilling import getRootInterface as _getBillingRoot
from datetime import datetime, timezone, timedelta
now = datetime.now(timezone.utc)
@@ -1693,7 +1693,7 @@ class AppObjects:
from modules.datamodels.datamodelSubscription import (
SubscriptionStatusEnum, BUILTIN_PLANS,
)
- from modules.interfaces.interfaceDbSubscription import _getRootInterface as _getSubRoot
+ from modules.interfaces.interfaceDbSubscription import getRootInterface as _getSubRoot
from datetime import datetime, timezone, timedelta
activated = 0
@@ -1936,7 +1936,7 @@ class AppObjects:
logger.info(f"Cascade: deleted {len(memberships)} UserMandates for mandate {mandateId}")
# 3. Cancel Stripe subscriptions + delete MandateSubscription records (poweron_billing)
- from modules.interfaces.interfaceDbSubscription import _getRootInterface as _getSubRoot
+ from modules.interfaces.interfaceDbSubscription import getRootInterface as _getSubRoot
subInterface = _getSubRoot()
subs = subInterface.listForMandate(mandateId)
for sub in subs:
@@ -1954,7 +1954,7 @@ class AppObjects:
logger.info(f"Cascade: deleted {len(subs)} subscriptions for mandate {mandateId}")
# 3b. Delete Billing data (poweron_billing)
- from modules.interfaces.interfaceDbBilling import _getRootInterface as _getBillingRoot
+ from modules.interfaces.interfaceDbBilling import getRootInterface as _getBillingRoot
billingDb = _getBillingRoot().db
billingAccounts = billingDb.getRecordset(BillingAccount, recordFilter={"mandateId": mandateId})
for acc in billingAccounts:
@@ -2202,7 +2202,7 @@ class AppObjects:
Balance is always on the mandate pool (PREPAY_MANDATE). User accounts are for audit trail only.
"""
try:
- from modules.interfaces.interfaceDbBilling import _getRootInterface as getBillingRootInterface
+ from modules.interfaces.interfaceDbBilling import getRootInterface as getBillingRootInterface
billingInterface = getBillingRootInterface()
settings = billingInterface.getSettings(mandateId)
diff --git a/modules/interfaces/interfaceDbBilling.py b/modules/interfaces/interfaceDbBilling.py
index a4af7b25..db1ee619 100644
--- a/modules/interfaces/interfaceDbBilling.py
+++ b/modules/interfaces/interfaceDbBilling.py
@@ -134,7 +134,7 @@ def getInterface(currentUser: User, mandateId: str = None) -> "BillingObjects":
return _billingInterfaces[cacheKey]
-def _getRootInterface() -> "BillingObjects":
+def getRootInterface() -> "BillingObjects":
"""Get interface with system access for bootstrap operations."""
from modules.security.rootAccess import getRootUser
rootUser = getRootUser()
@@ -888,7 +888,7 @@ class BillingObjects:
prev = self._parseSettingsDateTime(settings.get("storagePeriodStartAt"))
if prev is not None and abs((prev - periodStartAt).total_seconds()) < 2:
return
- from modules.interfaces.interfaceDbSubscription import _getRootInterface as _getSubRoot
+ from modules.interfaces.interfaceDbSubscription import getRootInterface as _getSubRoot
usedMB = float(_getSubRoot().getMandateDataVolumeMB(mandateId))
self.updateSettings(
@@ -911,13 +911,13 @@ class BillingObjects:
settings = self.getSettings(mandateId)
if not settings:
return None
- from modules.interfaces.interfaceDbSubscription import _getRootInterface as _getSubRoot
- from modules.datamodels.datamodelSubscription import _getPlan
+ from modules.interfaces.interfaceDbSubscription import getRootInterface as _getSubRoot
+ from modules.datamodels.datamodelSubscription import getPlan
subIface = _getSubRoot()
usedMB = float(subIface.getMandateDataVolumeMB(mandateId))
sub = subIface.getOperativeForMandate(mandateId)
- plan = _getPlan(sub.get("planKey", "")) if sub else None
+ plan = getPlan(sub.get("planKey", "")) if sub else None
includedMB = plan.maxDataVolumeMB if plan and plan.maxDataVolumeMB is not None else None
if includedMB is None:
return None
@@ -971,13 +971,13 @@ class BillingObjects:
Amount = budgetAiPerUserCHF * activeUsers (dynamic, not the static plan.budgetAiCHF).
Should be called once per billing period (initial activation + each invoice.paid).
Returns the created CREDIT transaction or None if budget is 0."""
- from modules.datamodels.datamodelSubscription import _getPlan
+ from modules.datamodels.datamodelSubscription import getPlan
- plan = _getPlan(planKey)
+ plan = getPlan(planKey)
if not plan or not plan.budgetAiPerUserCHF or plan.budgetAiPerUserCHF <= 0:
return None
- from modules.interfaces.interfaceDbSubscription import _getRootInterface as _getSubRoot
+ from modules.interfaces.interfaceDbSubscription import getRootInterface as _getSubRoot
subRoot = _getSubRoot()
activeUsers = max(subRoot.countActiveUsers(mandateId), 1)
amount = plan.budgetAiPerUserCHF * activeUsers
@@ -1027,13 +1027,13 @@ class BillingObjects:
delta > 0: user added -> CREDIT pro-rata portion
delta < 0: user removed -> DEBIT pro-rata portion
"""
- from modules.datamodels.datamodelSubscription import _getPlan
+ from modules.datamodels.datamodelSubscription import getPlan
- plan = _getPlan(planKey)
+ plan = getPlan(planKey)
if not plan or not plan.budgetAiPerUserCHF or plan.budgetAiPerUserCHF <= 0:
return None
- from modules.interfaces.interfaceDbSubscription import _getRootInterface as _getSubRoot
+ from modules.interfaces.interfaceDbSubscription import getRootInterface as _getSubRoot
subRoot = _getSubRoot()
operative = subRoot.getOperativeForMandate(mandateId)
if not operative:
@@ -1221,7 +1221,7 @@ class BillingObjects:
if not mandate or not getattr(mandate, "enabled", True):
continue
- mandateName = getattr(mandate, 'label', None) or getattr(mandate, 'name', None) or (mandate.get("label") or mandate.get("name", "") if isinstance(mandate, dict) else "")
+ mandateName = getattr(mandate, 'label', None) or getattr(mandate, 'name', None) or (mandate.get("label") or mandate.get("name", None) if isinstance(mandate, dict) else None) or f"NA({mandateId})"
settings = self.getSettings(mandateId)
if not settings:
@@ -1280,13 +1280,12 @@ class BillingObjects:
if not userAccount:
continue
- # Get transactions for user's account (all transactions are on user accounts now)
transactions = self.getTransactions(userAccount["id"], limit=limit)
mandate = appInterface.getMandate(mandateId)
- mandateName = ""
+ mandateName = f"NA({mandateId})"
if mandate:
- mandateName = getattr(mandate, 'label', None) or getattr(mandate, 'name', None) or (mandate.get("label") or mandate.get("name", "") if isinstance(mandate, dict) else "")
+ mandateName = getattr(mandate, 'label', None) or getattr(mandate, 'name', None) or (mandate.get("label") or mandate.get("name", None) if isinstance(mandate, dict) else None) or f"NA({mandateId})"
for t in transactions:
t["mandateId"] = mandateId
@@ -1333,9 +1332,9 @@ class BillingObjects:
continue
mandate = appInterface.getMandate(mandateId)
- mandateName = ""
+ mandateName = f"NA({mandateId})"
if mandate:
- mandateName = getattr(mandate, 'label', None) or getattr(mandate, 'name', None) or (mandate.get("label") or mandate.get("name", "") if isinstance(mandate, dict) else "")
+ mandateName = getattr(mandate, 'label', None) or getattr(mandate, 'name', None) or (mandate.get("label") or mandate.get("name", None) if isinstance(mandate, dict) else None) or f"NA({mandateId})"
allMandateAccounts = self.db.getRecordset(
BillingAccount,
@@ -1387,11 +1386,10 @@ class BillingObjects:
for mandateId in targetMandateIds:
transactions = self.getTransactionsByMandate(mandateId, limit=limit)
- # Get mandate name
mandate = appInterface.getMandate(mandateId)
- mandateName = ""
+ mandateName = f"NA({mandateId})"
if mandate:
- mandateName = getattr(mandate, 'label', None) or getattr(mandate, 'name', None) or (mandate.get("label") or mandate.get("name", "") if isinstance(mandate, dict) else "")
+ mandateName = getattr(mandate, 'label', None) or getattr(mandate, 'name', None) or (mandate.get("label") or mandate.get("name", None) if isinstance(mandate, dict) else None) or f"NA({mandateId})"
for t in transactions:
t["mandateId"] = mandateId
@@ -1439,7 +1437,6 @@ class BillingObjects:
for s in allSettings:
settingsMap[s.get("mandateId")] = s
- # Get user info efficiently
userIds = list(set(acc.get("userId") for acc in allAccounts if acc.get("userId")))
userMap = {}
for userId in userIds:
@@ -1447,16 +1444,15 @@ class BillingObjects:
if user:
displayName = getattr(user, 'displayName', None) or (user.get("displayName") if isinstance(user, dict) else None)
username = getattr(user, 'username', None) or (user.get("username") if isinstance(user, dict) else None)
- userMap[userId] = displayName or username or userId
+ userMap[userId] = displayName or username or f"NA({userId})"
- # Get mandate info efficiently
mandateMap = {}
mandateIdList = list(set(acc.get("mandateId") for acc in allAccounts if acc.get("mandateId")))
for mandateId in mandateIdList:
mandate = appInterface.getMandate(mandateId)
if mandate:
- mandateName = getattr(mandate, 'label', None) or getattr(mandate, 'name', None) or (mandate.get("label") or mandate.get("name", "") if isinstance(mandate, dict) else "")
- mandateMap[mandateId] = mandateName
+ mandateName = getattr(mandate, 'label', None) or getattr(mandate, 'name', None) or (mandate.get("label") or mandate.get("name", None) if isinstance(mandate, dict) else None)
+ mandateMap[mandateId] = mandateName or f"NA({mandateId})"
for account in allAccounts:
mandateId = account.get("mandateId")
@@ -1475,9 +1471,9 @@ class BillingObjects:
balances.append({
"accountId": account.get("id"),
"mandateId": mandateId,
- "mandateName": mandateMap.get(mandateId, ""),
+ "mandateName": mandateMap.get(mandateId) or (f"NA({mandateId})" if mandateId else None),
"userId": userId,
- "userName": userMap.get(userId, userId),
+ "userName": userMap.get(userId) or (f"NA({userId})" if userId else None),
"balance": balance,
"warningThreshold": warningThreshold,
"isWarning": balance <= warningThreshold,
@@ -1596,14 +1592,14 @@ class BillingObjects:
if pageUserIds:
users = appInterface.getUsersByIds(list(pageUserIds))
for uid, u in users.items():
- dn = getattr(u, "displayName", None) or getattr(u, "username", None) or uid
+ dn = getattr(u, "displayName", None) or getattr(u, "username", None) or f"NA({uid})"
userMap[uid] = dn
mandateMap: Dict[str, str] = {}
if pageMandateIds:
mandates = appInterface.getMandatesByIds(list(pageMandateIds))
for mid, m in mandates.items():
- mandateMap[mid] = getattr(m, "label", None) or getattr(m, "name", None) or mid
+ mandateMap[mid] = getattr(m, "label", None) or getattr(m, "name", None) or f"NA({mid})"
enriched = []
for t in pageItems:
@@ -1613,9 +1609,9 @@ class BillingObjects:
mid = acc.get("mandateId")
txUserId = row.get("createdByUserId") or acc.get("userId")
row["mandateId"] = mid
- row["mandateName"] = mandateMap.get(mid, "")
+ row["mandateName"] = mandateMap.get(mid) or (f"NA({mid})" if mid else None)
row["userId"] = txUserId
- row["userName"] = userMap.get(txUserId, txUserId) if txUserId else None
+ row["userName"] = userMap.get(txUserId) or (f"NA({txUserId})" if txUserId else None)
enriched.append(row)
return PaginatedResult(items=enriched, totalItems=totalItems, totalPages=totalPages)
@@ -1639,12 +1635,12 @@ class BillingObjects:
first, then builds a single SQL query with OR-combined conditions.
"""
import math
- from modules.connectors.connectorDbPostgre import _get_model_fields, _parseRecordFields
+ from modules.connectors.connectorDbPostgre import getModelFields, parseRecordFields
from modules.datamodels.datamodelUam import UserInDB
from modules.interfaces.interfaceDbApp import getInterface as getAppInterface
table = BillingTransaction.__name__
- fields = _get_model_fields(BillingTransaction)
+ fields = getModelFields(BillingTransaction)
pattern = f"%{searchTerm}%"
# Resolve matching user / mandate IDs via the app DB (which is separate
@@ -1785,7 +1781,7 @@ class BillingObjects:
records = [dict(row) for row in cur.fetchall()]
for rec in records:
- _parseRecordFields(rec, fields, f"search table {table}")
+ parseRecordFields(rec, fields, f"search table {table}")
totalPages = math.ceil(totalItems / pageSize) if totalItems > 0 else 0
return {"items": records, "totalItems": totalItems, "totalPages": totalPages}
@@ -2023,7 +2019,7 @@ class BillingObjects:
appInterface = getAppInterface(self.currentUser)
mandates = appInterface.getMandatesByIds(mandateIds)
return sorted(
- {getattr(m, "label", None) or getattr(m, "name", "") or mid for mid, m in mandates.items()},
+ {getattr(m, "label", None) or getattr(m, "name", None) or f"NA({mid})" for mid, m in mandates.items()},
key=lambda v: v.lower(),
)
@@ -2035,7 +2031,7 @@ class BillingObjects:
appInterface = getAppInterface(self.currentUser)
users = appInterface.getUsersByIds(values)
return sorted(
- {getattr(u, "displayName", None) or getattr(u, "username", None) or uid for uid, u in users.items()},
+ {getattr(u, "displayName", None) or getattr(u, "username", None) or f"NA({uid})" for uid, u in users.items()},
key=lambda v: v.lower(),
)
@@ -2075,7 +2071,6 @@ class BillingObjects:
"userId": acc.get("userId")
}
- # Get user info efficiently
userIds = list(set(acc.get("userId") for acc in allAccounts if acc.get("userId")))
userMap = {}
for userId in userIds:
@@ -2083,16 +2078,15 @@ class BillingObjects:
if user:
displayName = getattr(user, 'displayName', None) or (user.get("displayName") if isinstance(user, dict) else None)
username = getattr(user, 'username', None) or (user.get("username") if isinstance(user, dict) else None)
- userMap[userId] = displayName or username or userId
+ userMap[userId] = displayName or username or f"NA({userId})"
- # Get mandate info efficiently
mandateMap = {}
mandateIdList = list(set(acc.get("mandateId") for acc in allAccounts if acc.get("mandateId")))
for mandateId in mandateIdList:
mandate = appInterface.getMandate(mandateId)
if mandate:
- mandateName = getattr(mandate, 'label', None) or getattr(mandate, 'name', None) or (mandate.get("label") or mandate.get("name", "") if isinstance(mandate, dict) else "")
- mandateMap[mandateId] = mandateName
+ mandateName = getattr(mandate, 'label', None) or getattr(mandate, 'name', None) or (mandate.get("label") or mandate.get("name", None) if isinstance(mandate, dict) else None)
+ mandateMap[mandateId] = mandateName or f"NA({mandateId})"
# Get transactions for all accounts and collect createdByUserIds
rawTransactions = []
@@ -2123,18 +2117,16 @@ class BillingObjects:
if user:
displayName = getattr(user, 'displayName', None) or (user.get("displayName") if isinstance(user, dict) else None)
username = getattr(user, 'username', None) or (user.get("username") if isinstance(user, dict) else None)
- userMap[uid] = displayName or username or uid
+ userMap[uid] = displayName or username or f"NA({uid})"
- # Enrich transactions
for t in rawTransactions:
mandateId = t.pop("_accountMandateId", None)
accountUserId = t.pop("_accountUserId", None)
t["mandateId"] = mandateId
- t["mandateName"] = mandateMap.get(mandateId, "")
- # Prefer createdByUserId (per-transaction) over account-derived userId
+ t["mandateName"] = mandateMap.get(mandateId) or (f"NA({mandateId})" if mandateId else None)
txUserId = t.get("createdByUserId") or accountUserId
t["userId"] = txUserId
- t["userName"] = userMap.get(txUserId, txUserId) if txUserId else None
+ t["userName"] = userMap.get(txUserId) or (f"NA({txUserId})" if txUserId else None)
allTransactions.append(t)
except Exception as e:
diff --git a/modules/interfaces/interfaceDbChat.py b/modules/interfaces/interfaceDbChat.py
index be097263..1b7ec59a 100644
--- a/modules/interfaces/interfaceDbChat.py
+++ b/modules/interfaces/interfaceDbChat.py
@@ -62,13 +62,13 @@ def storeDebugMessageAndDocuments(message, currentUser, mandateId=None, featureI
try:
import os
from datetime import datetime, UTC
- from modules.shared.debugLogger import _getBaseDebugDir, _ensureDir
+ from modules.shared.debugLogger import getBaseDebugDir, ensureDir
from modules.interfaces.interfaceDbManagement import getInterface
# Create base debug directory (use base debug dir, not prompts subdirectory)
- baseDebugDir = _getBaseDebugDir()
+ baseDebugDir = getBaseDebugDir()
debug_root = os.path.join(baseDebugDir, 'messages')
- _ensureDir(debug_root)
+ ensureDir(debug_root)
# Generate timestamp
timestamp = datetime.now(UTC).strftime('%Y%m%d-%H%M%S-%f')[:-3]
@@ -133,7 +133,7 @@ def storeDebugMessageAndDocuments(message, currentUser, mandateId=None, featureI
safe_label = "default"
label_folder = os.path.join(message_path, safe_label)
- _ensureDir(label_folder)
+ ensureDir(label_folder)
# Store each document
for i, doc in enumerate(docs):
diff --git a/modules/interfaces/interfaceDbKnowledge.py b/modules/interfaces/interfaceDbKnowledge.py
index a12ac048..f819615e 100644
--- a/modules/interfaces/interfaceDbKnowledge.py
+++ b/modules/interfaces/interfaceDbKnowledge.py
@@ -11,7 +11,7 @@ from collections import defaultdict
from datetime import datetime, timezone, timedelta
from typing import Dict, Any, List, Optional
-from modules.connectors.connectorDbPostgre import _get_cached_connector
+from modules.connectors.connectorDbPostgre import getCachedConnector
from modules.shared.dbRegistry import registerDatabase
from modules.datamodels.datamodelKnowledge import FileContentIndex, ContentChunk, RoundMemory, WorkflowMemory
from modules.datamodels.datamodelUam import User
@@ -43,7 +43,7 @@ class KnowledgeObjects:
dbPassword = APP_CONFIG.get("DB_PASSWORD_SECRET")
dbPort = int(APP_CONFIG.get("DB_PORT", 5432))
- self.db = _get_cached_connector(
+ self.db = getCachedConnector(
dbHost=dbHost,
dbDatabase=dbDatabase,
dbUser=dbUser,
@@ -103,9 +103,9 @@ class KnowledgeObjects:
ok = self.db.recordDelete(FileContentIndex, fileId)
if ok and mandateId:
try:
- from modules.interfaces.interfaceDbBilling import _getRootInterface
+ from modules.interfaces.interfaceDbBilling import getRootInterface
- _getRootInterface().reconcileMandateStorageBilling(str(mandateId))
+ getRootInterface().reconcileMandateStorageBilling(str(mandateId))
except Exception as ex:
logger.warning("reconcileMandateStorageBilling after delete failed: %s", ex)
return ok
diff --git a/modules/interfaces/interfaceDbManagement.py b/modules/interfaces/interfaceDbManagement.py
index cca98ffa..e6cee0b8 100644
--- a/modules/interfaces/interfaceDbManagement.py
+++ b/modules/interfaces/interfaceDbManagement.py
@@ -13,7 +13,7 @@ import math
import mimetypes
from typing import Dict, Any, List, Optional, Union
-from modules.connectors.connectorDbPostgre import DatabaseConnector, _get_cached_connector
+from modules.connectors.connectorDbPostgre import DatabaseConnector, getCachedConnector
from modules.shared.dbRegistry import registerDatabase
from modules.interfaces.interfaceRbac import getRecordsetWithRBAC, getRecordsetPaginatedWithRBAC
from modules.security.rbac import RbacClass
@@ -136,7 +136,7 @@ class ComponentObjects:
dbPassword = APP_CONFIG.get("DB_PASSWORD_SECRET")
dbPort = int(APP_CONFIG.get("DB_PORT", 5432))
- self.db = _get_cached_connector(
+ self.db = getCachedConnector(
dbHost=dbHost,
dbDatabase=dbDatabase,
dbUser=dbUser,
@@ -992,8 +992,11 @@ class ComponentObjects:
if file.get("neutralize") is None:
file["neutralize"] = False
+ labelCols = {k: v for k, v in file.items() if k.endswith("Label")}
fileItem = FileItem(**file)
- fileItems.append(fileItem)
+ itemDict = fileItem.model_dump()
+ itemDict.update(labelCols)
+ fileItems.append(itemDict)
except Exception as e:
logger.warning(f"Skipping invalid file record: {str(e)}")
continue
@@ -1347,8 +1350,8 @@ class ComponentObjects:
folderIds = [f["id"] for f in folders if f.get("id")]
fileCounts: Dict[str, int] = {}
try:
- from modules.interfaces.interfaceRbac import _buildFilesScopeWhereClause
- scopeClause = _buildFilesScopeWhereClause(
+ from modules.interfaces.interfaceRbac import buildFilesScopeWhereClause
+ scopeClause = buildFilesScopeWhereClause(
self.currentUser, "FileItem", self.db,
self.mandateId, self.featureInstanceId,
[], [],
diff --git a/modules/interfaces/interfaceDbSubscription.py b/modules/interfaces/interfaceDbSubscription.py
index 05d83a58..a09fe93f 100644
--- a/modules/interfaces/interfaceDbSubscription.py
+++ b/modules/interfaces/interfaceDbSubscription.py
@@ -25,7 +25,7 @@ from modules.datamodels.datamodelSubscription import (
TERMINAL_STATUSES,
OPERATIVE_STATUSES,
BUILTIN_PLANS,
- _getPlan,
+ getPlan as getPlanFromCatalog,
_getSelectablePlans,
)
@@ -55,7 +55,7 @@ def getInterface(currentUser: User, mandateId: str = None) -> "SubscriptionObjec
return _subscriptionInterfaces[cacheKey]
-def _getRootInterface() -> "SubscriptionObjects":
+def getRootInterface() -> "SubscriptionObjects":
from modules.security.rootAccess import getRootUser
return SubscriptionObjects(getRootUser(), mandateId=None)
@@ -96,7 +96,7 @@ class SubscriptionObjects:
# =========================================================================
def getPlan(self, planKey: str) -> Optional[SubscriptionPlan]:
- return _getPlan(planKey)
+ return getPlanFromCatalog(planKey)
def getSelectablePlans(self) -> List[SubscriptionPlan]:
return _getSelectablePlans()
diff --git a/modules/interfaces/interfaceRbac.py b/modules/interfaces/interfaceRbac.py
index 14953ef1..13bdfcba 100644
--- a/modules/interfaces/interfaceRbac.py
+++ b/modules/interfaces/interfaceRbac.py
@@ -247,8 +247,8 @@ def getRecordsetWithRBAC(
# Handle JSONB fields and ensure numeric types are correct
# Import the helper function from connector module
- from modules.connectors.connectorDbPostgre import _get_model_fields
- fields = _get_model_fields(modelClass)
+ from modules.connectors.connectorDbPostgre import getModelFields
+ fields = getModelFields(modelClass)
for record in records:
for fieldName, fieldType in fields.items():
# Ensure numeric fields are properly typed
@@ -379,8 +379,8 @@ def getRecordsetPaginatedWithRBAC(
whereValues.append(value)
if pagination and pagination.filters:
- from modules.connectors.connectorDbPostgre import _get_model_fields
- fields = _get_model_fields(modelClass)
+ from modules.connectors.connectorDbPostgre import getModelFields
+ fields = getModelFields(modelClass)
validColumns = set(fields.keys())
for key, val in pagination.filters.items():
if key == "search" and isinstance(val, str) and val.strip():
@@ -440,8 +440,8 @@ def getRecordsetPaginatedWithRBAC(
orderParts: List[str] = []
if pagination and pagination.sort:
- from modules.connectors.connectorDbPostgre import _get_model_fields
- validColumns = set(_get_model_fields(modelClass).keys())
+ from modules.connectors.connectorDbPostgre import getModelFields
+ validColumns = set(getModelFields(modelClass).keys())
for sf in pagination.sort:
if sf.field in validColumns:
direction = "DESC" if sf.direction.lower() == "desc" else "ASC"
@@ -464,10 +464,10 @@ def getRecordsetPaginatedWithRBAC(
cursor.execute(dataSql, whereValues)
records = [dict(row) for row in cursor.fetchall()]
- from modules.connectors.connectorDbPostgre import _get_model_fields, _parseRecordFields
- fields = _get_model_fields(modelClass)
+ from modules.connectors.connectorDbPostgre import getModelFields, parseRecordFields
+ fields = getModelFields(modelClass)
for record in records:
- _parseRecordFields(record, fields, f"table {table}")
+ parseRecordFields(record, fields, f"table {table}")
for fieldName, fieldType in fields.items():
if fieldType == "JSONB" and fieldName in record and record[fieldName] is None:
modelFields = modelClass.model_fields
@@ -484,12 +484,15 @@ def getRecordsetPaginatedWithRBAC(
if enrichPermissions:
records = _enrichRecordsWithPermissions(records, permissions, currentUser)
- if pagination:
- pageSize = pagination.pageSize
- totalPages = math.ceil(totalItems / pageSize) if totalItems > 0 else 0
- return PaginatedResult(items=records, totalItems=totalItems, totalPages=totalPages)
+ from modules.routes.routeHelpers import enrichRowsWithFkLabels
+ enrichRowsWithFkLabels(records, modelClass)
- return records
+ if pagination:
+ pageSize = pagination.pageSize
+ totalPages = math.ceil(totalItems / pageSize) if totalItems > 0 else 0
+ return PaginatedResult(items=records, totalItems=totalItems, totalPages=totalPages)
+
+ return records
except Exception as e:
logger.error(f"Error in getRecordsetPaginatedWithRBAC for table {table}: {e}")
return PaginatedResult(items=[], totalItems=0, totalPages=0) if pagination else []
@@ -518,8 +521,8 @@ def getDistinctColumnValuesWithRBAC(
if not connector._ensureTableExists(modelClass):
return []
- from modules.connectors.connectorDbPostgre import _get_model_fields
- fields = _get_model_fields(modelClass)
+ from modules.connectors.connectorDbPostgre import getModelFields
+ fields = getModelFields(modelClass)
if column not in fields:
return []
@@ -614,21 +617,34 @@ def getDistinctColumnValuesWithRBAC(
whereClause = " WHERE " + " AND ".join(whereConditions) if whereConditions else ""
notNullCond = f'"{column}" IS NOT NULL AND "{column}"::TEXT != \'\''
if whereClause:
- whereClause += f" AND {notNullCond}"
+ nonNullWhere = whereClause + f" AND {notNullCond}"
else:
- whereClause = f" WHERE {notNullCond}"
+ nonNullWhere = f" WHERE {notNullCond}"
- sql = f'SELECT DISTINCT "{column}"::TEXT AS val FROM "{table}"{whereClause} ORDER BY val'
+ sql = f'SELECT DISTINCT "{column}"::TEXT AS val FROM "{table}"{nonNullWhere} ORDER BY val'
with connector.connection.cursor() as cursor:
cursor.execute(sql, whereValues)
- return [row["val"] for row in cursor.fetchall()]
+ result = [row["val"] for row in cursor.fetchall()]
+
+ # Include a None entry when NULL/empty rows exist (enables "(Leer)" filter)
+ emptyCond = f'("{column}" IS NULL OR "{column}"::TEXT = \'\')'
+ if whereClause:
+ emptySql = f'SELECT 1 FROM "{table}"{whereClause} AND {emptyCond} LIMIT 1'
+ else:
+ emptySql = f'SELECT 1 FROM "{table}" WHERE {emptyCond} LIMIT 1'
+ with connector.connection.cursor() as cursor:
+ cursor.execute(emptySql, whereValues)
+ if cursor.fetchone():
+ result.append(None)
+
+ return result
except Exception as e:
logger.error(f"Error in getDistinctColumnValuesWithRBAC for {table}.{column}: {e}")
return []
-def _buildFilesScopeWhereClause(
+def buildFilesScopeWhereClause(
currentUser: User,
table: str,
connector,
@@ -673,7 +689,7 @@ def _buildFilesScopeWhereClause(
if instances:
effectiveMandateId = instances[0].get("mandateId") or ""
except Exception as e:
- logger.warning(f"_buildFilesScopeWhereClause: could not resolve mandate for instance {featureInstanceId}: {e}")
+ logger.warning(f"buildFilesScopeWhereClause: could not resolve mandate for instance {featureInstanceId}: {e}")
scopeParts: List[str] = []
scopeValues: List = []
@@ -757,7 +773,7 @@ def buildRbacWhereClause(
namespaceAll = TABLE_NAMESPACE.get(table, "system")
# Files: scope-based context filtering applies even with ALL access
if namespaceAll == "files":
- return _buildFilesScopeWhereClause(
+ return buildFilesScopeWhereClause(
currentUser, table, connector, mandateId, featureInstanceId,
baseConditions, baseValues,
)
@@ -811,7 +827,7 @@ def buildRbacWhereClause(
# - scope='featureInstance' → visible to users with access to that instance
# - scope='personal' → only visible to owner (sysCreatedBy)
if namespace == "files":
- return _buildFilesScopeWhereClause(
+ return buildFilesScopeWhereClause(
currentUser, table, connector, mandateId, featureInstanceId,
baseConditions, baseValues,
)
diff --git a/modules/routes/routeAdminDemoConfig.py b/modules/routes/routeAdminDemoConfig.py
index d893c205..db37e775 100644
--- a/modules/routes/routeAdminDemoConfig.py
+++ b/modules/routes/routeAdminDemoConfig.py
@@ -28,9 +28,9 @@ def listDemoConfigs(
currentUser: User = Depends(requirePlatformAdmin),
) -> dict:
"""List all available demo configurations."""
- from modules.demoConfigs import _getAvailableDemoConfigs
+ from modules.demoConfigs import getAvailableDemoConfigs
- configs = _getAvailableDemoConfigs()
+ configs = getAvailableDemoConfigs()
return {
"configs": [cfg.toDict() for cfg in configs.values()],
}
@@ -44,9 +44,9 @@ def loadDemoConfig(
currentUser: User = Depends(requirePlatformAdmin),
) -> dict:
"""Load (create) a demo configuration. Idempotent."""
- from modules.demoConfigs import _getDemoConfigByCode
+ from modules.demoConfigs import getDemoConfigByCode
- config = _getDemoConfigByCode(code)
+ config = getDemoConfigByCode(code)
if not config:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
@@ -69,9 +69,9 @@ def removeDemoConfig(
currentUser: User = Depends(requirePlatformAdmin),
) -> dict:
"""Remove all data created by a demo configuration."""
- from modules.demoConfigs import _getDemoConfigByCode
+ from modules.demoConfigs import getDemoConfigByCode
- config = _getDemoConfigByCode(code)
+ config = getDemoConfigByCode(code)
if not config:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
diff --git a/modules/routes/routeAdminFeatures.py b/modules/routes/routeAdminFeatures.py
index 66682464..9634dd0d 100644
--- a/modules/routes/routeAdminFeatures.py
+++ b/modules/routes/routeAdminFeatures.py
@@ -18,7 +18,7 @@ import json
import math
from pydantic import BaseModel, Field
from modules.datamodels.datamodelPagination import PaginationParams, PaginationMetadata, normalize_pagination_dict
-from modules.routes.routeHelpers import _applyFiltersAndSort, handleFilterValuesInMemory, handleIdsInMemory
+from modules.routes.routeHelpers import applyFiltersAndSort, handleFilterValuesInMemory, handleIdsInMemory
from modules.auth import limiter, getRequestContext, RequestContext, requirePlatformAdmin
from modules.datamodels.datamodelUam import User, UserInDB
@@ -481,7 +481,7 @@ def list_feature_instances(
return handleIdsInMemory(items, pagination)
if paginationParams:
- filtered = _applyFiltersAndSort(items, paginationParams)
+ filtered = applyFiltersAndSort(items, paginationParams)
totalItems = len(filtered)
totalPages = math.ceil(totalItems / paginationParams.pageSize) if totalItems > 0 else 0
startIdx = (paginationParams.page - 1) * paginationParams.pageSize
@@ -1019,7 +1019,7 @@ def list_template_roles(
if mode == "ids":
return handleIdsInMemory(enriched, pagination)
- filtered = _applyFiltersAndSort(enriched, paginationParams)
+ filtered = applyFiltersAndSort(enriched, paginationParams)
if paginationParams:
totalItems = len(filtered)
@@ -1223,7 +1223,7 @@ def list_feature_instance_users(
raise HTTPException(status_code=400, detail=f"Invalid pagination parameter: {str(e)}")
if paginationParams:
- filtered = _applyFiltersAndSort(items, paginationParams)
+ filtered = applyFiltersAndSort(items, paginationParams)
totalItems = len(filtered)
totalPages = math.ceil(totalItems / paginationParams.pageSize) if totalItems > 0 else 0
startIdx = (paginationParams.page - 1) * paginationParams.pageSize
diff --git a/modules/routes/routeAudit.py b/modules/routes/routeAudit.py
index 3634ff9d..0e686297 100644
--- a/modules/routes/routeAudit.py
+++ b/modules/routes/routeAudit.py
@@ -69,14 +69,24 @@ def _applySortFilterSearch(
return items
-def _distinctColumnValues(items: List[Dict[str, Any]], column: str) -> List[str]:
- """Extract sorted distinct non-empty string values for a column."""
+def _distinctColumnValues(items: List[Dict[str, Any]], column: str) -> List[Optional[str]]:
+ """Extract sorted distinct values for a column.
+
+ Includes ``None`` as the last entry when at least one row has a null/empty
+ value — this enables the "(Leer)" filter option in the frontend.
+ """
vals = set()
+ hasEmpty = False
for r in items:
v = r.get(column)
- if v is not None and v != "":
- vals.add(str(v))
- return sorted(vals)
+ if v is None or v == "":
+ hasEmpty = True
+ continue
+ vals.add(str(v))
+ result: List[Optional[str]] = sorted(vals)
+ if hasEmpty:
+ result.append(None)
+ return result
def _enrichUserAndInstanceLabels(
@@ -87,46 +97,32 @@ def _enrichUserAndInstanceLabels(
instanceKey: str = "featureInstanceId",
instanceLabelKey: str = "instanceLabel",
) -> None:
- """Resolve userId → username and featureInstanceId → label in-place."""
- userIds = set()
- instanceIds = set()
- for r in items:
- uid = r.get(userKey)
- if uid and not r.get(usernameKey):
- userIds.add(uid)
- iid = r.get(instanceKey)
- if iid:
- instanceIds.add(iid)
+ """Resolve userId -> username and featureInstanceId -> label in-place.
- userMap: Dict[str, str] = {}
- instanceMap: Dict[str, str] = {}
+ Uses the central resolvers from routeHelpers. Returns None (not the raw ID)
+ for unresolvable entries so the frontend can distinguish "resolved" from
+ "missing".
+ """
+ from modules.routes.routeHelpers import resolveUserLabels, resolveInstanceLabels
- try:
- from modules.interfaces.interfaceDbApp import getInterface
- appIf = getInterface(
- context.user,
- mandateId=str(context.mandateId) if context.mandateId else None,
- )
- if userIds:
- users = appIf.getUsersByIds(list(userIds))
- for uid, u in users.items():
- name = getattr(u, "displayName", None) or getattr(u, "email", None) or uid
- userMap[uid] = name
- if instanceIds:
- for iid in instanceIds:
- fi = appIf.getFeatureInstance(iid)
- if fi:
- instanceMap[iid] = getattr(fi, "label", None) or getattr(fi, "featureCode", None) or iid
- except Exception as e:
- logger.debug("_enrichUserAndInstanceLabels: %s", e)
+ userIds = list({r.get(userKey) for r in items if r.get(userKey) and not r.get(usernameKey)})
+ instanceIds = list({r.get(instanceKey) for r in items if r.get(instanceKey)})
+
+ userMap: Dict[str, Optional[str]] = {}
+ instanceMap: Dict[str, Optional[str]] = {}
+
+ if userIds:
+ userMap = resolveUserLabels(userIds)
+ if instanceIds:
+ instanceMap = resolveInstanceLabels(instanceIds)
for r in items:
uid = r.get(userKey)
if uid and not r.get(usernameKey) and uid in userMap:
r[usernameKey] = userMap[uid]
iid = r.get(instanceKey)
- if iid and iid in instanceMap:
- r[instanceLabelKey] = instanceMap[iid]
+ if iid:
+ r[instanceLabelKey] = instanceMap.get(iid)
def _requireAuditAccess(context: RequestContext):
diff --git a/modules/routes/routeBilling.py b/modules/routes/routeBilling.py
index 382b709a..e3d26352 100644
--- a/modules/routes/routeBilling.py
+++ b/modules/routes/routeBilling.py
@@ -20,7 +20,7 @@ from pydantic import BaseModel, Field
from modules.auth import limiter, requirePlatformAdmin, getRequestContext, RequestContext
# Import billing components
-from modules.interfaces.interfaceDbBilling import getInterface as getBillingInterface, _getRootInterface
+from modules.interfaces.interfaceDbBilling import getInterface as getBillingInterface, getRootInterface
from modules.serviceCenter.services.serviceBilling.mainServiceBilling import getService as getBillingService
import json
import math
@@ -140,44 +140,46 @@ def _getBillingDataScope(user) -> BillingDataScope:
def _isAdminOfMandate(ctx: RequestContext, targetMandateId: str) -> bool:
- """Check if user is PlatformAdmin or admin of the specified mandate."""
+ """Check if user is PlatformAdmin or admin of the specified mandate.
+
+ Fail-loud: any DB/lookup error is logged at ERROR and re-raised. We never
+ silently return False — that would mask infrastructure outages as "no
+ permission" and produce confusing 403s instead of actionable 500s.
+ """
if ctx.isPlatformAdmin:
return True
- try:
- from modules.interfaces.interfaceDbApp import getRootInterface
- rootInterface = getRootInterface()
- userMandates = rootInterface.getUserMandates(str(ctx.user.id))
- for um in userMandates:
- if str(getattr(um, 'mandateId', None)) != str(targetMandateId):
- continue
- if not getattr(um, 'enabled', True):
- continue
- umId = str(getattr(um, 'id', ''))
- roleIds = rootInterface.getRoleIdsForUserMandate(umId)
- for roleId in roleIds:
- role = rootInterface.getRole(roleId)
- if role and role.roleLabel == "admin" and not role.featureInstanceId:
- return True
- return False
- except Exception:
- return False
+ from modules.interfaces.interfaceDbApp import getRootInterface
+ rootInterface = getRootInterface()
+ userMandates = rootInterface.getUserMandates(str(ctx.user.id))
+ for um in userMandates:
+ if str(getattr(um, 'mandateId', None)) != str(targetMandateId):
+ continue
+ if not getattr(um, 'enabled', True):
+ continue
+ umId = str(getattr(um, 'id', ''))
+ roleIds = rootInterface.getRoleIdsForUserMandate(umId)
+ for roleId in roleIds:
+ role = rootInterface.getRole(roleId)
+ if role and role.roleLabel == "admin" and not role.featureInstanceId:
+ return True
+ return False
def _isMemberOfMandate(ctx: RequestContext, targetMandateId: str) -> bool:
- """Check if user has any enabled membership in the specified mandate."""
- try:
- from modules.interfaces.interfaceDbApp import getRootInterface
- rootInterface = getRootInterface()
- userMandates = rootInterface.getUserMandates(str(ctx.user.id))
- for um in userMandates:
- if str(getattr(um, 'mandateId', None)) != str(targetMandateId):
- continue
- if not getattr(um, 'enabled', True):
- continue
- return True
- return False
- except Exception:
- return False
+ """Check if user has any enabled membership in the specified mandate.
+
+ Fail-loud: see _isAdminOfMandate above for the same rationale.
+ """
+ from modules.interfaces.interfaceDbApp import getRootInterface
+ rootInterface = getRootInterface()
+ userMandates = rootInterface.getUserMandates(str(ctx.user.id))
+ for um in userMandates:
+ if str(getattr(um, 'mandateId', None)) != str(targetMandateId):
+ continue
+ if not getattr(um, 'enabled', True):
+ continue
+ return True
+ return False
@@ -887,7 +889,7 @@ def confirmCheckoutSession(
if not _isAdminOfMandate(ctx, mandate_id):
raise HTTPException(status_code=403, detail=routeApiMsg("Mandate admin role required"))
- root_billing_interface = _getRootInterface()
+ root_billing_interface = getRootInterface()
return _creditStripeSessionIfNeeded(root_billing_interface, session_dict, eventId=None)
except HTTPException:
raise
@@ -957,10 +959,10 @@ async def stripeWebhook(
sessionMode = session.get("mode") if hasattr(session, "get") else getattr(session, "mode", None)
if sessionMode == "subscription":
- _handleSubscriptionCheckoutCompleted(session, event_id)
+ handleSubscriptionCheckoutCompleted(session, event_id)
return {"received": True}
- billingInterface = _getRootInterface()
+ billingInterface = getRootInterface()
if billingInterface.getStripeWebhookEventByEventId(event_id):
logger.info(f"Stripe event {event_id} already processed, skipping")
return {"received": True}
@@ -997,11 +999,11 @@ async def stripeWebhook(
return {"received": True}
-def _handleSubscriptionCheckoutCompleted(session, eventId: str) -> None:
+def handleSubscriptionCheckoutCompleted(session, eventId: str) -> None:
"""Handle checkout.session.completed for mode=subscription.
Resolves the local PENDING record by ID from webhook metadata and transitions it."""
- from modules.interfaces.interfaceDbSubscription import _getRootInterface as getSubRootInterface
- from modules.datamodels.datamodelSubscription import SubscriptionStatusEnum, _getPlan
+ from modules.interfaces.interfaceDbSubscription import getRootInterface as getSubRootInterface
+ from modules.datamodels.datamodelSubscription import SubscriptionStatusEnum, getPlan
from modules.serviceCenter.services.serviceSubscription.mainServiceSubscription import (
getService as getSubscriptionService,
_notifySubscriptionChange,
@@ -1033,8 +1035,16 @@ def _handleSubscriptionCheckoutCompleted(session, eventId: str) -> None:
mandateId = metadata.get("mandateId")
planKey = metadata.get("planKey", "")
platformUrl = platformUrl or metadata.get("platformUrl", "")
- except Exception:
- pass
+ except Exception as e:
+ # Stripe lookup is the only way to recover the metadata at this
+ # point — if it fails we MUST surface it, otherwise the webhook
+ # later short-circuits with "missing metadata" and the user
+ # silently gets stuck in PENDING.
+ logger.error(
+ "Stripe Subscription.retrieve(%s) failed during checkout "
+ "metadata recovery: %s", stripeSub, e,
+ )
+ raise
stripeSubId = session.get("subscription")
@@ -1083,7 +1093,17 @@ def _handleSubscriptionCheckoutCompleted(session, eventId: str) -> None:
elif priceMapping and priceId == priceMapping.stripePriceIdInstances:
stripeData["stripeItemIdInstances"] = item["id"]
except Exception as e:
- logger.error("Error retrieving Stripe subscription %s: %s", stripeSubId, e)
+ # Without these enrichment fields the activation completes anyway
+ # (status flips to ACTIVE/SCHEDULED below), but periods + Stripe
+ # item-IDs are missing on the local record, which breaks later
+ # add-on billing and renewal accounting. Re-raise so the webhook
+ # is retried by Stripe instead of silently shipping a broken row.
+ logger.error(
+ "Error retrieving Stripe subscription %s during checkout "
+ "completion (will be retried by Stripe): %s",
+ stripeSubId, e,
+ )
+ raise
if stripeData:
subInterface.updateFields(subscriptionRecordId, stripeData)
@@ -1136,12 +1156,12 @@ def _handleSubscriptionCheckoutCompleted(session, eventId: str) -> None:
subService.invalidateCache(mandateId)
if toStatus == SubscriptionStatusEnum.ACTIVE:
- plan = _getPlan(planKey)
+ plan = getPlan(planKey)
updatedSub = subInterface.getById(subscriptionRecordId)
_notifySubscriptionChange(mandateId, "activated", plan, subscriptionRecord=updatedSub, platformUrl=platformUrl)
try:
- billingIf = _getRootInterface()
+ billingIf = getRootInterface()
billingIf.creditSubscriptionBudget(mandateId, planKey, periodLabel="Erstaktivierung")
except Exception as ex:
logger.error("creditSubscriptionBudget on activation failed: %s", ex)
@@ -1155,8 +1175,8 @@ def _handleSubscriptionCheckoutCompleted(session, eventId: str) -> None:
def _handleSubscriptionWebhook(event) -> None:
"""Process Stripe subscription webhook events.
All record resolution is by stripeSubscriptionId — no mandate-based guessing."""
- from modules.interfaces.interfaceDbSubscription import _getRootInterface as getSubRootInterface
- from modules.datamodels.datamodelSubscription import SubscriptionStatusEnum, _getPlan
+ from modules.interfaces.interfaceDbSubscription import getRootInterface as getSubRootInterface
+ from modules.datamodels.datamodelSubscription import SubscriptionStatusEnum, getPlan
from modules.serviceCenter.services.serviceSubscription.mainServiceSubscription import (
getService as getSubscriptionService,
_notifySubscriptionChange,
@@ -1205,11 +1225,11 @@ def _handleSubscriptionWebhook(event) -> None:
subInterface.transitionStatus(subId, SubscriptionStatusEnum.SCHEDULED, SubscriptionStatusEnum.ACTIVE)
subService.invalidateCache(mandateId)
planKey = sub.get("planKey", "")
- plan = _getPlan(planKey)
+ plan = getPlan(planKey)
refreshedSub = subInterface.getById(subId)
_notifySubscriptionChange(mandateId, "activated", plan, subscriptionRecord=refreshedSub, platformUrl=webhookPlatformUrl)
try:
- _getRootInterface().creditSubscriptionBudget(mandateId, planKey, periodLabel="Erstaktivierung")
+ getRootInterface().creditSubscriptionBudget(mandateId, planKey, periodLabel="Erstaktivierung")
except Exception as ex:
logger.error("creditSubscriptionBudget SCHEDULED->ACTIVE failed: %s", ex)
logger.info("SCHEDULED -> ACTIVE for sub %s (mandate %s)", subId, mandateId)
@@ -1245,7 +1265,7 @@ def _handleSubscriptionWebhook(event) -> None:
scheduled["id"], SubscriptionStatusEnum.SCHEDULED, SubscriptionStatusEnum.ACTIVE,
)
subService.invalidateCache(mandateId)
- plan = _getPlan(scheduled.get("planKey", ""))
+ plan = getPlan(scheduled.get("planKey", ""))
refreshedScheduled = subInterface.getById(scheduled["id"])
_notifySubscriptionChange(mandateId, "activated", plan, subscriptionRecord=refreshedScheduled, platformUrl=webhookPlatformUrl)
logger.info("Promoted SCHEDULED sub %s -> ACTIVE (mandate %s)", scheduled["id"], mandateId)
@@ -1256,7 +1276,7 @@ def _handleSubscriptionWebhook(event) -> None:
if currentStatus == SubscriptionStatusEnum.ACTIVE:
subInterface.transitionStatus(subId, SubscriptionStatusEnum.ACTIVE, SubscriptionStatusEnum.PAST_DUE)
subService.invalidateCache(mandateId)
- plan = _getPlan(sub.get("planKey", ""))
+ plan = getPlan(sub.get("planKey", ""))
_notifySubscriptionChange(mandateId, "payment_failed", plan, subscriptionRecord=sub, platformUrl=webhookPlatformUrl)
logger.info("Payment failed for sub %s (mandate %s)", subId, mandateId)
@@ -1283,7 +1303,7 @@ def _handleSubscriptionWebhook(event) -> None:
period_start_at = datetime.fromtimestamp(int(period_ts), tz=timezone.utc)
periodLabel = period_start_at.strftime("%Y-%m-%d")
try:
- billing_if = _getRootInterface()
+ billing_if = getRootInterface()
billing_if.resetStorageBillingPeriod(mandateId, period_start_at)
billing_if.reconcileMandateStorageBilling(mandateId)
except Exception as ex:
@@ -1291,7 +1311,7 @@ def _handleSubscriptionWebhook(event) -> None:
planKey = sub.get("planKey", "")
try:
- billing_if = _getRootInterface()
+ billing_if = getRootInterface()
billing_if.creditSubscriptionBudget(mandateId, planKey, periodLabel=periodLabel or "Periodenverlängerung")
except Exception as ex:
logger.error("creditSubscriptionBudget on invoice.paid failed: %s", ex)
@@ -1408,28 +1428,21 @@ def getUsersForMandate(
def _attachCreatedByUserNamesToTransactionRows(rows: List[Dict[str, Any]]) -> None:
- """Resolve createdByUserId to userName using root app interface (sysadmin transaction views)."""
- try:
- from modules.interfaces.interfaceDbApp import getRootInterface
+ """Resolve createdByUserId to userName using central FK resolvers.
- appRoot = getRootInterface()
- userNames: Dict[str, str] = {}
- for row in rows:
- uid = row.get("createdByUserId")
- if not uid:
- row["userName"] = ""
- continue
- if uid not in userNames:
- try:
- u = appRoot.getUser(uid)
- userNames[uid] = u.username if u else uid[:8]
- except Exception:
- userNames[uid] = uid[:8]
- row["userName"] = userNames.get(uid, "")
- except Exception:
- for row in rows:
- uid = row.get("createdByUserId")
- row["userName"] = uid[:8] if uid else ""
+ Returns None (not a truncated UUID) for unresolvable IDs so the frontend
+ renders an explicit NA() indicator instead of a misleading 8-char snippet.
+ """
+ from modules.routes.routeHelpers import resolveUserLabels
+
+ userIds = list({r.get("createdByUserId") for r in rows if r.get("createdByUserId")})
+ userMap: Dict[str, Optional[str]] = {}
+ if userIds:
+ userMap = resolveUserLabels(userIds)
+
+ for row in rows:
+ uid = row.get("createdByUserId")
+ row["userName"] = userMap.get(uid) if uid else None
def _enrichTransactionRows(transactions) -> List[Dict[str, Any]]:
@@ -1717,18 +1730,13 @@ def getUserViewStatistics(
for acc in allAccounts:
accountToMandate[acc.get("id", "")] = acc.get("mandateId", "")
- from modules.interfaces.interfaceDbApp import getInterface as getAppInterface
- mandateIdsForLookup = list(set(accountToMandate.values()))
- mandateMap: Dict[str, str] = {}
- if mandateIdsForLookup:
- rootIface = getAppInterface(ctx.user)
- mandatesById = rootIface.getMandatesByIds(mandateIdsForLookup)
- for mid, m in mandatesById.items():
- mandateMap[mid] = getattr(m, "name", mid) or mid
+ from modules.routes.routeHelpers import resolveMandateLabels
+ mandateIdsForLookup = list({v for v in accountToMandate.values() if v})
+ mandateMap: Dict[str, Optional[str]] = resolveMandateLabels(mandateIdsForLookup) if mandateIdsForLookup else {}
def _mandateName(accountId: str) -> str:
mid = accountToMandate.get(accountId, "")
- return mandateMap.get(mid, mid or "unknown")
+ return (mandateMap.get(mid) or f"NA({mid})") if mid else "unknown"
costByMandate: Dict[str, float] = {}
for accId, total in agg.get("costByAccountId", {}).items():
diff --git a/modules/routes/routeDataConnections.py b/modules/routes/routeDataConnections.py
index 290be722..05c8aa9d 100644
--- a/modules/routes/routeDataConnections.py
+++ b/modules/routes/routeDataConnections.py
@@ -127,7 +127,7 @@ def get_auth_authority_options(
# CRUD ENDPOINTS
# ============================================================================
-@router.get("/", response_model=PaginatedResponse[UserConnection])
+@router.get("/")
@limiter.limit("30/minute")
async def get_connections(
request: Request,
@@ -135,7 +135,7 @@ async def get_connections(
mode: Optional[str] = Query(None, description="'filterValues' for distinct column values, 'ids' for all filtered IDs"),
column: Optional[str] = Query(None, description="Column key (required when mode=filterValues)"),
currentUser: User = Depends(getCurrentUser)
-) -> PaginatedResponse[UserConnection]:
+):
"""Get connections for the current user with optional pagination, sorting, and filtering.
SECURITY: This endpoint is secure - users can only see their own connections.
@@ -151,7 +151,7 @@ async def get_connections(
- GET /api/connections/?mode=filterValues&column=status
- GET /api/connections/?mode=ids
"""
- from modules.routes.routeHelpers import handleFilterValuesInMemory, handleIdsInMemory
+ from modules.routes.routeHelpers import handleFilterValuesInMemory, handleIdsInMemory, enrichRowsWithFkLabels
def _buildEnhancedItems():
interface = getInterface(currentUser)
@@ -252,27 +252,13 @@ async def get_connections(
}
enhanced_connections_dict.append(connection_dict)
- # If no pagination requested, return all items
+ enrichRowsWithFkLabels(enhanced_connections_dict, UserConnection)
+
if paginationParams is None:
- # Convert back to UserConnection objects (enum strings are already in dict)
- items = []
- for conn_dict in enhanced_connections_dict:
- conn_dict_copy = dict(conn_dict)
- if "authority" in conn_dict_copy and isinstance(conn_dict_copy["authority"], str):
- try:
- conn_dict_copy["authority"] = AuthAuthority(conn_dict_copy["authority"])
- except ValueError:
- pass
- if "status" in conn_dict_copy and isinstance(conn_dict_copy["status"], str):
- try:
- conn_dict_copy["status"] = ConnectionStatus(conn_dict_copy["status"])
- except ValueError:
- pass
- items.append(UserConnection(**conn_dict_copy))
- return PaginatedResponse(
- items=items,
- pagination=None
- )
+ return {
+ "items": enhanced_connections_dict,
+ "pagination": None,
+ }
# Apply filtering if provided
if paginationParams.filters:
@@ -292,43 +278,24 @@ async def get_connections(
paginationParams.sort
)
- # Count total items after filters
totalItems = len(enhanced_connections_dict)
totalPages = math.ceil(totalItems / paginationParams.pageSize) if totalItems > 0 else 0
- # Apply pagination (skip/limit)
startIdx = (paginationParams.page - 1) * paginationParams.pageSize
endIdx = startIdx + paginationParams.pageSize
paged_connections = enhanced_connections_dict[startIdx:endIdx]
- # Convert back to UserConnection objects (convert enum strings back to enums)
- items = []
- for conn_dict in paged_connections:
- # Convert enum strings back to enum objects
- conn_dict_copy = dict(conn_dict)
- if "authority" in conn_dict_copy and isinstance(conn_dict_copy["authority"], str):
- try:
- conn_dict_copy["authority"] = AuthAuthority(conn_dict_copy["authority"])
- except ValueError:
- pass # Keep as string if invalid
- if "status" in conn_dict_copy and isinstance(conn_dict_copy["status"], str):
- try:
- conn_dict_copy["status"] = ConnectionStatus(conn_dict_copy["status"])
- except ValueError:
- pass # Keep as string if invalid
- items.append(UserConnection(**conn_dict_copy))
-
- return PaginatedResponse(
- items=items,
- pagination=PaginationMetadata(
+ return {
+ "items": paged_connections,
+ "pagination": PaginationMetadata(
currentPage=paginationParams.page,
pageSize=paginationParams.pageSize,
totalItems=totalItems,
totalPages=totalPages,
sort=paginationParams.sort,
filters=paginationParams.filters
- )
- )
+ ).model_dump(),
+ }
except HTTPException:
raise
diff --git a/modules/routes/routeDataFiles.py b/modules/routes/routeDataFiles.py
index 82cf1624..11b90f09 100644
--- a/modules/routes/routeDataFiles.py
+++ b/modules/routes/routeDataFiles.py
@@ -17,6 +17,7 @@ from modules.shared.attributeUtils import getModelAttributeDefinitions
from modules.datamodels.datamodelUam import User
from modules.datamodels.datamodelPagination import PaginationParams, PaginatedResponse, PaginationMetadata, normalize_pagination_dict
from modules.shared.i18nRegistry import apiRouteContext
+from modules.routes.routeHelpers import enrichRowsWithFkLabels
routeApiMsg = apiRouteContext("routeDataFiles")
# Configure logger
@@ -220,7 +221,7 @@ router = APIRouter(
}
)
-@router.get("/list", response_model=PaginatedResponse[FileItem])
+@router.get("/list")
@limiter.limit("120/minute")
def get_files(
request: Request,
@@ -229,7 +230,7 @@ def get_files(
column: Optional[str] = Query(None, description="Column key (required when mode=filterValues)"),
currentUser: User = Depends(getCurrentUser),
context: RequestContext = Depends(getRequestContext)
-) -> PaginatedResponse[FileItem]:
+):
"""
Get files with optional pagination, sorting, and filtering.
@@ -303,24 +304,27 @@ def get_files(
recordFilter = {"folderId": fVal}
result = managementInterface.getAllFiles(pagination=paginationParams, recordFilter=recordFilter)
-
+
+ def _filesToDicts(items):
+ return [f.model_dump() if hasattr(f, "model_dump") else (dict(f) if not isinstance(f, dict) else f) for f in items]
+
if paginationParams:
- return PaginatedResponse(
- items=result.items,
- pagination=PaginationMetadata(
+ enriched = enrichRowsWithFkLabels(_filesToDicts(result.items), FileItem)
+ return {
+ "items": enriched,
+ "pagination": PaginationMetadata(
currentPage=paginationParams.page,
pageSize=paginationParams.pageSize,
totalItems=result.totalItems,
totalPages=result.totalPages,
sort=paginationParams.sort,
filters=paginationParams.filters
- )
- )
+ ).model_dump(),
+ }
else:
- return PaginatedResponse(
- items=result,
- pagination=None
- )
+ items = result if isinstance(result, list) else (result.items if hasattr(result, "items") else [result])
+ enriched = enrichRowsWithFkLabels(_filesToDicts(items), FileItem)
+ return {"items": enriched, "pagination": None}
except HTTPException:
raise
except Exception as e:
@@ -1019,14 +1023,14 @@ def updateFileNeutralize(
# ── File endpoints with path parameters (catch-all /{fileId}) ─────────────────
-@router.get("/{fileId}", response_model=FileItem)
+@router.get("/{fileId}")
@limiter.limit("30/minute")
def get_file(
request: Request,
fileId: str = Path(..., description="ID of the file"),
currentUser: User = Depends(getCurrentUser),
context: RequestContext = Depends(getRequestContext)
-) -> FileItem:
+):
"""Get a file. Resolves the file's mandate/instance scope automatically."""
try:
_mgmt, fileData = _resolveFileWithScope(currentUser, context, fileId)
@@ -1036,7 +1040,9 @@ def get_file(
detail=f"File with ID {fileId} not found"
)
- return fileData
+ fileDict = fileData.model_dump() if hasattr(fileData, "model_dump") else dict(fileData)
+ enriched = enrichRowsWithFkLabels([fileDict], FileItem)
+ return enriched[0]
except interfaceDbManagement.FileNotFoundError as e:
logger.warning(f"File not found: {str(e)}")
diff --git a/modules/routes/routeDataMandates.py b/modules/routes/routeDataMandates.py
index 2bed0169..7972181d 100644
--- a/modules/routes/routeDataMandates.py
+++ b/modules/routes/routeDataMandates.py
@@ -22,7 +22,7 @@ from modules.auth import limiter, requirePlatformAdmin, getRequestContext, getCu
# Import interfaces
import modules.interfaces.interfaceDbApp as interfaceDbApp
-from modules.interfaces.interfaceDbBilling import _getRootInterface as _getBillingRootInterface
+from modules.interfaces.interfaceDbBilling import getRootInterface as _getBillingRootInterface
from modules.shared.attributeUtils import getModelAttributeDefinitions
from modules.shared.auditLogger import audit_logger
@@ -318,7 +318,7 @@ def create_mandate(
from modules.datamodels.datamodelSubscription import (
MandateSubscription, SubscriptionStatusEnum, BUILTIN_PLANS,
)
- from modules.interfaces.interfaceDbSubscription import _getRootInterface as _getSubRoot
+ from modules.interfaces.interfaceDbSubscription import getRootInterface as _getSubRoot
from datetime import datetime, timezone, timedelta
planKey = mandateData.get("planKey", "TRIAL_14D")
@@ -660,7 +660,7 @@ def list_mandate_users(
from modules.routes.routeHelpers import (
handleFilterValuesInMemory, handleIdsInMemory,
- _applyFiltersAndSort as _sharedApplyFiltersAndSort,
+ applyFiltersAndSort as _sharedApplyFiltersAndSort,
paginateInMemory,
)
@@ -674,13 +674,23 @@ def list_mandate_users(
if paginationParams:
paginationParamsObj = None
- try:
- paginationDict = json.loads(pagination) if pagination else None
+ if pagination:
+ try:
+ paginationDict = json.loads(pagination)
+ except json.JSONDecodeError as e:
+ raise HTTPException(
+ status_code=400,
+ detail=f"Invalid 'pagination' query: not valid JSON ({e.msg})",
+ )
if paginationDict:
- paginationDict = normalize_pagination_dict(paginationDict)
- paginationParamsObj = PaginationParams(**paginationDict)
- except Exception:
- pass
+ try:
+ paginationDict = normalize_pagination_dict(paginationDict)
+ paginationParamsObj = PaginationParams(**paginationDict)
+ except Exception as e:
+ raise HTTPException(
+ status_code=400,
+ detail=f"Invalid 'pagination' payload: {e}",
+ )
filtered = _sharedApplyFiltersAndSort(result, paginationParamsObj)
totalItems = len(filtered)
diff --git a/modules/routes/routeDataPrompts.py b/modules/routes/routeDataPrompts.py
index 79dc8d72..ee99b912 100644
--- a/modules/routes/routeDataPrompts.py
+++ b/modules/routes/routeDataPrompts.py
@@ -44,20 +44,25 @@ def get_prompts(
- filterValues: distinct values for a column (cross-filtered)
- ids: all IDs matching current filters
"""
- from modules.routes.routeHelpers import handleFilterValuesInMemory, handleIdsInMemory
+ from modules.routes.routeHelpers import handleFilterValuesInMemory, handleIdsInMemory, enrichRowsWithFkLabels
+
+ def _promptsToEnrichedDicts(promptItems):
+ dicts = [r.model_dump() if hasattr(r, 'model_dump') else (dict(r) if not isinstance(r, dict) else r) for r in promptItems]
+ enrichRowsWithFkLabels(dicts, Prompt)
+ return dicts
if mode == "filterValues":
if not column:
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
managementInterface = interfaceDbManagement.getInterface(currentUser)
result = managementInterface.getAllPrompts(pagination=None)
- items = [r.model_dump() if hasattr(r, 'model_dump') else r for r in result]
+ items = _promptsToEnrichedDicts(result)
return handleFilterValuesInMemory(items, column, pagination)
if mode == "ids":
managementInterface = interfaceDbManagement.getInterface(currentUser)
result = managementInterface.getAllPrompts(pagination=None)
- items = [r.model_dump() if hasattr(r, 'model_dump') else r for r in result]
+ items = _promptsToEnrichedDicts(result)
return handleIdsInMemory(items, pagination)
paginationParams = None
@@ -74,22 +79,24 @@ def get_prompts(
result = managementInterface.getAllPrompts(pagination=paginationParams)
if paginationParams:
- return PaginatedResponse(
- items=result.items,
- pagination=PaginationMetadata(
+ items = _promptsToEnrichedDicts(result.items)
+ return {
+ "items": items,
+ "pagination": PaginationMetadata(
currentPage=paginationParams.page,
pageSize=paginationParams.pageSize,
totalItems=result.totalItems,
totalPages=result.totalPages,
sort=paginationParams.sort,
filters=paginationParams.filters
- )
- )
+ ).model_dump(),
+ }
else:
- return PaginatedResponse(
- items=result,
- pagination=None
- )
+ items = _promptsToEnrichedDicts(result)
+ return {
+ "items": items,
+ "pagination": None,
+ }
@router.post("", response_model=Prompt)
diff --git a/modules/routes/routeDataUsers.py b/modules/routes/routeDataUsers.py
index ea796aab..67156291 100644
--- a/modules/routes/routeDataUsers.py
+++ b/modules/routes/routeDataUsers.py
@@ -25,12 +25,17 @@ from modules.datamodels.datamodelUam import User, UserInDB, AuthAuthority
from modules.interfaces.interfaceDbApp import getRootInterface
from modules.datamodels.datamodelPagination import PaginationParams, PaginatedResponse, PaginationMetadata, normalize_pagination_dict
from modules.shared.i18nRegistry import apiRouteContext
+from modules.routes.routeHelpers import enrichRowsWithFkLabels
routeApiMsg = apiRouteContext("routeDataUsers")
# Configure logger
logger = logging.getLogger(__name__)
+def _usersToDicts(items) -> list:
+ return [u.model_dump() if hasattr(u, "model_dump") else (dict(u) if not isinstance(u, dict) else u) for u in items]
+
+
def _isAdminForUser(context: RequestContext, targetUserId: str) -> bool:
"""
Check if the current user has admin rights for the target user.
@@ -187,7 +192,7 @@ def get_user_options(
# CRUD ENDPOINTS
# ============================================================================
-@router.get("/", response_model=PaginatedResponse[User])
+@router.get("/")
@limiter.limit("30/minute")
def get_users(
request: Request,
@@ -195,7 +200,7 @@ def get_users(
mode: Optional[str] = Query(None, description="'filterValues' for distinct column values, 'ids' for all filtered IDs"),
column: Optional[str] = Query(None, description="Column key (required when mode=filterValues)"),
context: RequestContext = Depends(getRequestContext)
-) -> PaginatedResponse[User]:
+):
"""
Get users with optional pagination, sorting, and filtering.
MULTI-TENANT: mandateId from X-Mandate-Id header determines scope.
@@ -236,48 +241,44 @@ def get_users(
# Get users for specific mandate using getUsersByMandate
result = appInterface.getUsersByMandate(str(context.mandateId), paginationParams)
- # getUsersByMandate returns PaginatedResult if pagination was provided
if paginationParams and hasattr(result, 'items'):
- return PaginatedResponse(
- items=result.items,
- pagination=PaginationMetadata(
+ enriched = enrichRowsWithFkLabels(_usersToDicts(result.items), User)
+ return {
+ "items": enriched,
+ "pagination": PaginationMetadata(
currentPage=result.currentPage,
pageSize=result.pageSize,
totalItems=result.totalItems,
totalPages=result.totalPages,
sort=paginationParams.sort,
filters=paginationParams.filters
- )
- )
+ ).model_dump(),
+ }
else:
- # No pagination - result is a list
users = result if isinstance(result, list) else result.items if hasattr(result, 'items') else []
- return PaginatedResponse(
- items=users,
- pagination=None
- )
+ enriched = enrichRowsWithFkLabels(_usersToDicts(users), User)
+ return {"items": enriched, "pagination": None}
elif context.isPlatformAdmin:
# PlatformAdmin without mandateId — DB-level pagination via interface
result = appInterface.getAllUsers(paginationParams)
if paginationParams and hasattr(result, 'items'):
- return PaginatedResponse(
- items=result.items,
- pagination=PaginationMetadata(
+ enriched = enrichRowsWithFkLabels(_usersToDicts(result.items), User)
+ return {
+ "items": enriched,
+ "pagination": PaginationMetadata(
currentPage=paginationParams.page,
pageSize=paginationParams.pageSize,
totalItems=result.totalItems,
totalPages=result.totalPages,
sort=paginationParams.sort,
filters=paginationParams.filters
- )
- )
+ ).model_dump(),
+ }
else:
users = result if isinstance(result, list) else (result.items if hasattr(result, 'items') else [])
- return PaginatedResponse(
- items=users,
- pagination=None
- )
+ enriched = enrichRowsWithFkLabels(_usersToDicts(users), User)
+ return {"items": enriched, "pagination": None}
else:
# Non-SysAdmin without mandateId: aggregate users across all admin mandates
rootInterface = getRootInterface()
@@ -316,34 +317,30 @@ def get_users(
for u in batchUsers.values()
]
- from modules.routes.routeHelpers import _applyFiltersAndSort as _applyFiltersAndSortHelper
+ from modules.routes.routeHelpers import applyFiltersAndSort as _applyFiltersAndSortHelper
filteredUsers = _applyFiltersAndSortHelper(allUsers, paginationParams)
- users = [User(**u) for u in filteredUsers]
+ enriched = enrichRowsWithFkLabels(filteredUsers, User)
if paginationParams:
import math
- totalItems = len(users)
+ totalItems = len(enriched)
totalPages = math.ceil(totalItems / paginationParams.pageSize) if totalItems > 0 else 0
startIdx = (paginationParams.page - 1) * paginationParams.pageSize
endIdx = startIdx + paginationParams.pageSize
- paginatedUsers = users[startIdx:endIdx]
- return PaginatedResponse(
- items=paginatedUsers,
- pagination=PaginationMetadata(
+ return {
+ "items": enriched[startIdx:endIdx],
+ "pagination": PaginationMetadata(
currentPage=paginationParams.page,
pageSize=paginationParams.pageSize,
totalItems=totalItems,
totalPages=totalPages,
sort=paginationParams.sort,
filters=paginationParams.filters
- )
- )
+ ).model_dump(),
+ }
else:
- return PaginatedResponse(
- items=users,
- pagination=None
- )
+ return {"items": enriched, "pagination": None}
except HTTPException:
raise
except Exception as e:
@@ -753,10 +750,10 @@ def send_password_link(
expiryHours = int(APP_CONFIG.get("Auth_RESET_TOKEN_EXPIRY_HOURS", "24"))
try:
- from modules.routes.routeSecurityLocal import _buildAuthEmailHtml, _sendAuthEmail
+ from modules.routes.routeSecurityLocal import buildAuthEmailHtml, sendAuthEmail
emailSubject = "PowerOn - Passwort setzen"
- emailHtml = _buildAuthEmailHtml(
+ emailHtml = buildAuthEmailHtml(
greeting=f"Hallo {targetUser.fullName or targetUser.username}",
bodyLines=[
"Ein Administrator hat einen Link zum Setzen Ihres Passworts angefordert.",
@@ -770,7 +767,7 @@ def send_password_link(
footerText=f"Dieser Link ist {expiryHours} Stunden gültig. Falls Sie diese Anforderung nicht erwartet haben, kontaktieren Sie bitte Ihren Administrator.",
)
- emailSent = _sendAuthEmail(
+ emailSent = sendAuthEmail(
recipient=targetUser.email,
subject=emailSubject,
message="",
diff --git a/modules/routes/routeHelpers.py b/modules/routes/routeHelpers.py
index de2f863b..19bfdb8e 100644
--- a/modules/routes/routeHelpers.py
+++ b/modules/routes/routeHelpers.py
@@ -12,7 +12,7 @@ Provides unified logic for:
import copy
import json
import logging
-from typing import Any, Dict, List, Optional, Callable
+from typing import Any, Dict, List, Optional, Callable, Union
from fastapi.responses import JSONResponse
@@ -29,64 +29,183 @@ logger = logging.getLogger(__name__)
# Central FK label resolvers (cross-DB)
# ---------------------------------------------------------------------------
-def _resolveMandateLabels(ids: List[str]) -> Dict[str, str]:
+def resolveMandateLabels(ids: List[str]) -> Dict[str, Optional[str]]:
+ """Resolve mandate IDs to labels. Returns None (not the ID!) for
+ unresolvable entries so the caller can distinguish "resolved" from "missing".
+ """
from modules.interfaces.interfaceDbApp import getRootInterface
rootIface = getRootInterface()
mMap = rootIface.getMandatesByIds(ids)
- return {
- mid: getattr(m, "label", None) or getattr(m, "name", mid) or mid
- for mid, m in mMap.items()
- }
+ result: Dict[str, Optional[str]] = {}
+ for mid in ids:
+ m = mMap.get(mid)
+ label = (getattr(m, "label", None) or getattr(m, "name", None)) if m else None
+ if not label:
+ logger.warning("resolveMandateLabels: no label for id=%s (found=%s)", mid, m is not None)
+ result[mid] = label or None
+ return result
-def _resolveInstanceLabels(ids: List[str]) -> Dict[str, str]:
+def resolveInstanceLabels(ids: List[str]) -> Dict[str, Optional[str]]:
+ """Resolve feature-instance IDs to labels. Returns None for unresolvable."""
from modules.interfaces.interfaceDbApp import getRootInterface
from modules.interfaces.interfaceFeatures import getFeatureInterface
rootIface = getRootInterface()
featureIface = getFeatureInterface(rootIface.db)
- result: Dict[str, str] = {}
+ result: Dict[str, Optional[str]] = {}
for iid in ids:
fi = featureIface.getFeatureInstance(iid)
- result[iid] = fi.label if fi and fi.label else iid
+ label = fi.label if fi and fi.label else None
+ if not label:
+ logger.warning("resolveInstanceLabels: no label for id=%s (found=%s)", iid, fi is not None)
+ result[iid] = label
return result
-def _resolveUserLabels(ids: List[str]) -> Dict[str, str]:
+def resolveUserLabels(ids: List[str]) -> Dict[str, Optional[str]]:
+ """Resolve user IDs to display names. Returns None for unresolvable."""
from modules.interfaces.interfaceDbApp import getRootInterface
rootIface = getRootInterface()
+ from modules.datamodels.datamodelUam import User as _User
+ uniqueIds = list(set(ids))
users = rootIface.db.getRecordset(
- __import__("modules.datamodels.datamodelUam", fromlist=["User"]).User,
- recordFilter={"id": list(set(ids))},
+ _User,
+ recordFilter={"id": uniqueIds},
)
- result: Dict[str, str] = {}
+ if not users and uniqueIds:
+ logger.warning(
+ "resolveUserLabels: query returned 0 users for %d ids (db=%s, table=%s). "
+ "Attempting full table scan...",
+ len(uniqueIds), getattr(rootIface.db, 'dbDatabase', '?'), _User.__name__,
+ )
+ allUsers = rootIface.db.getRecordset(_User)
+ logger.warning(
+ "resolveUserLabels: full scan found %d users total. Looking for ids: %s",
+ len(allUsers or []), uniqueIds[:3],
+ )
+ users = [u for u in (allUsers or []) if u.get("id") in set(uniqueIds)]
+ result: Dict[str, Optional[str]] = {}
+ found: Dict[str, dict] = {}
for u in (users or []):
uid = u.get("id", "")
- result[uid] = u.get("username") or u.get("email") or uid
+ found[uid] = u
+ for uid in ids:
+ u = found.get(uid)
+ if u:
+ result[uid] = u.get("username") or u.get("email") or None
+ else:
+ logger.warning("resolveUserLabels: user not found for id=%s", uid)
+ result[uid] = None
return result
+def resolveRoleLabels(ids: List[str]) -> Dict[str, Optional[str]]:
+ """Resolve Role.id to roleLabel. Returns None for unresolvable."""
+ if not ids:
+ return {}
+ from modules.interfaces.interfaceDbApp import getRootInterface
+ from modules.datamodels.datamodelRbac import Role as _Role
+ rootIface = getRootInterface()
+ recs = rootIface.db.getRecordset(
+ _Role,
+ recordFilter={"id": list(set(ids))},
+ ) or []
+ out: Dict[str, Optional[str]] = {i: None for i in ids}
+ for r in recs:
+ rid = r.get("id")
+ if rid:
+ out[rid] = r.get("roleLabel") or None
+ for rid in ids:
+ if out.get(rid) is None:
+ logger.warning("resolveRoleLabels: no label for id=%s", rid)
+ return out
+
+
_BUILTIN_FK_RESOLVERS: Dict[str, Callable[[List[str]], Dict[str, str]]] = {
- "Mandate": _resolveMandateLabels,
- "FeatureInstance": _resolveInstanceLabels,
- "User": _resolveUserLabels,
+ "Mandate": resolveMandateLabels,
+ "FeatureInstance": resolveInstanceLabels,
+ "User": resolveUserLabels,
+ "Role": resolveRoleLabels,
}
def _buildLabelResolversFromModel(modelClass: type) -> Dict[str, Callable[[List[str]], Dict[str, str]]]:
"""
- Auto-build labelResolvers dict from fk_model annotations on a Pydantic model.
- Maps field names to resolver functions for all fields that have a known fk_model.
+ Auto-build labelResolvers dict from fk_model / fk_target annotations on a Pydantic model.
+ Maps field names to resolver functions for all fields that have a known FK target.
+ Unlike ``_get_fk_sort_meta`` this does NOT require ``fk_label_field`` — the
+ builtin resolvers already know which column to read.
"""
- from modules.connectors.connectorDbPostgre import _get_fk_sort_meta
- fkMeta = _get_fk_sort_meta(modelClass)
resolvers: Dict[str, Callable[[List[str]], Dict[str, str]]] = {}
- for fieldName, meta in fkMeta.items():
- fkModelName = meta.get("model", "")
- if fkModelName in _BUILTIN_FK_RESOLVERS:
- resolvers[fieldName] = _BUILTIN_FK_RESOLVERS[fkModelName]
+ for name, fieldInfo in modelClass.model_fields.items():
+ extra = fieldInfo.json_schema_extra
+ if not extra or not isinstance(extra, dict):
+ continue
+ fkModel = extra.get("fk_model")
+ tgt = extra.get("fk_target")
+ if not fkModel and isinstance(tgt, dict):
+ fkModel = tgt.get("table")
+ if fkModel and fkModel in _BUILTIN_FK_RESOLVERS:
+ resolvers[name] = _BUILTIN_FK_RESOLVERS[fkModel]
return resolvers
+def enrichRowsWithFkLabels(
+ rows: List[Dict[str, Any]],
+ modelClass: type = None,
+ *,
+ labelResolvers: Optional[Dict[str, Callable[[List[str]], Dict[str, Optional[str]]]]] = None,
+ extraResolvers: Optional[Dict[str, Callable[[List[str]], Dict[str, Optional[str]]]]] = None,
+) -> List[Dict[str, Any]]:
+ """Add ``{field}Label`` columns to each row for every FK field that has a
+ registered resolver.
+
+ ``modelClass`` — if provided, resolvers are auto-built from ``fk_model``
+ annotations on the Pydantic model (via ``_buildLabelResolversFromModel``).
+
+ ``labelResolvers`` — explicit resolver map that overrides auto-built ones.
+
+ ``extraResolvers`` — merged on top of auto-built / explicit resolvers. Use
+ for ad-hoc fields that are not FK-annotated on the model (e.g.
+ ``createdByUserId`` on billing transactions).
+
+ If a label cannot be resolved the ``{field}Label`` value is ``None``
+ (never the raw ID — that would reintroduce the silent-truncation bug).
+ """
+ resolvers: Dict[str, Callable] = {}
+
+ if modelClass is not None and labelResolvers is None:
+ resolvers = _buildLabelResolversFromModel(modelClass)
+ elif labelResolvers is not None:
+ resolvers = dict(labelResolvers)
+
+ if extraResolvers:
+ resolvers.update(extraResolvers)
+
+ if not resolvers or not rows:
+ return rows
+
+ for field, resolver in resolvers.items():
+ ids = list({str(r.get(field)) for r in rows if r.get(field)})
+ if not ids:
+ continue
+ try:
+ labelMap = resolver(ids)
+ except Exception as e:
+ logger.error("enrichRowsWithFkLabels: resolver for '%s' raised: %s", field, e)
+ labelMap = {}
+
+ labelKey = f"{field}Label"
+ for r in rows:
+ fkVal = r.get(field)
+ if fkVal:
+ r[labelKey] = labelMap.get(str(fkVal))
+ else:
+ r[labelKey] = None
+
+ return rows
+
+
# ---------------------------------------------------------------------------
# Cross-filter pagination parsing
# ---------------------------------------------------------------------------
@@ -210,7 +329,7 @@ def handleIdsMode(
# In-memory helpers (for enriched / non-SQL routes)
# ---------------------------------------------------------------------------
-def _applyFiltersAndSort(
+def applyFiltersAndSort(
items: List[Dict[str, Any]],
paginationParams: Optional[PaginationParams],
) -> List[Dict[str, Any]]:
@@ -364,12 +483,21 @@ def _extractDistinctValues(
items: List[Dict[str, Any]],
columnKey: str,
requestLang: Optional[str] = None,
-) -> List[str]:
- """Extract sorted distinct display values for a column from enriched items."""
+) -> List[Optional[str]]:
+ """Extract sorted distinct display values for a column from enriched items.
+
+ Includes ``None`` as the last entry when at least one row has a null/empty
+ value — this enables the "(Leer)" filter option in the frontend.
+ """
+ _MISSING = object()
values = set()
+ hasEmpty = False
for item in items:
- val = item.get(columnKey)
+ val = item.get(columnKey, _MISSING)
+ if val is _MISSING:
+ continue
if val is None or val == "":
+ hasEmpty = True
continue
if isinstance(val, bool):
values.add("true" if val else "false")
@@ -381,7 +509,10 @@ def _extractDistinctValues(
values.add(text)
else:
values.add(str(val))
- return sorted(values, key=lambda v: v.lower())
+ result: List[Optional[str]] = sorted(values, key=lambda v: v.lower())
+ if hasEmpty:
+ result.append(None)
+ return result
def handleFilterValuesInMemory(
@@ -396,7 +527,7 @@ def handleFilterValuesInMemory(
Returns JSONResponse to bypass FastAPI response_model validation.
"""
crossFilterParams = parseCrossFilterPagination(column, paginationJson)
- crossFiltered = _applyFiltersAndSort(items, crossFilterParams)
+ crossFiltered = applyFiltersAndSort(items, crossFilterParams)
return JSONResponse(content=_extractDistinctValues(crossFiltered, column, requestLang))
@@ -411,7 +542,7 @@ def handleIdsInMemory(
Returns JSONResponse to bypass FastAPI response_model validation.
"""
pagination = parsePaginationForIds(paginationJson)
- filtered = _applyFiltersAndSort(items, pagination)
+ filtered = applyFiltersAndSort(items, pagination)
ids = []
for item in filtered:
val = item.get(idField)
@@ -510,6 +641,7 @@ def getRecordsetPaginatedWithFkSort(
idOrder = {pid: idx for idx, pid in enumerate(pageIds)}
pageItems.sort(key=lambda r: idOrder.get(r.get(idField), 999999))
+ enrichRowsWithFkLabels(pageItems, modelClass)
totalPages = math.ceil(totalItems / pageSize) if totalItems > 0 else 0
return {"items": pageItems, "totalItems": totalItems, "totalPages": totalPages}
diff --git a/modules/routes/routeI18n.py b/modules/routes/routeI18n.py
index cadf128e..927d1bf2 100644
--- a/modules/routes/routeI18n.py
+++ b/modules/routes/routeI18n.py
@@ -26,7 +26,7 @@ from fastapi.responses import Response
from pydantic import BaseModel, Field
from modules.auth import getCurrentUser, requireSysAdmin, requirePlatformAdmin
-from modules.connectors.connectorDbPostgre import _get_cached_connector
+from modules.connectors.connectorDbPostgre import getCachedConnector
from modules.datamodels.datamodelAi import (
AiCallOptions,
AiCallRequest,
@@ -40,11 +40,11 @@ from modules.datamodels.datamodelRbac import Role
from modules.datamodels.datamodelFeatures import Feature
from modules.datamodels.datamodelNotification import NotificationType
from modules.interfaces.interfaceDbManagement import getInterface as getMgmtInterface
-from modules.routes.routeNotifications import _createNotification
+from modules.routes.routeNotifications import createNotification
from modules.shared.configuration import APP_CONFIG
from modules.shared.i18nRegistry import (
_enforceSourcePlaceholders,
- _loadCache as _reloadI18nCache,
+ loadCache as _reloadI18nCache,
apiRouteContext,
)
from modules.shared.timeUtils import getUtcTimestamp
@@ -109,7 +109,7 @@ _ISO_PRIORITY_CODES: List[str] = ["de", "gsw", "en", "fr", "it"]
# ---------------------------------------------------------------------------
def _publicMgmtDb():
- return _get_cached_connector(
+ return getCachedConnector(
dbHost=APP_CONFIG.get("DB_HOST", "localhost"),
dbDatabase="poweron_management",
dbUser=APP_CONFIG.get("DB_USER"),
@@ -729,7 +729,7 @@ async def _run_create_language_job_async(userId: str, code: str, label: str, cur
tmCount = await _translateTextMultilingualFields(db, code, label, billingCb)
- _createNotification(
+ createNotification(
userId,
NotificationType.SYSTEM,
title="Sprachset erstellt",
@@ -739,7 +739,7 @@ async def _run_create_language_job_async(userId: str, code: str, label: str, cur
logger.info("i18n create job done: code=%s, translated=%d/%d, tm_fields=%d", code, len(translated), len(xxEntries), tmCount)
except Exception as e:
logger.exception("create language job failed: %s", e)
- _createNotification(
+ createNotification(
userId,
NotificationType.SYSTEM,
title="Sprachset fehlgeschlagen",
@@ -790,7 +790,7 @@ async def create_language_set(
db.recordCreate(UiLanguageSet, rec)
background.add_task(_run_create_language_job, uid, code, resolvedLabel, currentUser, mandateId)
- _createNotification(
+ createNotification(
uid,
NotificationType.SYSTEM,
title="Sprachset wird erzeugt",
diff --git a/modules/routes/routeInvitations.py b/modules/routes/routeInvitations.py
index 7e852b54..8138775f 100644
--- a/modules/routes/routeInvitations.py
+++ b/modules/routes/routeInvitations.py
@@ -21,7 +21,7 @@ from pydantic import BaseModel, Field, model_validator
from modules.auth import limiter, getRequestContext, RequestContext, getCurrentUser
from modules.datamodels.datamodelUam import User
from modules.datamodels.datamodelPagination import PaginationParams, PaginationMetadata, normalize_pagination_dict
-from modules.routes.routeHelpers import _applyFiltersAndSort, handleFilterValuesInMemory, handleIdsInMemory
+from modules.routes.routeHelpers import applyFiltersAndSort, handleFilterValuesInMemory, handleIdsInMemory, enrichRowsWithFkLabels
from modules.datamodels.datamodelInvitation import Invitation
from modules.interfaces.interfaceDbApp import getRootInterface
from modules.shared.timeUtils import getUtcTimestamp
@@ -302,8 +302,8 @@ def create_invitation(
emailSubject = f"Einladung zu {mandateName}"
invite_desc = f"dem Mandanten «{mandateName}» beizutreten"
- from modules.routes.routeSecurityLocal import _buildAuthEmailHtml
- emailBody = _buildAuthEmailHtml(
+ from modules.routes.routeSecurityLocal import buildAuthEmailHtml
+ emailBody = buildAuthEmailHtml(
greeting=f"Hallo {display_name}",
bodyLines=[
f"Sie wurden eingeladen, {invite_desc}.",
@@ -496,20 +496,22 @@ def list_invitations(
raise HTTPException(status_code=400, detail=f"Invalid pagination parameter: {str(e)}")
if paginationParams:
- filtered = _applyFiltersAndSort(result, paginationParams)
+ filtered = applyFiltersAndSort(result, paginationParams)
totalItems = len(filtered)
totalPages = math.ceil(totalItems / paginationParams.pageSize) if totalItems > 0 else 0
startIdx = (paginationParams.page - 1) * paginationParams.pageSize
endIdx = startIdx + paginationParams.pageSize
+ enriched = enrichRowsWithFkLabels(filtered[startIdx:endIdx], Invitation)
return {
- "items": filtered[startIdx:endIdx],
+ "items": enriched,
"pagination": PaginationMetadata(
currentPage=paginationParams.page, pageSize=paginationParams.pageSize,
totalItems=totalItems, totalPages=totalPages,
sort=paginationParams.sort, filters=paginationParams.filters,
).model_dump(),
}
- return result
+ enriched = enrichRowsWithFkLabels(result, Invitation)
+ return {"items": enriched, "pagination": None}
except HTTPException:
raise
@@ -809,13 +811,13 @@ def accept_invitation(
if featureInstanceId:
existingAccess = rootInterface.getFeatureAccess(str(currentUser.id), featureInstanceId)
if existingAccess:
- # Update existing access with additional roles
+ # Update existing access with additional roles. addRoleToFeatureAccess
+ # is already idempotent (returns silently when the role is already
+ # assigned), so any exception here is a real error and must be
+ # surfaced — not swallowed.
featureAccessId = str(existingAccess.id)
for roleId in roleIds:
- try:
- rootInterface.addRoleToFeatureAccess(str(existingAccess.id), roleId)
- except Exception:
- pass # Role might already be assigned
+ rootInterface.addRoleToFeatureAccess(str(existingAccess.id), roleId)
message = "Roles updated for existing feature access"
else:
# Create feature access with instance-level roles
@@ -828,14 +830,13 @@ def accept_invitation(
featureAccessId = str(featureAccess.id)
message = "Successfully joined feature instance"
else:
- # Legacy: mandate-only invitation (no feature instance)
+ # Legacy: mandate-only invitation (no feature instance).
+ # addRoleToUserMandate is already idempotent — any exception here
+ # is a real error (e.g. DB / FK constraint) and must propagate.
existingMembership = rootInterface.getUserMandate(str(currentUser.id), mandateId)
if existingMembership:
for roleId in roleIds:
- try:
- rootInterface.addRoleToUserMandate(str(existingMembership.id), roleId)
- except Exception:
- pass
+ rootInterface.addRoleToUserMandate(str(existingMembership.id), roleId)
message = "Roles updated for existing membership"
else:
rootInterface.createUserMandate(
diff --git a/modules/routes/routeNotifications.py b/modules/routes/routeNotifications.py
index 41d7fe26..c1cacb17 100644
--- a/modules/routes/routeNotifications.py
+++ b/modules/routes/routeNotifications.py
@@ -52,7 +52,7 @@ class UnreadCountResponse(BaseModel):
# Helper Functions
# =============================================================================
-def _createNotification(
+def createNotification(
userId: str,
notificationType: NotificationType,
title: str,
@@ -103,7 +103,7 @@ def create_access_change_notification(
Failures are logged only so RBAC mutations still succeed.
"""
try:
- _createNotification(
+ createNotification(
userId=userId,
notificationType=NotificationType.SYSTEM,
title=title,
@@ -132,7 +132,7 @@ def createInvitationNotification(
msg = f"{inviterName} hat Sie zur Feature-Instanz '{featureInstanceName}' eingeladen."
else:
msg = f"{inviterName} hat Sie zu '{mandateName}' eingeladen."
- return _createNotification(
+ return createNotification(
userId=userId,
notificationType=NotificationType.INVITATION,
title="Neue Einladung",
diff --git a/modules/routes/routeSecurityLocal.py b/modules/routes/routeSecurityLocal.py
index b6227cb0..807d5192 100644
--- a/modules/routes/routeSecurityLocal.py
+++ b/modules/routes/routeSecurityLocal.py
@@ -28,7 +28,7 @@ routeApiMsg = apiRouteContext("routeSecurityLocal")
logger = logging.getLogger(__name__)
-def _buildAuthEmailHtml(
+def buildAuthEmailHtml(
greeting: str,
bodyLines: list,
buttonText: str = None,
@@ -118,7 +118,7 @@ def _buildAuthEmailHtml(