cleanup internal marked exports

This commit is contained in:
ValueOn AG 2026-04-26 08:31:35 +02:00
parent 794ba36f27
commit 24f0c3e2eb
104 changed files with 2983 additions and 1055 deletions

14
app.py
View file

@ -327,9 +327,9 @@ async def lifespan(app: FastAPI):
# Sync gateway i18n registry to DB and load translation cache
try:
from modules.shared.i18nRegistry import _syncRegistryToDb, _loadCache
await _syncRegistryToDb()
await _loadCache()
from modules.shared.i18nRegistry import syncRegistryToDb, loadCache
await syncRegistryToDb()
await loadCache()
logger.info("i18n registry sync + cache load completed")
except Exception as e:
logger.warning(f"i18n registry sync failed (non-critical): {e}")
@ -522,15 +522,15 @@ from modules.auth import (
# Per-request context middleware: language (Accept-Language) + user timezone (X-User-Timezone).
# Both are written into ContextVars and consumed by t() / resolveText() and getRequestNow()
# without having to thread them through every call site.
from modules.shared.i18nRegistry import _setLanguage, normalizePrimaryLanguageTag
from modules.shared.timeUtils import _setRequestTimezone
from modules.shared.i18nRegistry import setLanguage, normalizePrimaryLanguageTag
from modules.shared.timeUtils import setRequestTimezone
@app.middleware("http")
async def _requestContextMiddleware(request: Request, call_next):
acceptLang = request.headers.get("Accept-Language", "")
lang = normalizePrimaryLanguageTag(acceptLang, "de")
_setLanguage(lang)
_setRequestTimezone(request.headers.get("X-User-Timezone", ""))
setLanguage(lang)
setRequestTimezone(request.headers.get("X-User-Timezone", ""))
return await call_next(request)
app.add_middleware(CSRFMiddleware)

View file

@ -76,7 +76,7 @@ def _isJsonbType(fieldType) -> bool:
return False
def _get_model_fields(model_class) -> Dict[str, str]:
def getModelFields(model_class) -> Dict[str, str]:
"""Get all fields from Pydantic model and map to SQL types.
Supports explicit db_type override via json_schema_extra={"db_type": "vector(1536)"}.
@ -122,21 +122,27 @@ def _get_model_fields(model_class) -> Dict[str, str]:
def _get_fk_sort_meta(model_class) -> Dict[str, Dict[str, str]]:
"""Map FK field name -> {model, labelField} from json_schema_extra (fk_model + frontend_fk_display_field)."""
"""Map FK field name -> {model, labelField} from json_schema_extra (``fk_model`` + ``fk_label_field``).
``fk_model`` may be omitted if ``fk_target.table`` is set (table name = resolver / JOIN key).
"""
result: Dict[str, Dict[str, str]] = {}
for name, field_info in model_class.model_fields.items():
extra = field_info.json_schema_extra
if not extra or not isinstance(extra, dict):
continue
fk_model = extra.get("fk_model")
label_field = extra.get("frontend_fk_display_field")
tgt = extra.get("fk_target")
if not fk_model and isinstance(tgt, dict) and tgt.get("table"):
fk_model = tgt["table"]
label_field = extra.get("fk_label_field")
if fk_model and label_field:
result[name] = {"model": str(fk_model), "labelField": str(label_field)}
return result
def _parseRecordFields(record: Dict[str, Any], fields: Dict[str, str], context: str = "") -> None:
def parseRecordFields(record: Dict[str, Any], fields: Dict[str, str], context: str = "") -> None:
"""Parse record fields in-place: numeric typing, vector parsing, JSONB deserialization."""
import json as _json
@ -189,7 +195,7 @@ _current_user_id: contextvars.ContextVar[Optional[str]] = contextvars.ContextVar
)
def _get_cached_connector(
def getCachedConnector(
dbHost: str,
dbDatabase: str,
dbUser: str = None,
@ -553,7 +559,7 @@ class DatabaseConnector:
}
# Desired columns based on model
model_fields = _get_model_fields(model_class)
model_fields = getModelFields(model_class)
desired_columns = set(["id"]) | set(model_fields.keys())
# Add missing columns
@ -633,7 +639,7 @@ class DatabaseConnector:
def _create_table_from_model(self, cursor, table: str, model_class: type) -> None:
"""Create table with columns matching Pydantic model fields."""
fields = _get_model_fields(model_class)
fields = getModelFields(model_class)
# Enable pgvector if any field uses vector type
if any(_isVectorType(sqlType) for sqlType in fields.values()):
@ -666,7 +672,7 @@ class DatabaseConnector:
) -> None:
"""Save record to normalized table with explicit columns."""
# Get columns from Pydantic model instead of database schema
fields = _get_model_fields(model_class)
fields = getModelFields(model_class)
columns = ["id"] + [field for field in fields.keys() if field != "id"]
if not columns:
@ -751,9 +757,9 @@ class DatabaseConnector:
# Convert row to dict and handle JSONB fields
record = dict(row)
fields = _get_model_fields(model_class)
fields = getModelFields(model_class)
_parseRecordFields(record, fields, f"record {recordId}")
parseRecordFields(record, fields, f"record {recordId}")
return record
except Exception as e:
@ -822,10 +828,10 @@ class DatabaseConnector:
cursor.execute(f'SELECT * FROM "{table}" ORDER BY "id"')
records = [dict(row) for row in cursor.fetchall()]
fields = _get_model_fields(model_class)
fields = getModelFields(model_class)
modelFields = model_class.model_fields
for record in records:
_parseRecordFields(record, fields, f"table {table}")
parseRecordFields(record, fields, f"table {table}")
# Set type-aware defaults for NULL JSONB fields
for fieldName, fieldType in fields.items():
if fieldType == "JSONB" and fieldName in record and record[fieldName] is None:
@ -1011,10 +1017,10 @@ class DatabaseConnector:
cursor.execute(query, where_values)
records = [dict(row) for row in cursor.fetchall()]
fields = _get_model_fields(model_class)
fields = getModelFields(model_class)
modelFields = model_class.model_fields
for record in records:
_parseRecordFields(record, fields, f"table {table}")
parseRecordFields(record, fields, f"table {table}")
for fieldName, fieldType in fields.items():
if fieldType == "JSONB" and fieldName in record and record[fieldName] is None:
fieldInfo = modelFields.get(fieldName)
@ -1055,7 +1061,7 @@ class DatabaseConnector:
Translate PaginationParams + recordFilter into SQL clauses.
Returns (where_clause, order_clause, limit_clause, values, count_values).
"""
fields = _get_model_fields(model_class)
fields = getModelFields(model_class)
validColumns = set(fields.keys())
where_parts: List[str] = []
@ -1214,10 +1220,10 @@ class DatabaseConnector:
cursor.execute(dataSql, values)
records = [dict(row) for row in cursor.fetchall()]
fields = _get_model_fields(model_class)
fields = getModelFields(model_class)
modelFields = model_class.model_fields
for record in records:
_parseRecordFields(record, fields, f"table {table}")
parseRecordFields(record, fields, f"table {table}")
for fieldName, fieldType in fields.items():
if fieldType == "JSONB" and fieldName in record and record[fieldName] is None:
fieldInfo = modelFields.get(fieldName)
@ -1235,10 +1241,13 @@ class DatabaseConnector:
if fieldFilter and isinstance(fieldFilter, list):
records = [{f: r[f] for f in fieldFilter if f in r} for r in records]
pageSize = pagination.pageSize if pagination else max(totalItems, 1)
totalPages = math.ceil(totalItems / pageSize) if totalItems > 0 else 0
from modules.routes.routeHelpers import enrichRowsWithFkLabels
enrichRowsWithFkLabels(records, model_class)
return {"items": records, "totalItems": totalItems, "totalPages": totalPages}
pageSize = pagination.pageSize if pagination else max(totalItems, 1)
totalPages = math.ceil(totalItems / pageSize) if totalItems > 0 else 0
return {"items": records, "totalItems": totalItems, "totalPages": totalPages}
except Exception as e:
logger.error(f"Error in getRecordsetPaginated for table {table}: {e}")
return {"items": [], "totalItems": 0, "totalPages": 0}
@ -1249,13 +1258,18 @@ class DatabaseConnector:
column: str,
pagination=None,
recordFilter: Dict[str, Any] = None,
) -> List[str]:
"""
Returns sorted distinct non-null values for a column using SQL DISTINCT.
includeEmpty: bool = True,
) -> List[Optional[str]]:
"""Return sorted distinct values for a column using SQL DISTINCT.
When ``includeEmpty`` is True (default), NULL and empty-string rows are
represented as a single ``None`` entry at the end of the list this
allows the frontend to offer a "(Leer)" filter option.
Applies cross-filtering (all filters except the requested column).
"""
table = model_class.__name__
fields = _get_model_fields(model_class)
fields = getModelFields(model_class)
if column not in fields:
return []
@ -1274,18 +1288,28 @@ class DatabaseConnector:
where_clause, _, _, values, _ = \
self._buildPaginationClauses(model_class, pagination, recordFilter)
sql = (
f'SELECT DISTINCT "{column}"::TEXT AS val FROM "{table}"{where_clause} '
f'WHERE "{column}" IS NOT NULL AND "{column}"::TEXT != \'\' '
if not where_clause else
f'SELECT DISTINCT "{column}"::TEXT AS val FROM "{table}"{where_clause} '
f'AND "{column}" IS NOT NULL AND "{column}"::TEXT != \'\' '
)
sql += 'ORDER BY val'
nonNullCond = f'"{column}" IS NOT NULL AND "{column}"::TEXT != \'\''
if where_clause:
sql = f'SELECT DISTINCT "{column}"::TEXT AS val FROM "{table}"{where_clause} AND {nonNullCond} ORDER BY val'
else:
sql = f'SELECT DISTINCT "{column}"::TEXT AS val FROM "{table}" WHERE {nonNullCond} ORDER BY val'
with self.connection.cursor() as cursor:
cursor.execute(sql, values)
return [row["val"] for row in cursor.fetchall()]
result: List[Optional[str]] = [row["val"] for row in cursor.fetchall()]
if includeEmpty:
emptyCond = f'"{column}" IS NULL OR "{column}"::TEXT = \'\''
if where_clause:
emptySql = f'SELECT 1 FROM "{table}"{where_clause} AND ({emptyCond}) LIMIT 1'
else:
emptySql = f'SELECT 1 FROM "{table}" WHERE ({emptyCond}) LIMIT 1'
with self.connection.cursor() as cursor:
cursor.execute(emptySql, values)
if cursor.fetchone():
result.append(None)
return result
except Exception as e:
logger.error(f"Error in getDistinctColumnValues for {table}.{column}: {e}")
return []
@ -1419,7 +1443,7 @@ class DatabaseConnector:
if not self._ensureTableExists(model_class):
raise ValueError(f"Table {table} does not exist")
fields = _get_model_fields(model_class)
fields = getModelFields(model_class)
columns = ["id"] + [f for f in fields.keys() if f != "id"]
modelFields = model_class.model_fields
@ -1541,7 +1565,7 @@ class DatabaseConnector:
if not self._ensureTableExists(model_class):
return 0
fields = _get_model_fields(model_class)
fields = getModelFields(model_class)
clauses: List[str] = []
params: List[Any] = []
for key, val in recordFilter.items():
@ -1659,9 +1683,9 @@ class DatabaseConnector:
cursor.execute(query, params)
records = [dict(row) for row in cursor.fetchall()]
fields = _get_model_fields(modelClass)
fields = getModelFields(modelClass)
for record in records:
_parseRecordFields(record, fields, f"semanticSearch {table}")
parseRecordFields(record, fields, f"semanticSearch {table}")
return records
except Exception as e:

View file

@ -8,12 +8,12 @@ from pydantic import BaseModel, Field
from modules.shared.i18nRegistry import i18nModel
_MODEL_REGISTRY: Dict[str, Type["PowerOnModel"]] = {}
MODEL_REGISTRY: Dict[str, Type["PowerOnModel"]] = {}
def _getModelByTableName(tableName: str) -> Optional[Type["PowerOnModel"]]:
"""Look up a PowerOnModel subclass by its table name (= class name)."""
return _MODEL_REGISTRY.get(tableName)
return MODEL_REGISTRY.get(tableName)
@i18nModel("Basisdatensatz")
@ -22,7 +22,7 @@ class PowerOnModel(BaseModel):
def __init_subclass__(cls, **kwargs):
super().__init_subclass__(**kwargs)
_MODEL_REGISTRY[cls.__name__] = cls
MODEL_REGISTRY[cls.__name__] = cls
sysCreatedAt: Optional[float] = Field(
default=None,
@ -46,6 +46,9 @@ class PowerOnModel(BaseModel):
"frontend_required": False,
"frontend_visible": False,
"system": True,
"fk_model": "User",
"fk_label_field": "username",
"fk_target": {"db": "poweron_app", "table": "User"},
},
)
sysModifiedAt: Optional[float] = Field(
@ -70,5 +73,8 @@ class PowerOnModel(BaseModel):
"frontend_required": False,
"frontend_visible": False,
"system": True,
"fk_model": "User",
"fk_label_field": "username",
"fk_target": {"db": "poweron_app", "table": "User"},
},
)

View file

@ -30,9 +30,8 @@ class FileItem(PowerOnModel):
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": False,
"frontend_fk_source": "/api/mandates/",
"frontend_fk_display_field": "label",
"fk_model": "Mandate",
"fk_label_field": "label",
"fk_target": {"db": "poweron_app", "table": "Mandate"},
},
)
@ -44,9 +43,8 @@ class FileItem(PowerOnModel):
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": False,
"frontend_fk_source": "/api/features/instances",
"frontend_fk_display_field": "label",
"fk_model": "FeatureInstance",
"fk_label_field": "label",
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
},
)

View file

@ -31,9 +31,8 @@ class UserMandate(PowerOnModel):
"frontend_type": "select",
"frontend_readonly": False,
"frontend_required": True,
"frontend_fk_source": "/api/users/",
"frontend_fk_display_field": "username",
"fk_model": "User",
"fk_label_field": "username",
"fk_target": {"db": "poweron_app", "table": "User"},
},
)
@ -44,9 +43,8 @@ class UserMandate(PowerOnModel):
"frontend_type": "select",
"frontend_readonly": False,
"frontend_required": True,
"frontend_fk_source": "/api/mandates/",
"frontend_fk_display_field": "label",
"fk_model": "Mandate",
"fk_label_field": "label",
"fk_target": {"db": "poweron_app", "table": "Mandate"},
},
)
@ -75,8 +73,8 @@ class FeatureAccess(PowerOnModel):
"frontend_type": "select",
"frontend_readonly": False,
"frontend_required": True,
"frontend_fk_source": "/api/users/",
"frontend_fk_display_field": "username",
"fk_model": "User",
"fk_label_field": "username",
"fk_target": {"db": "poweron_app", "table": "User"},
},
)
@ -87,8 +85,8 @@ class FeatureAccess(PowerOnModel):
"frontend_type": "select",
"frontend_readonly": False,
"frontend_required": True,
"frontend_fk_source": "/api/features/instances",
"frontend_fk_display_field": "label",
"fk_model": "FeatureInstance",
"fk_label_field": "label",
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
},
)
@ -127,8 +125,8 @@ class UserMandateRole(PowerOnModel):
"frontend_type": "select",
"frontend_readonly": False,
"frontend_required": True,
"frontend_fk_source": "/api/rbac/roles",
"frontend_fk_display_field": "roleLabel",
"fk_model": "Role",
"fk_label_field": "roleLabel",
"fk_target": {"db": "poweron_app", "table": "Role"},
},
)
@ -162,8 +160,8 @@ class FeatureAccessRole(PowerOnModel):
"frontend_type": "select",
"frontend_readonly": False,
"frontend_required": True,
"frontend_fk_source": "/api/rbac/roles",
"frontend_fk_display_field": "roleLabel",
"fk_model": "Role",
"fk_label_field": "roleLabel",
"fk_target": {"db": "poweron_app", "table": "Role"},
},
)

View file

@ -63,8 +63,8 @@ class Role(PowerOnModel):
"frontend_readonly": True,
"frontend_visible": True,
"frontend_required": False,
"frontend_fk_source": "/api/mandates/",
"frontend_fk_display_field": "label",
"fk_model": "Mandate",
"fk_label_field": "label",
"fk_target": {"db": "poweron_app", "table": "Mandate"},
},
)
@ -77,8 +77,8 @@ class Role(PowerOnModel):
"frontend_readonly": True,
"frontend_visible": True,
"frontend_required": False,
"frontend_fk_source": "/api/features/instances",
"frontend_fk_display_field": "label",
"fk_model": "FeatureInstance",
"fk_label_field": "label",
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
},
)
@ -115,8 +115,8 @@ class AccessRule(PowerOnModel):
"frontend_type": "select",
"frontend_readonly": True,
"frontend_required": True,
"frontend_fk_source": "/api/rbac/roles",
"frontend_fk_display_field": "roleLabel",
"fk_model": "Role",
"fk_label_field": "roleLabel",
"fk_target": {"db": "poweron_app", "table": "Role"},
},
)

View file

@ -407,7 +407,7 @@ BUILTIN_PLANS: Dict[str, SubscriptionPlan] = {
}
def _getPlan(planKey: str) -> Optional[SubscriptionPlan]:
def getPlan(planKey: str) -> Optional[SubscriptionPlan]:
"""Resolve a plan by key from the built-in catalog."""
return BUILTIN_PLANS.get(planKey)

View file

@ -397,6 +397,8 @@ class UserConnection(PowerOnModel):
"frontend_readonly": True,
"frontend_required": False,
"label": "Benutzer-ID",
"fk_model": "User",
"fk_label_field": "username",
"fk_target": {"db": "poweron_app", "table": "User"},
},
)
@ -650,7 +652,7 @@ class UserInDB(User):
)
def _normalizeTtsVoiceMap(value: Any) -> Optional[Dict[str, str]]:
def normalizeTtsVoiceMap(value: Any) -> Optional[Dict[str, str]]:
"""
Coerce ttsVoiceMap payloads to Dict[str, str].
@ -728,6 +730,6 @@ class UserVoicePreferences(PowerOnModel):
@field_validator("ttsVoiceMap", mode="before")
@classmethod
def _validateTtsVoiceMap(cls, value: Any) -> Optional[Dict[str, str]]:
return _normalizeTtsVoiceMap(value)
return normalizeTtsVoiceMap(value)

View file

@ -177,7 +177,7 @@ def _groupKeyForPart(part: ContentPart) -> Tuple[str, int, str]:
_VALID_DOC_SOURCES = frozenset({"pdf", "docx", "pptx", "xlsx", "html", "binary", "unknown"})
def _contentPartsToUdm(extracted: ContentExtracted, sourceType: str, sourcePath: str) -> UdmDocument:
def contentPartsToUdm(extracted: ContentExtracted, sourceType: str, sourcePath: str) -> UdmDocument:
"""Convert flat ContentPart list into a UdmDocument using structural heuristics."""
parts = list(extracted.parts or [])
st: Literal["pdf", "docx", "pptx", "xlsx", "html", "binary", "unknown"] = (
@ -290,7 +290,7 @@ def _stripUdmForReferences(udm: UdmDocument) -> UdmDocument:
return clone
def _applyUdmOutputDetail(udm: UdmDocument, detail: str) -> UdmDocument:
def applyUdmOutputDetail(udm: UdmDocument, detail: str) -> UdmDocument:
if detail == "structure":
return _stripUdmRaw(udm)
if detail == "references":
@ -298,7 +298,7 @@ def _applyUdmOutputDetail(udm: UdmDocument, detail: str) -> UdmDocument:
return udm
def _mimeToUdmSourceType(mimeType: str, fileName: str) -> Literal["pdf", "docx", "pptx", "xlsx", "html", "binary", "unknown"]:
def mimeToUdmSourceType(mimeType: str, fileName: str) -> Literal["pdf", "docx", "pptx", "xlsx", "html", "binary", "unknown"]:
m = (mimeType or "").lower()
fn = (fileName or "").lower()
if m == "application/pdf" or fn.endswith(".pdf"):

View file

@ -27,6 +27,8 @@ class Prompt(PowerOnModel):
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": False,
"fk_model": "Mandate",
"fk_label_field": "label",
"fk_target": {"db": "poweron_app", "table": "Mandate"},
},
)

View file

@ -2,7 +2,7 @@
Demo Configs Auto-Discovery Module
Scans this folder for Python files that contain subclasses of _BaseDemoConfig
and exposes them via _getAvailableDemoConfigs().
and exposes them via getAvailableDemoConfigs().
"""
import importlib
@ -18,7 +18,7 @@ logger = logging.getLogger(__name__)
_configCache: Dict[str, _BaseDemoConfig] = {}
def _getAvailableDemoConfigs() -> Dict[str, _BaseDemoConfig]:
def getAvailableDemoConfigs() -> Dict[str, _BaseDemoConfig]:
"""Return a dict of code -> instance for every discovered demo config."""
if _configCache:
return _configCache
@ -43,7 +43,7 @@ def _getAvailableDemoConfigs() -> Dict[str, _BaseDemoConfig]:
return _configCache
def _getDemoConfigByCode(code: str) -> _BaseDemoConfig | None:
def getDemoConfigByCode(code: str) -> _BaseDemoConfig | None:
"""Get a specific demo config by its code."""
configs = _getAvailableDemoConfigs()
configs = getAvailableDemoConfigs()
return configs.get(code)

View file

@ -447,10 +447,10 @@ class InvestorDemo2026(_BaseDemoConfig):
if not mandateId:
return
try:
from modules.interfaces.interfaceDbBilling import _getRootInterface
from modules.interfaces.interfaceDbBilling import getRootInterface
from modules.datamodels.datamodelBilling import BillingSettings
billingInterface = _getRootInterface()
billingInterface = getRootInterface()
existingSettings = billingInterface.getSettings(mandateId)
if existingSettings:
summary["skipped"].append(f"Billing for {mandateLabel} exists")
@ -532,8 +532,8 @@ class InvestorDemo2026(_BaseDemoConfig):
summary["removed"].append(f"{len(roles)} roles in {mandateLabel}")
try:
from modules.interfaces.interfaceDbBilling import _getRootInterface
billingDb = _getRootInterface().db
from modules.interfaces.interfaceDbBilling import getRootInterface
billingDb = getRootInterface().db
billingSettings = billingDb.getRecordset(BillingSettings, recordFilter={"mandateId": mandateId}) or []
for bs in billingSettings:
billingDb.recordDelete(BillingSettings, bs.get("id"))

View file

@ -377,9 +377,9 @@ class PwgDemo2026(_BaseDemoConfig):
return
try:
from modules.datamodels.datamodelBilling import BillingSettings
from modules.interfaces.interfaceDbBilling import _getRootInterface
from modules.interfaces.interfaceDbBilling import getRootInterface
billingInterface = _getRootInterface()
billingInterface = getRootInterface()
existingSettings = billingInterface.getSettings(mandateId)
if existingSettings:
summary["skipped"].append(f"Billing for {mandateLabel} exists")
@ -708,8 +708,8 @@ class PwgDemo2026(_BaseDemoConfig):
db.recordDelete(Role, role.get("id"))
try:
from modules.interfaces.interfaceDbBilling import _getRootInterface
billingDb = _getRootInterface().db
from modules.interfaces.interfaceDbBilling import getRootInterface
billingDb = getRootInterface().db
billingSettings = billingDb.getRecordset(BillingSettings, recordFilter={"mandateId": mandateId}) or []
for bs in billingSettings:
billingDb.recordDelete(BillingSettings, bs.get("id"))

View file

@ -139,13 +139,13 @@ def storeDebugMessageAndDocuments(message, currentUser, mandateId=None, featureI
try:
import os
from datetime import datetime, UTC
from modules.shared.debugLogger import _getBaseDebugDir, _ensureDir
from modules.shared.debugLogger import getBaseDebugDir, ensureDir
from modules.interfaces.interfaceDbManagement import getInterface
# Create base debug directory (use base debug dir, not prompts subdirectory)
baseDebugDir = _getBaseDebugDir()
baseDebugDir = getBaseDebugDir()
debug_root = os.path.join(baseDebugDir, 'messages')
_ensureDir(debug_root)
ensureDir(debug_root)
# Generate timestamp
timestamp = datetime.now(UTC).strftime('%Y%m%d-%H%M%S-%f')[:-3]
@ -210,7 +210,7 @@ def storeDebugMessageAndDocuments(message, currentUser, mandateId=None, featureI
safe_label = "default"
label_folder = os.path.join(message_path, safe_label)
_ensureDir(label_folder)
ensureDir(label_folder)
# Store each document
for i, doc in enumerate(docs):
@ -401,8 +401,8 @@ class ChatObjects:
dbPassword = APP_CONFIG.get("DB_PASSWORD_SECRET")
dbPort = int(APP_CONFIG.get("DB_PORT", 5432))
from modules.connectors.connectorDbPostgre import _get_cached_connector
self.db = _get_cached_connector(
from modules.connectors.connectorDbPostgre import getCachedConnector
self.db = getCachedConnector(
dbHost=dbHost,
dbDatabase=dbDatabase,
dbUser=dbUser,

View file

@ -204,19 +204,20 @@ def get_chatbot_threads(
normalized_wf["maxSteps"] = 10
normalized_workflows.append(normalized_wf)
metadata = PaginationMetadata(
currentPage=paginationParams.page if paginationParams else 1,
pageSize=paginationParams.pageSize if paginationParams else len(workflows),
totalItems=totalItems,
totalPages=totalPages,
sort=paginationParams.sort if paginationParams else [],
filters=paginationParams.filters if paginationParams else None
)
return PaginatedResponse(
items=normalized_workflows,
pagination=metadata
)
from modules.routes.routeHelpers import enrichRowsWithFkLabels
enriched = enrichRowsWithFkLabels(normalized_workflows, ChatbotConversation)
return {
"items": enriched,
"pagination": PaginationMetadata(
currentPage=paginationParams.page if paginationParams else 1,
pageSize=paginationParams.pageSize if paginationParams else len(workflows),
totalItems=totalItems,
totalPages=totalPages,
sort=paginationParams.sort if paginationParams else [],
filters=paginationParams.filters if paginationParams else None
).model_dump(),
}
except HTTPException:
raise

View file

@ -336,10 +336,10 @@ async def startSession(
try:
from modules.interfaces.interfaceVoiceObjects import getVoiceInterface
voiceInterface = getVoiceInterface(context.user, mandateId)
from .serviceCommcoach import _getUserVoicePrefs, _stripMarkdownForTts, _buildTtsConfigErrorMessage
language, voiceName = _getUserVoicePrefs(userId, mandateId)
from .serviceCommcoach import getUserVoicePrefs, stripMarkdownForTts, buildTtsConfigErrorMessage
language, voiceName = getUserVoicePrefs(userId, mandateId)
ttsResult = await voiceInterface.textToSpeech(
text=_stripMarkdownForTts(greetingText),
text=stripMarkdownForTts(greetingText),
languageCode=language,
voiceName=voiceName,
)
@ -584,8 +584,8 @@ async def sendAudioStream(
if not audioBody:
raise HTTPException(status_code=400, detail=routeApiMsg("No audio data received"))
from .serviceCommcoach import _getUserVoicePrefs
language, _ = _getUserVoicePrefs(str(context.user.id), mandateId)
from .serviceCommcoach import getUserVoicePrefs
language, _ = getUserVoicePrefs(str(context.user.id), mandateId)
contextId = session.get("contextId")
service = CommcoachService(context.user, mandateId, instanceId)

View file

@ -79,7 +79,7 @@ def _selectConfiguredVoice(
return None
def _buildTtsConfigErrorMessage(language: str, voiceName: Optional[str], rawError: str = "") -> str:
def buildTtsConfigErrorMessage(language: str, voiceName: Optional[str], rawError: str = "") -> str:
if voiceName:
return (
f'Die konfigurierte Stimme "{voiceName}" für {language} ist ungültig oder nicht verfügbar. '
@ -91,7 +91,7 @@ def _buildTtsConfigErrorMessage(language: str, voiceName: Optional[str], rawErro
)
def _getUserVoicePrefs(userId: str, mandateId: Optional[str] = None) -> tuple:
def getUserVoicePrefs(userId: str, mandateId: Optional[str] = None) -> tuple:
"""Load voice language and voiceName from central UserVoicePreferences.
Returns (language, voiceName) tuple."""
try:
@ -160,7 +160,7 @@ def _getUserVoicePrefs(userId: str, mandateId: Optional[str] = None) -> tuple:
return ("de-DE", None)
def _stripMarkdownForTts(text: str) -> str:
def stripMarkdownForTts(text: str) -> str:
"""Strip markdown formatting so TTS reads clean speech text."""
t = text
t = re.sub(r'\*\*(.+?)\*\*', r'\1', t)
@ -346,9 +346,9 @@ async def _generateAndEmitTts(sessionId: str, speechText: str, currentUser, mand
from modules.interfaces.interfaceVoiceObjects import getVoiceInterface
import base64
voiceInterface = getVoiceInterface(currentUser, mandateId)
language, voiceName = _getUserVoicePrefs(str(currentUser.id), mandateId)
language, voiceName = getUserVoicePrefs(str(currentUser.id), mandateId)
ttsResult = await voiceInterface.textToSpeech(
text=_stripMarkdownForTts(speechText),
text=stripMarkdownForTts(speechText),
languageCode=language,
voiceName=voiceName,
)
@ -362,7 +362,7 @@ async def _generateAndEmitTts(sessionId: str, speechText: str, currentUser, mand
return
errorDetail = ttsResult.get("error", "Text-to-Speech failed")
await emitSessionEvent(sessionId, "error", {
"message": _buildTtsConfigErrorMessage(language, voiceName, errorDetail),
"message": buildTtsConfigErrorMessage(language, voiceName, errorDetail),
"detail": errorDetail,
"ttsLanguage": language,
"ttsVoice": voiceName,
@ -370,7 +370,7 @@ async def _generateAndEmitTts(sessionId: str, speechText: str, currentUser, mand
except Exception as e:
logger.warning(f"TTS failed for session {sessionId}: {e}")
await emitSessionEvent(sessionId, "error", {
"message": _buildTtsConfigErrorMessage("de-DE", None, str(e)),
"message": buildTtsConfigErrorMessage("de-DE", None, str(e)),
"detail": str(e),
})
@ -695,7 +695,7 @@ _TTS_WORD_LIMIT = 200
async def _prepareSpeechText(fullText: str, callAiFn) -> str:
"""Prepare text for TTS. Short responses used directly; long ones get summarized."""
cleaned = _stripMarkdownForTts(fullText)
cleaned = stripMarkdownForTts(fullText)
wordCount = len(cleaned.split())
if wordCount <= _TTS_WORD_LIMIT:
return cleaned
@ -1373,7 +1373,7 @@ class CommcoachService:
from modules.interfaces.interfaceMessaging import getInterface as getMessagingInterface
from modules.interfaces.interfaceDbApp import getRootInterface
from modules.shared.notifyMandateAdmins import _renderHtmlEmail, _resolveMandateName
from modules.shared.notifyMandateAdmins import renderHtmlEmail, resolveMandateName
rootInterface = getRootInterface()
user = rootInterface.getUser(self.userId)
@ -1382,9 +1382,9 @@ class CommcoachService:
messaging = getMessagingInterface()
subject = f"Coaching-Session Zusammenfassung: {contextTitle}"
mandateName = _resolveMandateName(self.mandateId)
mandateName = resolveMandateName(self.mandateId)
contentHtml = _buildSummaryEmailBlock(emailData, summary, contextTitle)
htmlMessage = _renderHtmlEmail(
htmlMessage = renderHtmlEmail(
"Coaching-Session Zusammenfassung",
[
f'Thema: {contextTitle}',

View file

@ -64,7 +64,7 @@ async def _runDailyReminders():
from modules.connectors.connectorDbPostgre import DatabaseConnector
from .datamodelCommcoach import CoachingUserProfile, CoachingContextStatus
from modules.interfaces.interfaceMessaging import getInterface as getMessagingInterface
from modules.shared.notifyMandateAdmins import _renderHtmlEmail, _resolveMandateName
from modules.shared.notifyMandateAdmins import renderHtmlEmail, resolveMandateName
dbHost = APP_CONFIG.get("DB_HOST", "_no_config_default_data")
db = DatabaseConnector(
@ -106,8 +106,8 @@ async def _runDailyReminders():
contextList = ", ".join(contextTitles)
subject = "Dein tägliches Coaching wartet"
mandateName = _resolveMandateName(profile.get("mandateId"))
htmlMessage = _renderHtmlEmail(
mandateName = resolveMandateName(profile.get("mandateId"))
htmlMessage = renderHtmlEmail(
"Zeit für dein tägliches Coaching",
[
f"Du hast aktuell {len(contexts)} aktive Coaching-Themen.",

View file

@ -68,8 +68,7 @@ class AutoWorkflow(PowerOnModel):
"frontend_readonly": True,
"frontend_required": False,
"label": "Mandanten-ID",
"frontend_fk_source": "/api/mandates/",
"frontend_fk_display_field": "label",
"fk_label_field": "label",
"fk_model": "Mandate",
"fk_target": {"db": "poweron_app", "table": "Mandate"},
},
@ -81,8 +80,7 @@ class AutoWorkflow(PowerOnModel):
"frontend_readonly": True,
"frontend_required": False,
"label": "Feature-Instanz-ID",
"frontend_fk_source": "/api/features/instances",
"frontend_fk_display_field": "label",
"fk_label_field": "label",
"fk_model": "FeatureInstance",
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
},
@ -220,6 +218,8 @@ class AutoVersion(PowerOnModel):
"frontend_readonly": True,
"frontend_required": False,
"label": "Veröffentlicht von",
"fk_model": "User",
"fk_label_field": "username",
"fk_target": {"db": "poweron_app", "table": "User"},
},
)
@ -259,8 +259,7 @@ class AutoRun(PowerOnModel):
"frontend_readonly": True,
"frontend_required": False,
"label": "Mandanten-ID",
"frontend_fk_source": "/api/mandates/",
"frontend_fk_display_field": "label",
"fk_label_field": "label",
"fk_model": "Mandate",
"fk_target": {"db": "poweron_app", "table": "Mandate"},
},
@ -273,6 +272,8 @@ class AutoRun(PowerOnModel):
"frontend_readonly": True,
"frontend_required": False,
"label": "Auslöser",
"fk_model": "User",
"fk_label_field": "username",
"fk_target": {"db": "poweron_app", "table": "User"},
},
)

View file

@ -73,7 +73,7 @@ def _isMethodBoundNode(node: Mapping[str, Any]) -> bool:
return bool(node.get("_method") and node.get("_action"))
def _bindsActionFromLegacy(node: Mapping[str, Any]) -> Optional[str]:
def bindsActionFromLegacy(node: Mapping[str, Any]) -> Optional[str]:
"""Build the canonical 'method.action' identifier from a legacy node dict.
Returns None for framework-primitive nodes (trigger/flow/input/data).
@ -121,7 +121,7 @@ def _adapterFromLegacyNode(node: Mapping[str, Any]) -> Optional[NodeAdapter]:
if not _isMethodBoundNode(node):
return None
bindsAction = _bindsActionFromLegacy(node)
bindsAction = bindsActionFromLegacy(node)
if not bindsAction:
return None

View file

@ -4,6 +4,19 @@
from modules.shared.i18nRegistry import t
# Typed FeatureInstance binding (replaces legacy `string, hidden`).
# - type FeatureInstanceRef[redmine] is filtered by the DataPicker.
# - frontendType "featureInstance" is rendered by FeatureInstancePicker which
# loads /options/feature.instance?featureCode=redmine for the current mandate.
# Shared parameter spec; each node copies it via dict(_REDMINE_INSTANCE_PARAM).
_REDMINE_INSTANCE_PARAM = {
    "name": "featureInstanceId",
    # Discriminated ref type so the DataPicker can filter by feature code.
    "type": "FeatureInstanceRef[redmine]",
    "required": True,
    # Rendered by the FeatureInstancePicker frontend component (see note above).
    "frontendType": "featureInstance",
    "frontendOptions": {"featureCode": "redmine"},
    "description": t("Redmine-Mandant"),
}
REDMINE_NODES = [
{
"id": "redmine.readTicket",
@ -11,8 +24,7 @@ REDMINE_NODES = [
"label": t("Ticket lesen"),
"description": t("Einzelnes Redmine-Ticket aus dem Mirror laden."),
"parameters": [
{"name": "featureInstanceId", "type": "string", "required": True, "frontendType": "hidden",
"description": t("Redmine Feature-Instanz-ID")},
dict(_REDMINE_INSTANCE_PARAM),
{"name": "ticketId", "type": "number", "required": True, "frontendType": "number",
"description": t("Redmine-Ticket-ID")},
],
@ -30,8 +42,7 @@ REDMINE_NODES = [
"label": t("Tickets auflisten"),
"description": t("Tickets aus dem lokalen Mirror mit Filtern (Tracker, Status, Zeitraum, Zuweisung)."),
"parameters": [
{"name": "featureInstanceId", "type": "string", "required": True, "frontendType": "hidden",
"description": t("Redmine Feature-Instanz-ID")},
dict(_REDMINE_INSTANCE_PARAM),
{"name": "trackerIds", "type": "string", "required": False, "frontendType": "text",
"description": t("Tracker-IDs (Komma-separiert)"), "default": ""},
{"name": "status", "type": "string", "required": False, "frontendType": "text",
@ -59,8 +70,7 @@ REDMINE_NODES = [
"label": t("Ticket erstellen"),
"description": t("Neues Ticket in Redmine anlegen. Mirror wird sofort aktualisiert."),
"parameters": [
{"name": "featureInstanceId", "type": "string", "required": True, "frontendType": "hidden",
"description": t("Redmine Feature-Instanz-ID")},
dict(_REDMINE_INSTANCE_PARAM),
{"name": "subject", "type": "string", "required": True, "frontendType": "text",
"description": t("Ticket-Titel")},
{"name": "trackerId", "type": "number", "required": True, "frontendType": "number",
@ -92,8 +102,7 @@ REDMINE_NODES = [
"label": t("Ticket bearbeiten"),
"description": t("Felder eines Redmine-Tickets aktualisieren. Nur gesetzte Felder werden uebertragen."),
"parameters": [
{"name": "featureInstanceId", "type": "string", "required": True, "frontendType": "hidden",
"description": t("Redmine Feature-Instanz-ID")},
dict(_REDMINE_INSTANCE_PARAM),
{"name": "ticketId", "type": "number", "required": True, "frontendType": "number",
"description": t("Ticket-ID")},
{"name": "subject", "type": "string", "required": False, "frontendType": "text",
@ -129,8 +138,7 @@ REDMINE_NODES = [
"label": t("Statistik laden"),
"description": t("Aggregierte Kennzahlen (KPIs, Durchsatz, Status-Verteilung, Backlog) aus dem Mirror."),
"parameters": [
{"name": "featureInstanceId", "type": "string", "required": True, "frontendType": "hidden",
"description": t("Redmine Feature-Instanz-ID")},
dict(_REDMINE_INSTANCE_PARAM),
{"name": "dateFrom", "type": "string", "required": False, "frontendType": "date",
"description": t("Zeitraum ab")},
{"name": "dateTo", "type": "string", "required": False, "frontendType": "date",
@ -154,8 +162,7 @@ REDMINE_NODES = [
"label": t("Mirror synchronisieren"),
"description": t("Tickets und Beziehungen aus Redmine in den lokalen Mirror uebernehmen."),
"parameters": [
{"name": "featureInstanceId", "type": "string", "required": True, "frontendType": "hidden",
"description": t("Redmine Feature-Instanz-ID")},
dict(_REDMINE_INSTANCE_PARAM),
{"name": "force", "type": "boolean", "required": False, "frontendType": "checkbox",
"description": t("Vollsync erzwingen (ignoriert lastSyncAt)"), "default": False},
],

View file

@ -3,6 +3,20 @@
from modules.shared.i18nRegistry import t
# Typed FeatureInstance binding (replaces legacy `string, hidden`).
# - type uses the discriminator notation `FeatureInstanceRef[<code>]` so the
# DataPicker / RequiredAttributePicker can filter compatible upstream paths.
# - frontendType "featureInstance" is rendered by FeatureInstancePicker which
# loads /options/feature.instance?featureCode=trustee for the current mandate.
# Shared parameter spec; each node copies it via dict(_TRUSTEE_INSTANCE_PARAM).
_TRUSTEE_INSTANCE_PARAM = {
    "name": "featureInstanceId",
    # Discriminated ref type (`FeatureInstanceRef[<code>]`) for picker filtering.
    "type": "FeatureInstanceRef[trustee]",
    "required": True,
    # Rendered by the FeatureInstancePicker frontend component (see note above).
    "frontendType": "featureInstance",
    "frontendOptions": {"featureCode": "trustee"},
    "description": t("Trustee-Mandant"),
}
TRUSTEE_NODES = [
{
"id": "trustee.refreshAccountingData",
@ -10,8 +24,7 @@ TRUSTEE_NODES = [
"label": t("Buchhaltungsdaten aktualisieren"),
"description": t("Buchhaltungsdaten aus externem System importieren/aktualisieren."),
"parameters": [
{"name": "featureInstanceId", "type": "string", "required": True, "frontendType": "hidden",
"description": t("Trustee Feature-Instanz-ID")},
dict(_TRUSTEE_INSTANCE_PARAM),
{"name": "forceRefresh", "type": "boolean", "required": False, "frontendType": "checkbox",
"description": t("Import erzwingen"), "default": False},
{"name": "dateFrom", "type": "string", "required": False, "frontendType": "date",
@ -39,8 +52,7 @@ TRUSTEE_NODES = [
{"name": "sharepointFolder", "type": "string", "required": False, "frontendType": "sharepointFolder",
"frontendOptions": {"dependsOn": "connectionReference"},
"description": t("SharePoint-Ordnerpfad"), "default": ""},
{"name": "featureInstanceId", "type": "string", "required": True, "frontendType": "hidden",
"description": t("Trustee Feature-Instanz-ID")},
dict(_TRUSTEE_INSTANCE_PARAM),
{"name": "prompt", "type": "string", "required": False, "frontendType": "textarea",
"description": t("AI-Prompt für Extraktion"), "default": ""},
],
@ -62,12 +74,11 @@ TRUSTEE_NODES = [
"description": t("TrusteeDocument + TrusteePosition aus Extraktionsergebnis erstellen."),
"parameters": [
# Type matches what producers actually emit: ActionResult.documents
# is `List[ActionDocument]` (see datamodelChat.ActionResult). The
# is List[ActionDocument] (see datamodelChat.ActionResult). The
# DataPicker uses this string to filter compatible upstream paths.
{"name": "documentList", "type": "List[ActionDocument]", "required": True, "frontendType": "dataRef",
"description": t("Dokumentenliste eines Upstream-Producers (z.B. trustee.extractFromFiles → documents); via expliziten DataRef im Graph zu binden — Pick-not-Push, kein Auto-Wire")},
{"name": "featureInstanceId", "type": "string", "required": True, "frontendType": "hidden",
"description": t("Trustee Feature-Instanz-ID")},
"description": t("Dokumentenliste — gebunden via DataRef.")},
dict(_TRUSTEE_INSTANCE_PARAM),
],
"inputs": 1,
"outputs": 1,
@ -83,13 +94,9 @@ TRUSTEE_NODES = [
"label": t("In Buchhaltung synchronisieren"),
"description": t("Trustee-Positionen in Buchhaltungssystem übertragen."),
"parameters": [
# Type matches what producers actually emit: ActionResult.documents
# is `List[ActionDocument]` (see datamodelChat.ActionResult). The
# DataPicker uses this string to filter compatible upstream paths.
{"name": "documentList", "type": "List[ActionDocument]", "required": True, "frontendType": "dataRef",
"description": t("Verarbeitete Dokumentenliste eines Upstream-Producers (z.B. trustee.processDocuments → documents); via expliziten DataRef im Graph zu binden — Pick-not-Push, kein Auto-Wire")},
{"name": "featureInstanceId", "type": "string", "required": True, "frontendType": "hidden",
"description": t("Trustee Feature-Instanz-ID")},
"description": t("Verarbeitete Dokumentenliste — gebunden via DataRef.")},
dict(_TRUSTEE_INSTANCE_PARAM),
],
"inputs": 1,
"outputs": 1,
@ -105,8 +112,7 @@ TRUSTEE_NODES = [
"label": t("Treuhand-Daten abfragen"),
"description": t("Daten aus der Trustee-DB lesen (Lookup, Aggregation, Roh-Export). Pendant zu refreshAccountingData ohne externen Sync."),
"parameters": [
{"name": "featureInstanceId", "type": "string", "required": True, "frontendType": "hidden",
"description": t("Trustee Feature-Instanz-ID")},
dict(_TRUSTEE_INSTANCE_PARAM),
{"name": "mode", "type": "string", "required": True, "frontendType": "select",
"frontendOptions": {"options": ["lookup", "raw", "aggregate"]},
"description": t("Abfragemodus"), "default": "lookup"},

View file

@ -9,7 +9,7 @@ import logging
from typing import Dict, List, Any, Optional
from modules.features.graphicalEditor.nodeDefinitions import STATIC_NODE_TYPES
from modules.features.graphicalEditor.nodeAdapter import _bindsActionFromLegacy
from modules.features.graphicalEditor.nodeAdapter import bindsActionFromLegacy
from modules.features.graphicalEditor.portTypes import PORT_TYPE_CATALOG, SYSTEM_VARIABLES
from modules.shared.i18nRegistry import normalizePrimaryLanguageTag, resolveText
@ -50,7 +50,7 @@ def _localizeNode(node: Dict[str, Any], language: str) -> Dict[str, Any]:
fields.
"""
lang = normalizePrimaryLanguageTag(language, "en")
bindsAction = _bindsActionFromLegacy(node)
bindsAction = bindsActionFromLegacy(node)
out = dict(node)
for key in list(out.keys()):
if key.startswith("_"):

View file

@ -610,7 +610,7 @@ SYSTEM_VARIABLES: Dict[str, Dict[str, str]] = {
}
def _resolveSystemVariable(variable: str, context: Dict[str, Any]) -> Any:
def resolveSystemVariable(variable: str, context: Dict[str, Any]) -> Any:
"""Resolve a system variable name to its runtime value."""
from datetime import datetime, timezone
@ -642,7 +642,7 @@ def _resolveSystemVariable(variable: str, context: Dict[str, Any]) -> Any:
# Output normalizers
# ---------------------------------------------------------------------------
def _normalizeToSchema(raw: Any, schemaName: str) -> Dict[str, Any]:
def normalizeToSchema(raw: Any, schemaName: str) -> Dict[str, Any]:
"""
Normalize raw executor output to match the declared port schema.
Ensures _success/_error meta-fields are always present.
@ -696,12 +696,12 @@ def _normalizeError(error: Exception, schemaName: str) -> Dict[str, Any]:
# Transit helpers
# ---------------------------------------------------------------------------
def _wrapTransit(data: Any, meta: Dict[str, Any]) -> Dict[str, Any]:
def wrapTransit(data: Any, meta: Dict[str, Any]) -> Dict[str, Any]:
    """Build a Transit envelope around *data*, attaching *meta*."""
    envelope: Dict[str, Any] = {"_transit": True, "_meta": meta}
    envelope["data"] = data
    return envelope
def _unwrapTransit(output: Any) -> Any:
def unwrapTransit(output: Any) -> Any:
"""Unwrap a Transit envelope, returning the inner data."""
if isinstance(output, dict) and output.get("_transit"):
return output.get("data")
@ -726,10 +726,10 @@ def _resolveTransitChain(
return out
sources = connectionMap.get(current, [])
if not sources:
return _unwrapTransit(out)
return unwrapTransit(out)
srcId = sources[0][0] if sources else None
if not srcId:
return _unwrapTransit(out)
return unwrapTransit(out)
current = srcId
return nodeOutputs.get(nodeId)
@ -738,7 +738,7 @@ def _resolveTransitChain(
# Schema derivation for dynamic outputs
# ---------------------------------------------------------------------------
def _derive_form_payload_schema_from_param(node: Dict[str, Any], param_key: str) -> Optional[PortSchema]:
def deriveFormPayloadSchemaFromParam(node: Dict[str, Any], param_key: str) -> Optional[PortSchema]:
"""Derive output schema from a field-builder JSON list (``fields``, ``formFields``, …)."""
fields_param = (node.get("parameters") or {}).get(param_key)
if not fields_param or not isinstance(fields_param, list):
@ -776,7 +776,7 @@ def _derive_form_payload_schema_from_param(node: Dict[str, Any], param_key: str)
def _deriveFormPayloadSchema(node: Dict[str, Any]) -> Optional[PortSchema]:
"""Derive output schema from form field definitions (``parameters.fields``)."""
return _derive_form_payload_schema_from_param(node, "fields")
return deriveFormPayloadSchemaFromParam(node, "fields")
def parse_graph_defined_output_schema(
@ -796,9 +796,9 @@ def parse_graph_defined_output_schema(
schema_spec = output_port.get("schema")
if isinstance(schema_spec, dict) and schema_spec.get("kind") == "fromGraph":
param_key = str(schema_spec.get("parameter") or "fields")
return _derive_form_payload_schema_from_param(node, param_key)
return deriveFormPayloadSchemaFromParam(node, param_key)
if output_port.get("dynamic") and output_port.get("deriveFrom"):
return _derive_form_payload_schema_from_param(node, str(output_port.get("deriveFrom")))
return deriveFormPayloadSchemaFromParam(node, str(output_port.get("deriveFrom")))
if isinstance(schema_spec, str) and schema_spec:
return PORT_TYPE_CATALOG.get(schema_spec)
return None

View file

@ -14,7 +14,7 @@ from fastapi import APIRouter, Depends, Path, Query, Body, Request, HTTPExceptio
from fastapi.responses import JSONResponse, StreamingResponse, Response
from modules.auth import limiter, getRequestContext, RequestContext
from modules.datamodels.datamodelPagination import PaginationParams, PaginationMetadata, normalize_pagination_dict
from modules.routes.routeHelpers import _applyFiltersAndSort
from modules.routes.routeHelpers import applyFiltersAndSort
from modules.features.graphicalEditor.mainGraphicalEditor import getGraphicalEditorServices
from modules.features.graphicalEditor.nodeRegistry import getNodeTypesForApi
@ -230,6 +230,65 @@ def get_user_connection_options(
return {"options": options}
@router.get("/{instanceId}/options/feature.instance")
@limiter.limit("60/minute")
def get_feature_instance_options(
    request: Request,
    instanceId: str = Path(..., description="GraphicalEditor feature instance ID (workflow context)"),
    featureCode: str = Query(..., description="Feature code to filter by (e.g. 'trustee', 'redmine', 'clickup')"),
    enabledOnly: bool = Query(True, description="If true (default), only enabled feature instances are returned"),
    context: RequestContext = Depends(getRequestContext),
) -> dict:
    """Return mandate-scoped FeatureInstances for the given featureCode.

    Used by node parameters with frontendType='featureInstance' (e.g. Trustee
    or Redmine nodes that need to bind to a specific tenant FeatureInstance).

    Always restricted to the calling user's mandate (derived from the workflow
    feature instance) so the picker never leaks foreign-mandate instances.

    Response: { options: [ { value: "<id>", label: "<displayName> ([code])" } ] }
    """
    # Access check; also yields the mandate the workflow instance belongs to.
    mandateId = _validateInstanceAccess(instanceId, context)
    if not context.user:
        raise HTTPException(status_code=401, detail=routeApiMsg("Authentication required"))
    # Feature codes are compared case-insensitively after trimming.
    code = (featureCode or "").strip().lower()
    if not code:
        raise HTTPException(status_code=400, detail=routeApiMsg("featureCode query parameter is required"))
    if not mandateId:
        # No mandate context -> empty picker rather than an error.
        return {"options": []}
    # Imported lazily inside the handler (presumably to avoid an import cycle
    # with the interfaces package — confirm).
    from modules.interfaces.interfaceDbApp import getRootInterface
    rootInterface = getRootInterface()
    try:
        instances = rootInterface.getFeatureInstancesByMandate(
            mandateId, enabledOnly=bool(enabledOnly)
        ) or []
    except Exception as e:
        # Best-effort endpoint: log the failure and degrade to an empty list.
        logger.error(
            "get_feature_instance_options: failed to load instances mandateId=%s: %s",
            mandateId, e, exc_info=True,
        )
        return {"options": []}
    options: List[Dict[str, str]] = []
    for fi in instances:
        # Client-side filter: keep only instances of the requested feature.
        fiCode = (getattr(fi, "featureCode", "") or "").strip().lower()
        if fiCode != code:
            continue
        fiId = str(getattr(fi, "id", "") or "")
        if not fiId:
            continue
        # NOTE(review): the docstring promises "<displayName> ([code])", but
        # the label is read from .label / .name — confirm which attribute the
        # FeatureInstance model actually exposes.
        rawLabel = getattr(fi, "label", None) or getattr(fi, "name", None) or fiId
        options.append({"value": fiId, "label": f"{rawLabel} ({fiCode})"})
    logger.info(
        "graphicalEditor feature.instance options: instanceId=%s mandateId=%s "
        "featureCode=%s enabledOnly=%s -> %d options",
        instanceId, mandateId, code, enabledOnly, len(options),
    )
    return {"options": options}
@router.post("/{instanceId}/execute")
@limiter.limit("30/minute")
async def post_execute(
@ -474,6 +533,10 @@ def get_templates(
iface = getGraphicalEditorInterface(context.user, mandateId, instanceId)
templates = iface.getTemplates(scope=scope)
from modules.routes.routeHelpers import enrichRowsWithFkLabels
from modules.features.graphicalEditor.datamodelFeatureGraphicalEditor import AutoWorkflow
enrichRowsWithFkLabels(templates, AutoWorkflow)
paginationParams = None
if pagination:
try:
@ -485,7 +548,7 @@ def get_templates(
raise HTTPException(status_code=400, detail=f"Invalid pagination parameter: {str(e)}")
if paginationParams:
filtered = _applyFiltersAndSort(templates, paginationParams)
filtered = applyFiltersAndSort(templates, paginationParams)
totalItems = len(filtered)
totalPages = math.ceil(totalItems / paginationParams.pageSize) if totalItems > 0 else 0
startIdx = (paginationParams.page - 1) * paginationParams.pageSize
@ -906,15 +969,15 @@ async def _runEditorAgent(
enrichedPrompt = prompt
if dataSourceIds:
from modules.features.workspace.routeFeatureWorkspace import _buildDataSourceContext
from modules.features.workspace.routeFeatureWorkspace import buildDataSourceContext
chatSvc = getService("chat", ctx)
dsInfo = _buildDataSourceContext(chatSvc, dataSourceIds)
dsInfo = buildDataSourceContext(chatSvc, dataSourceIds)
if dsInfo:
enrichedPrompt = f"{prompt}\n\n[Active Data Sources]\n{dsInfo}"
if featureDataSourceIds:
from modules.features.workspace.routeFeatureWorkspace import _buildFeatureDataSourceContext
fdsInfo = _buildFeatureDataSourceContext(featureDataSourceIds)
from modules.features.workspace.routeFeatureWorkspace import buildFeatureDataSourceContext
fdsInfo = buildFeatureDataSourceContext(featureDataSourceIds)
if fdsInfo:
enrichedPrompt = f"{enrichedPrompt}\n\n[Attached Feature Data Sources]\n{fdsInfo}"
@ -1224,7 +1287,7 @@ def get_workflows(
raise HTTPException(status_code=400, detail=f"Invalid pagination parameter: {str(e)}")
if paginationParams:
filtered = _applyFiltersAndSort(enriched, paginationParams)
filtered = applyFiltersAndSort(enriched, paginationParams)
totalItems = len(filtered)
totalPages = math.ceil(totalItems / paginationParams.pageSize) if totalItems > 0 else 0
startIdx = (paginationParams.page - 1) * paginationParams.pageSize

View file

@ -48,7 +48,7 @@ from modules.features.redmine.interfaceFeatureRedmine import (
RedmineObjects,
getInterface,
)
from modules.features.redmine.serviceRedmineStatsCache import _getStatsCache
from modules.features.redmine.serviceRedmineStatsCache import getStatsCache
logger = logging.getLogger(__name__)
@ -334,7 +334,7 @@ def getTicket(
def _invalidateCache(featureInstanceId: str) -> None:
try:
_getStatsCache().invalidateInstance(featureInstanceId)
getStatsCache().invalidateInstance(featureInstanceId)
except Exception as e:
logger.warning(f"Failed to invalidate stats cache for {featureInstanceId}: {e}")

View file

@ -38,7 +38,7 @@ from modules.features.redmine.datamodelRedmine import (
RedmineThroughputBucket,
RedmineTicketDto,
)
from modules.features.redmine.serviceRedmineStatsCache import _getStatsCache
from modules.features.redmine.serviceRedmineStatsCache import getStatsCache
logger = logging.getLogger(__name__)
@ -69,7 +69,7 @@ async def getStats(
if status_norm not in {"*", "open", "closed"}:
status_norm = "*"
cache = _getStatsCache()
cache = getStatsCache()
# Cache key now includes the new dimensions so different filter combos
# don't collide. ``_freeze`` (in the cache module) hashes lists/sets
# for us, so we can pass them directly as extra dimensions.

View file

@ -123,7 +123,7 @@ class RedmineStatsCache:
_globalCache: Optional[RedmineStatsCache] = None
def _getStatsCache() -> RedmineStatsCache:
def getStatsCache() -> RedmineStatsCache:
"""Process-wide singleton."""
global _globalCache
if _globalCache is None:

View file

@ -38,7 +38,7 @@ from modules.features.redmine.datamodelRedmine import (
RedmineTicketMirror,
)
from modules.features.redmine.interfaceFeatureRedmine import getInterface
from modules.features.redmine.serviceRedmineStatsCache import _getStatsCache
from modules.features.redmine.serviceRedmineStatsCache import getStatsCache
logger = logging.getLogger(__name__)
@ -134,7 +134,7 @@ async def runSync(
durationMs=duration_ms,
lastSyncAt=now_epoch,
)
_getStatsCache().invalidateInstance(featureInstanceId)
getStatsCache().invalidateInstance(featureInstanceId)
return RedmineSyncResultDto(
instanceId=featureInstanceId,
@ -188,7 +188,7 @@ async def upsertSingleTicket(
now_epoch = time.time()
_upsertTicket(iface, featureInstanceId, mandateId, issue, now_epoch)
relations_upserted = _replaceRelations(iface, featureInstanceId, issue, now_epoch)
_getStatsCache().invalidateInstance(featureInstanceId)
getStatsCache().invalidateInstance(featureInstanceId)
return relations_upserted
@ -202,7 +202,7 @@ def deleteMirroredTicket(
iface = getInterface(currentUser, mandateId=mandateId, featureInstanceId=featureInstanceId)
deleted = iface.deleteMirroredTicket(featureInstanceId, int(issueId))
iface.deleteMirroredRelationsForIssue(featureInstanceId, int(issueId))
_getStatsCache().invalidateInstance(featureInstanceId)
getStatsCache().invalidateInstance(featureInstanceId)
return deleted

View file

@ -383,7 +383,7 @@ async def streamSession(
async def _eventGenerator():
"""Generate SSE events from the session event queue."""
from .service import _sessionEvents
from .service import sessionEvents
# Send initial session state
yield f"data: {json.dumps({'type': 'sessionState', 'data': session})}\n\n"
@ -394,10 +394,10 @@ async def streamSession(
yield f"data: {json.dumps({'type': 'botConnectionState', 'data': {'connected': _getActiveService(sessionId) is not None}})}\n\n"
# Stream events
eventQueue = _sessionEvents.get(sessionId)
eventQueue = sessionEvents.get(sessionId)
if not eventQueue:
_sessionEvents[sessionId] = asyncio.Queue()
eventQueue = _sessionEvents[sessionId]
sessionEvents[sessionId] = asyncio.Queue()
eventQueue = sessionEvents[sessionId]
try:
while True:
@ -810,8 +810,8 @@ async def deleteUserAccount(
# MFA Code Submission (relayed to active bot session)
# =========================================================================
_mfaCodeQueues: dict = {}
_mfaWaitTasks: dict = {}
mfaCodeQueues: dict = {}
mfaWaitTasks: dict = {}
@router.post("/{instanceId}/sessions/{sessionId}/mfa")
@limiter.limit("10/minute")
@ -834,7 +834,7 @@ async def submitMfaCode(
logger.info(f"MFA submission for session {sessionId}: action={mfaAction}, codeLen={len(mfaCode)}")
queue = _mfaCodeQueues.get(sessionId)
queue = mfaCodeQueues.get(sessionId)
if queue:
await queue.put({"action": mfaAction, "code": mfaCode})
return {"submitted": True}
@ -981,7 +981,7 @@ async def testVoice(
):
"""Test TTS voice with AI-generated sample text in the correct language."""
from modules.interfaces.interfaceVoiceObjects import getVoiceInterface
from .service import _createAiService
from .service import createAiService
from modules.datamodels.datamodelAi import AiCallRequest, AiCallOptions, OperationTypeEnum, PriorityEnum
mandateId = _validateInstanceAccess(instanceId, context)
@ -992,7 +992,7 @@ async def testVoice(
botName = body.get("botName", "AI Assistant")
try:
aiService = _createAiService(context.user, mandateId, instanceId)
aiService = createAiService(context.user, mandateId, instanceId)
await aiService.ensureAiObjectsInitialized()
aiRequest = AiCallRequest(

View file

@ -532,7 +532,7 @@ def getActiveService(sessionId: str) -> Optional["TeamsbotService"]:
# AI Service Factory (for billing-aware AI calls)
# =========================================================================
def _createAiService(user, mandateId, featureInstanceId=None):
def createAiService(user, mandateId, featureInstanceId=None):
"""Create a properly wired AiService via the service center."""
ctx = ServiceCenterContext(
user=user,
@ -546,15 +546,15 @@ def _createAiService(user, mandateId, featureInstanceId=None):
# =========================================================================
# Session Event Queues (for SSE streaming to frontend)
# =========================================================================
_sessionEvents: Dict[str, asyncio.Queue] = {}
sessionEvents: Dict[str, asyncio.Queue] = {}
async def _emitSessionEvent(sessionId: str, eventType: str, data: Any):
"""Emit an event to the session's SSE stream.
Creates the queue on-demand so events are never silently dropped."""
if sessionId not in _sessionEvents:
_sessionEvents[sessionId] = asyncio.Queue()
await _sessionEvents[sessionId].put({"type": eventType, "data": data, "timestamp": getIsoTimestamp()})
if sessionId not in sessionEvents:
sessionEvents[sessionId] = asyncio.Queue()
await sessionEvents[sessionId].put({"type": eventType, "data": data, "timestamp": getIsoTimestamp()})
def _normalizeGatewayHostForBotWs(host: str) -> str:
@ -709,7 +709,7 @@ class TeamsbotService:
interface = interfaceDb.getInterface(self.currentUser, self.mandateId, self.instanceId)
# Initialize SSE event queue
_sessionEvents[sessionId] = asyncio.Queue()
sessionEvents[sessionId] = asyncio.Queue()
try:
# Update status to JOINING
@ -798,7 +798,7 @@ class TeamsbotService:
})
# Cleanup event queue
_sessionEvents.pop(sessionId, None)
sessionEvents.pop(sessionId, None)
# =========================================================================
# Browser Bot WebSocket Communication
@ -1048,9 +1048,9 @@ class TeamsbotService:
"timestamp": getIsoTimestamp(),
})
from .routeFeatureTeamsbot import _mfaCodeQueues, _mfaWaitTasks
from .routeFeatureTeamsbot import mfaCodeQueues, mfaWaitTasks
mfaQueue = asyncio.Queue()
_mfaCodeQueues[sessionId] = mfaQueue
mfaCodeQueues[sessionId] = mfaQueue
async def _waitAndForwardMfa(sid, queue, ws):
try:
@ -1075,10 +1075,10 @@ class TeamsbotService:
except asyncio.CancelledError:
logger.info(f"[WS] MFA wait cancelled for session {sid} (resolved via page)")
finally:
_mfaCodeQueues.pop(sid, None)
_mfaWaitTasks.pop(sid, None)
mfaCodeQueues.pop(sid, None)
mfaWaitTasks.pop(sid, None)
_mfaWaitTasks[sessionId] = asyncio.create_task(
mfaWaitTasks[sessionId] = asyncio.create_task(
_waitAndForwardMfa(sessionId, mfaQueue, websocket)
)
@ -1100,11 +1100,11 @@ class TeamsbotService:
elif msgType == "mfaResolved":
success = message.get("success", False)
logger.info(f"[WS] MFA resolved: success={success}")
from .routeFeatureTeamsbot import _mfaCodeQueues, _mfaWaitTasks
task = _mfaWaitTasks.pop(sessionId, None)
from .routeFeatureTeamsbot import mfaCodeQueues, mfaWaitTasks
task = mfaWaitTasks.pop(sessionId, None)
if task and not task.done():
task.cancel()
_mfaCodeQueues.pop(sessionId, None)
mfaCodeQueues.pop(sessionId, None)
await _emitSessionEvent(sessionId, "mfaResolved", {
"success": success,
"timestamp": getIsoTimestamp(),
@ -1844,7 +1844,7 @@ class TeamsbotService:
)
try:
aiService = _createAiService(
aiService = createAiService(
self.currentUser, self.mandateId, self.instanceId
)
await aiService.ensureAiObjectsInitialized()
@ -1976,7 +1976,7 @@ class TeamsbotService:
)
try:
aiService = _createAiService(
aiService = createAiService(
self.currentUser, self.mandateId, self.instanceId
)
await aiService.ensureAiObjectsInitialized()
@ -2195,7 +2195,7 @@ class TeamsbotService:
# Call SPEECH_TEAMS
try:
aiService = _createAiService(self.currentUser, self.mandateId, self.instanceId)
aiService = createAiService(self.currentUser, self.mandateId, self.instanceId)
await aiService.ensureAiObjectsInitialized()
request = AiCallRequest(
@ -3767,7 +3767,7 @@ class TeamsbotService:
)
try:
aiService = _createAiService(
aiService = createAiService(
self.currentUser, self.mandateId, self.instanceId
)
await aiService.ensureAiObjectsInitialized()
@ -3930,7 +3930,7 @@ class TeamsbotService:
"""Summarize a long user-provided session context to its essential points.
This reduces token usage in every subsequent AI call."""
try:
aiService = _createAiService(self.currentUser, self.mandateId, self.instanceId)
aiService = createAiService(self.currentUser, self.mandateId, self.instanceId)
await aiService.ensureAiObjectsInitialized()
request = AiCallRequest(
@ -3980,7 +3980,7 @@ class TeamsbotService:
lines.append(f"[{speaker}]: {text}")
textToSummarize = "\n".join(lines)
aiService = _createAiService(self.currentUser, self.mandateId, self.instanceId)
aiService = createAiService(self.currentUser, self.mandateId, self.instanceId)
await aiService.ensureAiObjectsInitialized()
request = AiCallRequest(
@ -4021,7 +4021,7 @@ class TeamsbotService:
for t in transcripts
)
aiService = _createAiService(self.currentUser, self.mandateId, self.instanceId)
aiService = createAiService(self.currentUser, self.mandateId, self.instanceId)
await aiService.ensureAiObjectsInitialized()
request = AiCallRequest(

View file

@ -16,7 +16,7 @@ from .accountingConnectorBase import (
AccountingChart,
SyncResult,
)
from .accountingRegistry import _getAccountingRegistry
from .accountingRegistry import getAccountingRegistry
logger = logging.getLogger(__name__)
@ -26,7 +26,7 @@ class AccountingBridge:
def __init__(self, trusteeInterface):
self._trusteeInterface = trusteeInterface
self._registry = _getAccountingRegistry()
self._registry = getAccountingRegistry()
async def getActiveConfig(self, featureInstanceId: str) -> Optional[Dict[str, Any]]:
"""Load the active TrusteeAccountingConfig for a feature instance."""

View file

@ -39,6 +39,26 @@ class AccountingChart(BaseModel):
accountType: Optional[str] = None
class AccountingPeriodBalance(BaseModel):
    """Balance snapshot for one account in one period.

    Mirrors the `TrusteeDataAccountBalance` table 1:1 so
    `accountingDataSync._persistBalances` can persist connector output without
    re-mapping. `closingBalance` is always the *cumulative* balance at the end
    of the period (NOT the period's net movement). `periodMonth=0` denotes the
    annual bucket (closing balance per fiscal year-end).
    """
    accountNumber: str
    periodYear: int
    # 0 = annual bucket; 1..12 = calendar month within periodYear.
    periodMonth: int = 0
    # Cumulative balance at period start (= previous period's closingBalance).
    openingBalance: float = 0.0
    debitTotal: float = 0.0
    creditTotal: float = 0.0
    # Cumulative balance at period end, incl. carry-over (see class docstring).
    closingBalance: float = 0.0
    currency: str = "CHF"
    # Presumably an ISO date string snapshot marker — TODO confirm format.
    asOfDate: Optional[str] = None
class SyncResult(BaseModel):
"""Result of a sync operation."""
success: bool
@ -126,6 +146,31 @@ class BaseAccountingConnector(ABC):
accountNumbers: pre-fetched account numbers (avoids redundant API call). Override in connectors that support it."""
return []
    async def getAccountBalances(
        self,
        config: Dict[str, Any],
        years: List[int],
        accountNumbers: Optional[List[str]] = None,
    ) -> List[AccountingPeriodBalance]:
        """Read closing balances per account and period from the external system.

        Contract:
        - One row per (accountNumber, periodYear, periodMonth).
        - `periodMonth=0` => annual bucket (closing balance per fiscal year-end).
        - `periodMonth=1..12` => closing balance per end of that calendar month.
        - `closingBalance` MUST be the *cumulative* balance at period end,
          including all prior-year carry-over and yearend bookings -- NOT the
          period's net movement.
        - `openingBalance` MUST be the cumulative balance at period start
          (= previous period's closingBalance).

        Default returns []; `AccountingDataSync` will then fall back to a
        local cumulative aggregation from journal lines. Override in
        connectors that can fetch authoritative balances from the source
        system (e.g. RMA `/gl/saldo`).
        """
        # Deliberately not @abstractmethod: connectors without an
        # authoritative balance endpoint simply inherit this empty default.
        return []
async def uploadDocument(
self,
config: Dict[str, Any],

View file

@ -25,7 +25,7 @@ from pathlib import Path
from typing import Callable, Dict, Any, List, Optional, Type
from .accountingConnectorBase import BaseAccountingConnector
from .accountingRegistry import _getAccountingRegistry
from .accountingRegistry import getAccountingRegistry
logger = logging.getLogger(__name__)
@ -33,6 +33,72 @@ logger = logging.getLogger(__name__)
_HEARTBEAT_EVERY = 500
def _isIncomeStatementAccount(accountNumber: str) -> bool:
"""Swiss KMU-Kontenrahmen heuristic: 1xxx + 2xxx -> balance sheet
(cumulative carry-over across years); 3xxx..9xxx -> income statement
(reset to 0 at fiscal-year start). Used by the local fallback only;
when a connector returns balances, those values are used verbatim.
"""
a = (accountNumber or "").strip()
if not a or not a[0].isdigit():
return False
return a[0] not in ("1", "2")
def _resolveBalanceYears(
dateFrom: Optional[str],
dateTo: Optional[str],
oldestBookingDate: Optional[str],
newestBookingDate: Optional[str],
) -> List[int]:
"""Derive the list of years for which the connector should compute balances.
Prefers the ``dateFrom``/``dateTo`` import window the user requested. Falls
back to the actual oldest/newest booking date observed in the imported
journal (so e.g. a `dateTo=None` import still produces balances for every
year that has data). If nothing is known, returns the current year as a
sensible default.
"""
def _yearOf(s: Optional[str]) -> Optional[int]:
if not s:
return None
try:
return int(str(s)[:4])
except (TypeError, ValueError):
return None
fromYear = _yearOf(dateFrom) or _yearOf(oldestBookingDate)
toYear = _yearOf(dateTo) or _yearOf(newestBookingDate)
if fromYear is None and toYear is None:
return [time.gmtime().tm_year]
if fromYear is None:
fromYear = toYear
if toYear is None:
toYear = fromYear
if toYear < fromYear:
fromYear, toYear = toYear, fromYear
return list(range(fromYear, toYear + 1))
def _balanceModelToRow(b: Any, scope: Dict[str, Any]) -> Dict[str, Any]:
"""Map an ``AccountingPeriodBalance`` (or compatible dict) to a DB row."""
if isinstance(b, dict):
get = b.get
else:
get = lambda k, default=None: getattr(b, k, default)
return {
"accountNumber": str(get("accountNumber", "") or ""),
"periodYear": int(get("periodYear", 0) or 0),
"periodMonth": int(get("periodMonth", 0) or 0),
"openingBalance": round(float(get("openingBalance", 0) or 0), 2),
"debitTotal": round(float(get("debitTotal", 0) or 0), 2),
"creditTotal": round(float(get("creditTotal", 0) or 0), 2),
"closingBalance": round(float(get("closingBalance", 0) or 0), 2),
"currency": str(get("currency", "CHF") or "CHF"),
**scope,
}
def _isDebugDumpEnabled() -> bool:
"""Whether to write raw connector payloads to disk for offline inspection.
@ -101,7 +167,7 @@ class AccountingDataSync:
def __init__(self, trusteeInterface):
self._if = trusteeInterface
self._registry = _getAccountingRegistry()
self._registry = getAccountingRegistry()
async def importData(
self,
@ -246,18 +312,39 @@ class AccountingDataSync:
logger.error(f"Import contacts failed: {e}", exc_info=True)
summary["errors"].append(f"Contacts: {e}")
# ---- Phase 4: Compute account balances ----
# Progress budget: 90-95 %. Pure DB aggregation, no external calls.
# ---- Phase 4: Account balances ----
# Progress budget: 88-95 %. Connector first (RMA -> /gl/saldo, Bexio
# & Abacus -> aggregated journal). On empty/failed connector output
# we fall back to a *correct* cumulative aggregation from the
# journal lines we just persisted.
connectorBalances: list = []
balanceSource = "local-fallback"
try:
_progress(90, "Berechne Kontensaldi...")
_progress(88, "Lade Kontensaldi vom Buchhaltungssystem...")
balanceYears = _resolveBalanceYears(dateFrom, dateTo, summary.get("oldestBookingDate"), summary.get("newestBookingDate"))
connectorBalances = await connector.getAccountBalances(
connConfig,
years=balanceYears,
accountNumbers=fetchedAccountNumbers or None,
)
_dumpSyncData("accountBalances", connectorBalances)
if connectorBalances:
balanceSource = "connector"
except Exception as e:
logger.warning(f"Connector getAccountBalances failed, will use local fallback: {e}", exc_info=True)
summary["errors"].append(f"Balances connector: {e}")
try:
_progress(92, "Speichere Kontensaldi...")
balanceCount = await asyncio.to_thread(
self._persistBalances, featureInstanceId, mandateId,
TrusteeDataJournalEntry, TrusteeDataJournalLine, TrusteeDataAccountBalance,
connectorBalances, balanceSource,
)
summary["accountBalances"] = balanceCount
_progress(95, f"{balanceCount} Saldi berechnet.")
_progress(95, f"{balanceCount} Saldi gespeichert (source={balanceSource}).")
except Exception as e:
logger.error(f"Compute balances failed: {e}", exc_info=True)
logger.error(f"Persist balances failed: {e}", exc_info=True)
summary["errors"].append(f"Balances: {e}")
cfgId = cfgRecord.get("id")
@ -401,12 +488,66 @@ class AccountingDataSync:
logger.info(f"Persisted {n} contacts for {featureInstanceId} in {time.time() - t0:.1f}s")
return n
def _persistBalances(self, featureInstanceId: str, mandateId: str,
modelEntry: Type, modelLine: Type, modelBalance: Type) -> int:
"""Re-aggregate journal lines into monthly + annual balances."""
def _persistBalances(
self,
featureInstanceId: str,
mandateId: str,
modelEntry: Type,
modelLine: Type,
modelBalance: Type,
connectorBalances: list,
source: str,
) -> int:
"""Persist account balances per (account, period) into ``TrusteeDataAccountBalance``.
Source of truth (``source="connector"``): the list returned by
``BaseAccountingConnector.getAccountBalances`` is persisted 1:1.
Fallback (``source="local-fallback"``): aggregate the just-persisted
journal lines into **cumulative** balances. Unlike the previous
implementation, this version (a) carries the cumulative balance
forward across months/years for balance-sheet accounts, (b) resets
income-statement accounts at fiscal-year start, and (c) computes
``openingBalance`` correctly as the previous period's
``closingBalance``. ``openingBalance`` of the very first imported
period stays at 0 (no prior data available -- by design; see plan
document for rationale).
"""
t0 = time.time()
self._bulkClear(modelBalance, featureInstanceId)
scope = {"featureInstanceId": featureInstanceId, "mandateId": mandateId}
if connectorBalances:
rows = [_balanceModelToRow(b, scope) for b in connectorBalances]
n = self._bulkCreate(modelBalance, rows)
logger.info(
f"Persisted {n} balances for {featureInstanceId} in {time.time() - t0:.1f}s "
f"(source={source})"
)
return n
rows = self._buildLocalBalanceFallback(featureInstanceId, modelEntry, modelLine, scope)
n = self._bulkCreate(modelBalance, rows)
logger.info(
f"Persisted {n} balances for {featureInstanceId} in {time.time() - t0:.1f}s "
f"(source={source})"
)
return n
def _buildLocalBalanceFallback(
self,
featureInstanceId: str,
modelEntry: Type,
modelLine: Type,
scope: Dict[str, Any],
) -> List[Dict[str, Any]]:
"""Aggregate ``TrusteeDataJournalLine`` rows into cumulative period balances.
Returns rows ready for ``_bulkCreate``. Walks every account
chronologically through all years observed in the journal so the
cumulative balance and per-period opening are exact (within the
bounds of the imported window).
"""
entries = self._if.db.getRecordset(
modelEntry, recordFilter={"featureInstanceId": featureInstanceId},
) or []
@ -421,7 +562,9 @@ class AccountingDataSync:
modelLine, recordFilter={"featureInstanceId": featureInstanceId},
) or []
buckets: Dict[tuple, Dict[str, float]] = defaultdict(lambda: {"debit": 0.0, "credit": 0.0})
movements: Dict[tuple, Dict[str, float]] = defaultdict(lambda: {"debit": 0.0, "credit": 0.0})
observedYears: set = set()
observedAccounts: set = set()
for ln in lines:
if isinstance(ln, dict):
jeid = ln.get("journalEntryId", "")
@ -437,7 +580,7 @@ class AccountingDataSync:
bdate = entryDates.get(jeid, "")
if not accNo or not bdate:
continue
parts = bdate.split("-")
parts = str(bdate).split("-")
if len(parts) < 2:
continue
try:
@ -445,29 +588,56 @@ class AccountingDataSync:
month = int(parts[1])
except ValueError:
continue
movements[(accNo, year, month)]["debit"] += debit
movements[(accNo, year, month)]["credit"] += credit
observedYears.add(year)
observedAccounts.add(accNo)
buckets[(accNo, year, month)]["debit"] += debit
buckets[(accNo, year, month)]["credit"] += credit
buckets[(accNo, year, 0)]["debit"] += debit
buckets[(accNo, year, 0)]["credit"] += credit
if not observedYears or not observedAccounts:
return []
scope = {"featureInstanceId": featureInstanceId, "mandateId": mandateId}
rows = [{
"accountNumber": accNo,
"periodYear": year,
"periodMonth": month,
"openingBalance": 0.0,
"debitTotal": round(totals["debit"], 2),
"creditTotal": round(totals["credit"], 2),
"closingBalance": round(totals["debit"] - totals["credit"], 2),
"currency": "CHF",
**scope,
} for (accNo, year, month), totals in buckets.items()]
n = self._bulkCreate(modelBalance, rows)
logger.info(
f"Persisted {n} balances for {featureInstanceId} in {time.time() - t0:.1f}s"
)
return n
sortedYears = sorted(observedYears)
rows: List[Dict[str, Any]] = []
for accNo in sorted(observedAccounts):
isER = _isIncomeStatementAccount(accNo)
cumulativeOpeningOfYear = 0.0
for year in sortedYears:
yearOpening = 0.0 if isER else cumulativeOpeningOfYear
running = yearOpening
yearDebit = 0.0
yearCredit = 0.0
for month in range(1, 13):
opening = running
mov = movements.get((accNo, year, month), {"debit": 0.0, "credit": 0.0})
running = opening + mov["debit"] - mov["credit"]
yearDebit += mov["debit"]
yearCredit += mov["credit"]
if mov["debit"] == 0 and mov["credit"] == 0 and opening == 0 and running == 0:
continue
rows.append({
"accountNumber": accNo,
"periodYear": year,
"periodMonth": month,
"openingBalance": round(opening, 2),
"debitTotal": round(mov["debit"], 2),
"creditTotal": round(mov["credit"], 2),
"closingBalance": round(running, 2),
"currency": "CHF",
**scope,
})
rows.append({
"accountNumber": accNo,
"periodYear": year,
"periodMonth": 0,
"openingBalance": round(yearOpening, 2),
"debitTotal": round(yearDebit, 2),
"creditTotal": round(yearCredit, 2),
"closingBalance": round(running, 2),
"currency": "CHF",
**scope,
})
cumulativeOpeningOfYear = running
return rows
# ===== Low-level bulk helpers =====

View file

@ -74,7 +74,7 @@ class AccountingRegistry:
_registryInstance: Optional[AccountingRegistry] = None
def _getAccountingRegistry() -> AccountingRegistry:
def getAccountingRegistry() -> AccountingRegistry:
"""Singleton access to the accounting registry."""
global _registryInstance
if _registryInstance is None:

View file

@ -6,12 +6,22 @@ API docs: https://downloads.abacus.ch/fileadmin/ablage/abaconnect/htmlfiles/docs
Auth: OAuth 2.0 Client Credentials (Service User).
Each Abacus instance has its own host URL; there is no central cloud endpoint.
Entity API uses OData V4 format.
Account balances:
Abacus exposes an ``AccountBalances`` entity (per fiscal year), but its
availability depends on the customer's Abacus license / Profile and is
NOT guaranteed for all instances. The robust default is therefore to
aggregate balances locally from ``GeneralJournalEntries`` (always
present). If a future iteration confirms the entity for a specific
instance, ``getAccountBalances`` can be extended to prefer that source
via a config flag (e.g. ``useAccountBalancesEntity: true``).
"""
import base64
import calendar
import logging
import time
from typing import List, Dict, Any, Optional
from typing import List, Dict, Any, Optional, Tuple
import aiohttp
@ -19,6 +29,7 @@ from ..accountingConnectorBase import (
BaseAccountingConnector,
AccountingBooking,
AccountingChart,
AccountingPeriodBalance,
ConnectorConfigField,
SyncResult,
)
@ -27,6 +38,21 @@ from modules.shared.i18nRegistry import t
logger = logging.getLogger(__name__)
def _formatLastDayOfMonth(year: int, month: int) -> str:
lastDay = calendar.monthrange(year, month)[1]
return f"{year:04d}-{month:02d}-{lastDay:02d}"
def _isIncomeStatementAccount(accountNumber: str) -> bool:
"""Swiss KMU-Kontenrahmen heuristic: 1xxx + 2xxx -> balance sheet (cumulative);
3xxx..9xxx -> income statement (reset per fiscal year).
"""
a = (accountNumber or "").strip()
if not a or not a[0].isdigit():
return False
return a[0] not in ("1", "2")
class AccountingConnectorAbacus(BaseAccountingConnector):
def __init__(self):
@ -341,3 +367,158 @@ class AccountingConnectorAbacus(BaseAccountingConnector):
except Exception as e:
logger.error(f"Abacus getVendors error: {e}")
return []
async def getAccountBalances(
self,
config: Dict[str, Any],
years: List[int],
accountNumbers: Optional[List[str]] = None,
) -> List[AccountingPeriodBalance]:
"""Aggregate account balances from ``GeneralJournalEntries`` (OData V4).
Strategy:
1. Page through ``GET GeneralJournalEntries?$filter=JournalDate le YYYY-12-31``
until ``@odata.nextLink`` is exhausted. Including ALL prior years
is required to compute the carry-over for balance-sheet accounts.
2. Per (account, year, month) accumulate ``DebitAmount``/``CreditAmount``
from ``Lines``.
3. Income-statement accounts (3xxx-9xxx) reset to 0 per fiscal year;
balance-sheet accounts (1xxx-2xxx) carry their cumulative balance.
Optional optimization (not yet active): if the customer's Abacus
instance ships the ``AccountBalances`` OData entity, it can return
authoritative period balances directly. Detect via a probe GET on
``AccountBalances?$top=1`` and prefer that source. This is intentionally
deferred until we hit a customer where the entity is available --
the local aggregation is always-correct fallback.
"""
if not years:
return []
sortedYears = sorted({int(y) for y in years if y})
minYear = sortedYears[0]
maxYear = sortedYears[-1]
accountNumbersSet = set(accountNumbers) if accountNumbers else None
headers = await self._buildAuthHeaders(config)
if not headers:
logger.warning("Abacus getAccountBalances: no access token, skipping")
return []
rawEntries = await self._fetchAllJournalEntries(config, headers, dateTo=f"{maxYear}-12-31")
movements: Dict[Tuple[str, int, int], Dict[str, float]] = {}
seenAccounts: set = set()
for entry in rawEntries:
dateRaw = str(entry.get("JournalDate") or "")[:10]
if len(dateRaw) < 7:
continue
try:
year = int(dateRaw[:4])
month = int(dateRaw[5:7])
except ValueError:
continue
for line in (entry.get("Lines") or []):
accNo = str(line.get("AccountId") or "").strip()
if not accNo:
continue
seenAccounts.add(accNo)
try:
debit = float(line.get("DebitAmount") or 0)
credit = float(line.get("CreditAmount") or 0)
except (TypeError, ValueError):
continue
if debit == 0 and credit == 0:
continue
bucket = movements.setdefault((accNo, year, month), {"debit": 0.0, "credit": 0.0})
bucket["debit"] += debit
bucket["credit"] += credit
results: List[AccountingPeriodBalance] = []
for accNo in sorted(seenAccounts):
if accountNumbersSet is not None and accNo not in accountNumbersSet:
continue
isER = _isIncomeStatementAccount(accNo)
preMinYearBalance = 0.0
if not isER:
for (a, yr, _mo), m in movements.items():
if a == accNo and yr < minYear:
preMinYearBalance += m["debit"] - m["credit"]
cumulativeOpeningOfYear = preMinYearBalance
for year in sortedYears:
yearOpening = 0.0 if isER else cumulativeOpeningOfYear
running = yearOpening
yearDebit = 0.0
yearCredit = 0.0
for month in range(1, 13):
opening = running
mov = movements.get((accNo, year, month), {"debit": 0.0, "credit": 0.0})
running = opening + mov["debit"] - mov["credit"]
yearDebit += mov["debit"]
yearCredit += mov["credit"]
results.append(AccountingPeriodBalance(
accountNumber=accNo,
periodYear=year,
periodMonth=month,
openingBalance=round(opening, 2),
debitTotal=round(mov["debit"], 2),
creditTotal=round(mov["credit"], 2),
closingBalance=round(running, 2),
currency="CHF",
asOfDate=_formatLastDayOfMonth(year, month),
))
results.append(AccountingPeriodBalance(
accountNumber=accNo,
periodYear=year,
periodMonth=0,
openingBalance=round(yearOpening, 2),
debitTotal=round(yearDebit, 2),
creditTotal=round(yearCredit, 2),
closingBalance=round(running, 2),
currency="CHF",
asOfDate=f"{year}-12-31",
))
cumulativeOpeningOfYear = running
logger.info(
"Abacus getAccountBalances: %s rows from %s journal entries (years=%s)",
len(results), len(rawEntries), sortedYears,
)
return results
async def _fetchAllJournalEntries(
self,
config: Dict[str, Any],
headers: Dict[str, str],
dateTo: str,
) -> List[Dict[str, Any]]:
"""Page through ``GeneralJournalEntries`` (OData V4) following ``@odata.nextLink``.
We filter ``JournalDate le dateTo`` to bound the result, but include
ALL prior years (no lower bound) so cumulative balance-sheet
carry-over is correct.
"""
results: List[Dict[str, Any]] = []
baseUrl = self._buildEntityUrl(config, f"GeneralJournalEntries?$filter=JournalDate le {dateTo}")
nextUrl: Optional[str] = baseUrl
async with aiohttp.ClientSession() as session:
while nextUrl:
try:
async with session.get(nextUrl, headers=headers, timeout=aiohttp.ClientTimeout(total=60)) as resp:
if resp.status != 200:
body = await resp.text()
logger.warning("Abacus GeneralJournalEntries HTTP %s: %s", resp.status, body[:200])
break
data = await resp.json()
except Exception as ex:
logger.warning("Abacus GeneralJournalEntries request failed: %s", ex)
break
page = data.get("value") or []
if not isinstance(page, list):
break
results.extend(page)
nextUrl = data.get("@odata.nextLink")
return results

View file

@ -7,10 +7,20 @@ Auth: Personal Access Token (PAT) as Bearer token.
Base URL: https://api.bexio.com/
Note: Bexio uses internal account IDs (int), not account numbers.
The connector caches the chart of accounts to resolve accountNumber -> account_id.
Account balances:
Bexio does NOT expose a dedicated saldo endpoint (no equivalent to RMA's
``/gl/saldo``). ``getAccountBalances`` therefore aggregates balances
locally by paginating ``GET /3.0/accounting/journal`` (max 2000 rows per
page) and computing cumulative balances per (account, period). Income-
statement accounts (3xxx-9xxx in the Swiss KMU-Kontenrahmen) are reset
at the start of each fiscal year; balance-sheet accounts (1xxx-2xxx)
carry their cumulative balance across years.
"""
import calendar
import logging
from typing import List, Dict, Any, Optional
from typing import List, Dict, Any, Optional, Tuple
import aiohttp
@ -18,6 +28,7 @@ from ..accountingConnectorBase import (
BaseAccountingConnector,
AccountingBooking,
AccountingChart,
AccountingPeriodBalance,
ConnectorConfigField,
SyncResult,
)
@ -26,6 +37,23 @@ from modules.shared.i18nRegistry import t
logger = logging.getLogger(__name__)
_DEFAULT_API_BASE_URL = "https://api.bexio.com/"
_JOURNAL_PAGE_SIZE = 2000
def _formatLastDayOfMonth(year: int, month: int) -> str:
lastDay = calendar.monthrange(year, month)[1]
return f"{year:04d}-{month:02d}-{lastDay:02d}"
def _isIncomeStatementAccount(accountNumber: str) -> bool:
"""Swiss KMU-Kontenrahmen: 1xxx Aktiven + 2xxx Passiven -> balance sheet
(cumulative balance carried across years); 3xxx..9xxx -> income statement
(reset to 0 at fiscal-year start).
"""
a = (accountNumber or "").strip()
if not a or not a[0].isdigit():
return False
return a[0] not in ("1", "2")
class AccountingConnectorBexio(BaseAccountingConnector):
@ -260,3 +288,148 @@ class AccountingConnectorBexio(BaseAccountingConnector):
except Exception as e:
logger.error(f"Bexio getCustomers error: {e}")
return []
async def getAccountBalances(
self,
config: Dict[str, Any],
years: List[int],
accountNumbers: Optional[List[str]] = None,
) -> List[AccountingPeriodBalance]:
"""Aggregate account balances locally from ``/3.0/accounting/journal``.
Bexio offers no per-account saldo endpoint, so we paginate the full
journal up to the latest requested fiscal year-end and compute
opening / debit / credit / closing per (account, period). For balance-
sheet accounts the cumulative carry-over from prior years is included;
for income-statement accounts the balance is reset at the start of
every requested fiscal year (per Swiss accounting principles).
"""
if not years:
return []
sortedYears = sorted({int(y) for y in years if y})
minYear = sortedYears[0]
maxYear = sortedYears[-1]
accountNumbersSet = set(accountNumbers) if accountNumbers else None
accounts = await self._loadRawAccounts(config)
accIdToNumber: Dict[int, str] = {acc.get("id"): str(acc.get("account_no", "")) for acc in accounts if acc.get("id") is not None and acc.get("account_no") is not None}
if not accIdToNumber:
logger.warning("Bexio getAccountBalances: chart of accounts is empty -- cannot derive balances")
return []
rawEntries = await self._fetchAllJournalRows(config, dateTo=f"{maxYear}-12-31")
movements: Dict[Tuple[str, int, int], Dict[str, float]] = {}
for e in rawEntries:
dateRaw = str(e.get("date") or "")[:10]
if len(dateRaw) < 7:
continue
try:
year = int(dateRaw[:4])
month = int(dateRaw[5:7])
except ValueError:
continue
try:
amount = float(e.get("amount") or 0)
except (TypeError, ValueError):
continue
if amount == 0:
continue
debitAcc = accIdToNumber.get(e.get("debit_account_id"))
creditAcc = accIdToNumber.get(e.get("credit_account_id"))
if debitAcc:
bucket = movements.setdefault((debitAcc, year, month), {"debit": 0.0, "credit": 0.0})
bucket["debit"] += amount
if creditAcc:
bucket = movements.setdefault((creditAcc, year, month), {"debit": 0.0, "credit": 0.0})
bucket["credit"] += amount
accountsByNumber = sorted({n for n in accIdToNumber.values() if n})
results: List[AccountingPeriodBalance] = []
for accNo in accountsByNumber:
if accountNumbersSet is not None and accNo not in accountNumbersSet:
continue
isER = _isIncomeStatementAccount(accNo)
preMinYearBalance = 0.0
if not isER:
for (a, yr, _mo), m in movements.items():
if a == accNo and yr < minYear:
preMinYearBalance += m["debit"] - m["credit"]
cumulativeOpeningOfYear = preMinYearBalance
for year in sortedYears:
if isER:
yearOpening = 0.0
else:
yearOpening = cumulativeOpeningOfYear
running = yearOpening
yearDebit = 0.0
yearCredit = 0.0
for month in range(1, 13):
opening = running
mov = movements.get((accNo, year, month), {"debit": 0.0, "credit": 0.0})
running = opening + mov["debit"] - mov["credit"]
yearDebit += mov["debit"]
yearCredit += mov["credit"]
results.append(AccountingPeriodBalance(
accountNumber=accNo,
periodYear=year,
periodMonth=month,
openingBalance=round(opening, 2),
debitTotal=round(mov["debit"], 2),
creditTotal=round(mov["credit"], 2),
closingBalance=round(running, 2),
currency="CHF",
asOfDate=_formatLastDayOfMonth(year, month),
))
results.append(AccountingPeriodBalance(
accountNumber=accNo,
periodYear=year,
periodMonth=0,
openingBalance=round(yearOpening, 2),
debitTotal=round(yearDebit, 2),
creditTotal=round(yearCredit, 2),
closingBalance=round(running, 2),
currency="CHF",
asOfDate=f"{year}-12-31",
))
cumulativeOpeningOfYear = running
logger.info("Bexio getAccountBalances: %s rows from %s journal entries (years=%s)", len(results), len(rawEntries), sortedYears)
return results
async def _fetchAllJournalRows(self, config: Dict[str, Any], dateTo: str) -> List[Dict[str, Any]]:
"""Paginate ``GET /3.0/accounting/journal?to=YYYY-12-31`` and return all rows.
Bexio caps page size at 2000; we fetch until a short page is returned.
Failures abort early (returning whatever rows were collected) -- the
caller logs the row count, so partial data is visible.
"""
rows: List[Dict[str, Any]] = []
offset = 0
url = self._buildUrl(config, "3.0/accounting/journal")
async with aiohttp.ClientSession() as session:
while True:
params = {"to": dateTo, "limit": str(_JOURNAL_PAGE_SIZE), "offset": str(offset)}
try:
async with session.get(url, headers=self._buildHeaders(config), params=params, timeout=aiohttp.ClientTimeout(total=60)) as resp:
if resp.status != 200:
body = await resp.text()
logger.warning("Bexio /accounting/journal HTTP %s offset=%s: %s", resp.status, offset, body[:200])
break
page = await resp.json()
except Exception as ex:
logger.warning("Bexio /accounting/journal request failed offset=%s: %s", offset, ex)
break
if not isinstance(page, list) or not page:
break
rows.extend(page)
if len(page) < _JOURNAL_PAGE_SIZE:
break
offset += _JOURNAL_PAGE_SIZE
return rows

View file

@ -9,6 +9,7 @@ Base URL: https://service.runmyaccounts.com/api/latest/clients/{clientName}/
"""
import asyncio
import calendar
import json
import logging
import re
@ -21,6 +22,7 @@ from ..accountingConnectorBase import (
BaseAccountingConnector,
AccountingBooking,
AccountingChart,
AccountingPeriodBalance,
ConnectorConfigField,
SyncResult,
)
@ -31,6 +33,73 @@ logger = logging.getLogger(__name__)
_DEFAULT_API_BASE_URL = "https://service.runmyaccounts.com/api/latest/clients/"
def _formatLastDayOfMonth(year: int, month: int) -> str:
"""Return ``YYYY-MM-DD`` of the last day of a calendar month."""
lastDay = calendar.monthrange(year, month)[1]
return f"{year:04d}-{month:02d}-{lastDay:02d}"
def _isIncomeStatementAccount(accountNumber: str) -> bool:
"""Decide whether an account is part of the income statement (Erfolgsrechnung).
Swiss KMU-Kontenrahmen: 1xxx Aktiven, 2xxx Passiven (incl. 28xx
Eigenkapital) -> balance sheet; 3xxx..9xxx -> income statement.
Used by the RMA connector to choose between the two `/gl/saldo` query
variants (with vs. without ``from`` parameter).
"""
a = (accountNumber or "").strip()
if not a or not a[0].isdigit():
return False
return a[0] not in ("1", "2")
def _parseSaldoBody(body: str) -> List[tuple]:
"""Parse the response body of ``GET /gl/saldo`` (JSON or XML).
Returns a list of ``(accountNumber, saldo)`` tuples. The endpoint
delivers ``{"row": [{"column": [accno, label, saldo]}, ...]}`` (JSON) or
``<table><row><column>accno</column><column>label</column><column>saldo</column></row>...``
(XML). Rows that cannot be parsed are silently skipped to keep one bad row
from poisoning the whole sync.
"""
if not body or not body.strip():
return []
rows: List[tuple] = []
try:
data = json.loads(body)
items = data.get("row") if isinstance(data, dict) else data
if isinstance(items, dict):
items = [items]
if isinstance(items, list):
for item in items:
if not isinstance(item, dict):
continue
cols = item.get("column") or []
if isinstance(cols, list) and len(cols) >= 3:
accno = str(cols[0]).strip()
try:
saldo = float(cols[2])
except (TypeError, ValueError):
continue
if accno:
rows.append((accno, saldo))
return rows
except (json.JSONDecodeError, ValueError):
pass
rowMatches = re.findall(r"<row>(.*?)</row>", body, re.DOTALL)
for raw in rowMatches:
cols = re.findall(r"<column>([^<]*)</column>", raw)
if len(cols) >= 3:
accno = cols[0].strip()
try:
saldo = float(cols[2])
except (TypeError, ValueError):
continue
if accno:
rows.append((accno, saldo))
return rows
class AccountingConnectorRma(BaseAccountingConnector):
def getConnectorType(self) -> str:
@ -447,6 +516,191 @@ class AccountingConnectorRma(BaseAccountingConnector):
logger.error(f"RMA getJournalEntries error: {e}", exc_info=True)
return []
async def getAccountBalances(
self,
config: Dict[str, Any],
years: List[int],
accountNumbers: Optional[List[str]] = None,
) -> List[AccountingPeriodBalance]:
"""Fetch authoritative closing balances per account and period via RMA's
``GET /gl/saldo`` endpoint.
For each requested year we issue 13 API calls (one per month-end + one
for the prior fiscal year-end as opening reference). The endpoint
returns the cumulative balance per account at the requested ``to`` date,
already including prior-year carry-over and yearend bookings -- which
is exactly the value the local journal-line aggregation cannot
reconstruct when the import window covers only part of the history.
``accno`` is mandatory; we use a digit-length-grouped wildcard
(``xxxx`` matches all 4-digit accounts, ``xxxxx`` all 5-digit, etc.)
derived from the chart of accounts, so 1-2 calls cover every account
per period.
"""
if not years:
return []
accountNumbersSet: Optional[set] = set(accountNumbers) if accountNumbers else None
wildcardPatterns = await self._resolveWildcardPatterns(config)
if not wildcardPatterns:
logger.warning("RMA getAccountBalances: chart of accounts is empty, no wildcards derivable")
return []
results: List[AccountingPeriodBalance] = []
sortedYears = sorted({int(y) for y in years if y})
for year in sortedYears:
priorYearEnd = f"{year - 1}-12-31"
priorSaldosRaw = await self._fetchSaldoMapForDate(config, wildcardPatterns, priorYearEnd)
# ER (income statement) accounts reset to 0 at the start of each
# fiscal year -- prior-year YTD must NOT carry forward as opening.
priorSaldos = {a: (0.0 if _isIncomeStatementAccount(a) else v) for a, v in priorSaldosRaw.items()}
runningOpening: Dict[str, float] = dict(priorSaldos)
decSaldos: Dict[str, float] = {}
for month in range(1, 13):
lastDay = _formatLastDayOfMonth(year, month)
saldos = await self._fetchSaldoMapForDate(config, wildcardPatterns, lastDay)
accountKeys = set(saldos.keys()) | set(runningOpening.keys())
for accno in accountKeys:
if accountNumbersSet is not None and accno not in accountNumbersSet:
continue
closing = saldos.get(accno, runningOpening.get(accno, 0.0))
opening = runningOpening.get(accno, 0.0)
results.append(AccountingPeriodBalance(
accountNumber=accno,
periodYear=year,
periodMonth=month,
openingBalance=round(opening, 2),
closingBalance=round(closing, 2),
currency="CHF",
asOfDate=lastDay,
))
runningOpening = {**runningOpening, **saldos}
if month == 12:
decSaldos = dict(saldos)
annualKeys = set(decSaldos.keys()) | set(priorSaldos.keys())
for accno in annualKeys:
if accountNumbersSet is not None and accno not in accountNumbersSet:
continue
closing = decSaldos.get(accno, priorSaldos.get(accno, 0.0))
opening = priorSaldos.get(accno, 0.0)
results.append(AccountingPeriodBalance(
accountNumber=accno,
periodYear=year,
periodMonth=0,
openingBalance=round(opening, 2),
closingBalance=round(closing, 2),
currency="CHF",
asOfDate=f"{year}-12-31",
))
logger.info(
"RMA getAccountBalances: %s rows for years=%s, wildcards=%s",
len(results), sortedYears, wildcardPatterns,
)
return results
async def _resolveWildcardPatterns(self, config: Dict[str, Any]) -> List[str]:
"""Derive `accno` wildcard patterns from the chart of accounts.
RMA's `/gl/saldo` requires `accno`; using digit-length-grouped
wildcards (`xxxx`, `xxxxx`, ...) lets us cover every account in 1-2
calls per period instead of one call per account number.
"""
try:
charts = await self.getChartOfAccounts(config)
except Exception as ex:
logger.warning("RMA _resolveWildcardPatterns: getChartOfAccounts failed: %s", ex)
return []
lengths = set()
for c in charts:
accno = (c.accountNumber or "").strip()
if accno.isdigit():
lengths.add(len(accno))
return [("x" * n) for n in sorted(lengths)]
async def _fetchSaldoMapForDate(
self,
config: Dict[str, Any],
wildcardPatterns: List[str],
toDate: str,
) -> Dict[str, float]:
"""Call `/gl/saldo` and return ``{accountNumber: cumulativeSaldo}``.
Per RMA docs ("Warning: Chart of the balance sheet do not need a from
date. Charts of the income statement need from and to parameter."),
we issue **two** calls per pattern:
* No ``from`` -> correct cumulative saldo for balance-sheet accounts
(1xxx, 2xxx in Swiss KMU-Kontenrahmen).
* ``from=YYYY-01-01`` (year of ``toDate``) -> correct YTD result for
income-statement accounts (3xxx..9xxx, which reset annually).
Per account number we keep the value from the appropriate call.
Empty / failed responses are logged at DEBUG and skipped to avoid
aborting the whole sync.
"""
yearStart = f"{toDate[:4]}-01-01"
bsRows: Dict[str, float] = {}
erRows: Dict[str, float] = {}
for pattern in wildcardPatterns:
try:
bs = await self._fetchSaldoRows(config, accno=pattern, fromDate=None, toDate=toDate)
except Exception as ex:
logger.debug("RMA _fetchSaldoMapForDate(BS, pattern=%s, to=%s) failed: %s", pattern, toDate, ex)
bs = []
try:
er = await self._fetchSaldoRows(config, accno=pattern, fromDate=yearStart, toDate=toDate)
except Exception as ex:
logger.debug("RMA _fetchSaldoMapForDate(ER, pattern=%s, %s..%s) failed: %s", pattern, yearStart, toDate, ex)
er = []
for accno, saldo in bs:
bsRows[accno] = saldo
for accno, saldo in er:
erRows[accno] = saldo
merged: Dict[str, float] = {}
for accno in set(bsRows) | set(erRows):
if _isIncomeStatementAccount(accno):
merged[accno] = erRows.get(accno, bsRows.get(accno, 0.0))
else:
merged[accno] = bsRows.get(accno, erRows.get(accno, 0.0))
return merged
async def _fetchSaldoRows(
self,
config: Dict[str, Any],
accno: str,
fromDate: Optional[str],
toDate: str,
) -> List[tuple]:
"""Single `/gl/saldo` call. Returns list of ``(accountNumber, saldo)`` tuples."""
url = self._buildUrl(config, "gl/saldo")
params: Dict[str, str] = {
"accno": accno,
"to": toDate,
"bookkeeping_main_curr": "true",
}
if fromDate:
params["from"] = fromDate
async with aiohttp.ClientSession() as session:
async with session.get(
url,
headers=self._buildHeaders(config),
params=params,
timeout=aiohttp.ClientTimeout(total=20),
) as resp:
if resp.status != 200:
body = await resp.text()
logger.debug("RMA /gl/saldo accno=%s from=%s to=%s -> HTTP %s: %s", accno, fromDate, toDate, resp.status, body[:200])
return []
body = await resp.text()
return _parseSaldoBody(body)
async def _fetchGlBulk(self, config: Dict[str, Any], params: Dict[str, str]) -> List[Dict[str, Any]]:
"""Try GET /gl to fetch journal entries in bulk (not all RMA versions support this)."""
try:

View file

@ -1109,10 +1109,15 @@ class TrusteeObjects:
)
def _cleanDocumentRecords(records):
return [
TrusteeDocument(**{k: v for k, v in r.items() if not k.startswith("_") and k != "documentData"})
for r in records
]
cleaned = []
for r in records:
labelCols = {k: v for k, v in r.items() if k.endswith("Label")}
filteredFields = {k: v for k, v in r.items() if not k.startswith("_") and k != "documentData"}
doc = TrusteeDocument(**filteredFields)
d = doc.model_dump()
d.update(labelCols)
cleaned.append(d)
return cleaned
if isinstance(result, PaginatedResult):
result.items = _cleanDocumentRecords(result.items)
@ -1133,10 +1138,15 @@ class TrusteeObjects:
)
def _cleanDocumentRecords(records):
return [
TrusteeDocument(**{k: v for k, v in r.items() if not k.startswith("_") and k != "documentData"})
for r in records
]
cleaned = []
for r in records:
labelCols = {k: v for k, v in r.items() if k.endswith("Label")}
filteredFields = {k: v for k, v in r.items() if not k.startswith("_") and k != "documentData"}
doc = TrusteeDocument(**filteredFields)
d = doc.model_dump()
d.update(labelCols)
cleaned.append(d)
return cleaned
if isinstance(result, PaginatedResult):
result.items = _cleanDocumentRecords(result.items)
@ -1297,10 +1307,13 @@ class TrusteeObjects:
def _cleanAndValidate(records):
items = []
for record in records:
labelCols = {k: v for k, v in record.items() if k.endswith("Label")}
cleanedRecord = {k: v for k, v in record.items() if not k.startswith("_") or k in keepFields}
position = self._toTrusteePositionOrDelete(cleanedRecord, deleteCorrupt=True)
if position is not None:
items.append(position)
d = position.model_dump()
d.update(labelCols)
items.append(d)
return items
if isinstance(result, PaginatedResult):

View file

@ -394,9 +394,15 @@ TEMPLATE_WORKFLOWS = [
{"id": "extract", "type": "trustee.extractFromFiles", "label": "Dokumente extrahieren", "_method": "trustee", "_action": "extractFromFiles",
"parameters": {"featureInstanceId": "{{featureInstanceId}}", "prompt": ""}, "position": {"x": 250, "y": 0}},
{"id": "process", "type": "trustee.processDocuments", "label": "Verarbeiten", "_method": "trustee", "_action": "processDocuments",
"parameters": {"documentList": [], "featureInstanceId": "{{featureInstanceId}}"}, "position": {"x": 500, "y": 0}},
"parameters": {
"documentList": {"type": "ref", "nodeId": "extract", "path": ["documents"]},
"featureInstanceId": "{{featureInstanceId}}",
}, "position": {"x": 500, "y": 0}},
{"id": "sync", "type": "trustee.syncToAccounting", "label": "Synchronisieren", "_method": "trustee", "_action": "syncToAccounting",
"parameters": {"documentList": [], "featureInstanceId": "{{featureInstanceId}}"}, "position": {"x": 750, "y": 0}},
"parameters": {
"documentList": {"type": "ref", "nodeId": "process", "path": ["documents"]},
"featureInstanceId": "{{featureInstanceId}}",
}, "position": {"x": 750, "y": 0}},
],
"connections": [
{"source": "trigger", "sourcePort": 0, "target": "extract", "targetPort": 0},

View file

@ -412,34 +412,41 @@ def get_position_options(
# ===== Organisation Routes =====
@router.get("/{instanceId}/organisations", response_model=PaginatedResponse[TrusteeOrganisation])
@router.get("/{instanceId}/organisations")
@limiter.limit("30/minute")
def get_organisations(
request: Request,
instanceId: str = Path(..., description="Feature Instance ID"),
pagination: Optional[str] = Query(None, description="JSON-encoded PaginationParams"),
context: RequestContext = Depends(getRequestContext)
) -> PaginatedResponse[TrusteeOrganisation]:
):
"""Get all organisations for a feature instance with optional pagination."""
from modules.routes.routeHelpers import enrichRowsWithFkLabels
mandateId = _validateInstanceAccess(instanceId, context)
paginationParams = _parsePagination(pagination)
interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
result = interface.getAllOrganisations(paginationParams)
def _toDicts(items):
return [r.model_dump() if hasattr(r, "model_dump") else r for r in items]
if paginationParams and hasattr(result, 'items'):
return PaginatedResponse(
items=result.items,
pagination=PaginationMetadata(
enriched = enrichRowsWithFkLabels(_toDicts(result.items), TrusteeOrganisation)
return {
"items": enriched,
"pagination": PaginationMetadata(
currentPage=paginationParams.page or 1,
pageSize=paginationParams.pageSize or 20,
totalItems=result.totalItems,
totalPages=result.totalPages,
sort=paginationParams.sort if paginationParams else [],
filters=paginationParams.filters if paginationParams else None
)
)
return PaginatedResponse(items=result if isinstance(result, list) else result.items, pagination=None)
).model_dump(),
}
items = result if isinstance(result, list) else result.items
enriched = enrichRowsWithFkLabels(_toDicts(items), TrusteeOrganisation)
return {"items": enriched, "pagination": None}
@router.get("/{instanceId}/organisations/{orgId}", response_model=TrusteeOrganisation)
@ -525,34 +532,41 @@ def delete_organisation(
# ===== Role Routes =====
@router.get("/{instanceId}/roles", response_model=PaginatedResponse[TrusteeRole])
@router.get("/{instanceId}/roles")
@limiter.limit("30/minute")
def get_roles(
request: Request,
instanceId: str = Path(..., description="Feature Instance ID"),
pagination: Optional[str] = Query(None),
context: RequestContext = Depends(getRequestContext)
) -> PaginatedResponse[TrusteeRole]:
):
"""Get all roles with optional pagination."""
from modules.routes.routeHelpers import enrichRowsWithFkLabels
mandateId = _validateInstanceAccess(instanceId, context)
paginationParams = _parsePagination(pagination)
interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
result = interface.getAllRoles(paginationParams)
def _toDicts(items):
return [r.model_dump() if hasattr(r, "model_dump") else r for r in items]
if paginationParams and hasattr(result, 'items'):
return PaginatedResponse(
items=result.items,
pagination=PaginationMetadata(
enriched = enrichRowsWithFkLabels(_toDicts(result.items), TrusteeRole)
return {
"items": enriched,
"pagination": PaginationMetadata(
currentPage=paginationParams.page or 1,
pageSize=paginationParams.pageSize or 20,
totalItems=result.totalItems,
totalPages=result.totalPages,
sort=paginationParams.sort if paginationParams else [],
filters=paginationParams.filters if paginationParams else None
)
)
return PaginatedResponse(items=result if isinstance(result, list) else result.items, pagination=None)
).model_dump(),
}
items = result if isinstance(result, list) else result.items
enriched = enrichRowsWithFkLabels(_toDicts(items), TrusteeRole)
return {"items": enriched, "pagination": None}
@router.get("/{instanceId}/roles/{roleId}", response_model=TrusteeRole)
@ -638,34 +652,41 @@ def delete_role(
# ===== Access Routes =====
@router.get("/{instanceId}/access", response_model=PaginatedResponse[TrusteeAccess])
@router.get("/{instanceId}/access")
@limiter.limit("30/minute")
def get_all_access(
request: Request,
instanceId: str = Path(..., description="Feature Instance ID"),
pagination: Optional[str] = Query(None),
context: RequestContext = Depends(getRequestContext)
) -> PaginatedResponse[TrusteeAccess]:
):
"""Get all access records with optional pagination."""
from modules.routes.routeHelpers import enrichRowsWithFkLabels
mandateId = _validateInstanceAccess(instanceId, context)
paginationParams = _parsePagination(pagination)
interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
result = interface.getAllAccess(paginationParams)
def _toDicts(items):
return [r.model_dump() if hasattr(r, "model_dump") else r for r in items]
if paginationParams and hasattr(result, 'items'):
return PaginatedResponse(
items=result.items,
pagination=PaginationMetadata(
enriched = enrichRowsWithFkLabels(_toDicts(result.items), TrusteeAccess)
return {
"items": enriched,
"pagination": PaginationMetadata(
currentPage=paginationParams.page or 1,
pageSize=paginationParams.pageSize or 20,
totalItems=result.totalItems,
totalPages=result.totalPages,
sort=paginationParams.sort if paginationParams else [],
filters=paginationParams.filters if paginationParams else None
)
)
return PaginatedResponse(items=result if isinstance(result, list) else result.items, pagination=None)
).model_dump(),
}
items = result if isinstance(result, list) else result.items
enriched = enrichRowsWithFkLabels(_toDicts(items), TrusteeAccess)
return {"items": enriched, "pagination": None}
@router.get("/{instanceId}/access/{accessId}", response_model=TrusteeAccess)
@ -781,34 +802,41 @@ def delete_access(
# ===== Contract Routes =====
@router.get("/{instanceId}/contracts", response_model=PaginatedResponse[TrusteeContract])
@router.get("/{instanceId}/contracts")
@limiter.limit("30/minute")
def get_contracts(
request: Request,
instanceId: str = Path(..., description="Feature Instance ID"),
pagination: Optional[str] = Query(None),
context: RequestContext = Depends(getRequestContext)
) -> PaginatedResponse[TrusteeContract]:
):
"""Get all contracts with optional pagination."""
from modules.routes.routeHelpers import enrichRowsWithFkLabels
mandateId = _validateInstanceAccess(instanceId, context)
paginationParams = _parsePagination(pagination)
interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
result = interface.getAllContracts(paginationParams)
def _toDicts(items):
return [r.model_dump() if hasattr(r, "model_dump") else r for r in items]
if paginationParams and hasattr(result, 'items'):
return PaginatedResponse(
items=result.items,
pagination=PaginationMetadata(
enriched = enrichRowsWithFkLabels(_toDicts(result.items), TrusteeContract)
return {
"items": enriched,
"pagination": PaginationMetadata(
currentPage=paginationParams.page or 1,
pageSize=paginationParams.pageSize or 20,
totalItems=result.totalItems,
totalPages=result.totalPages,
sort=paginationParams.sort if paginationParams else [],
filters=paginationParams.filters if paginationParams else None
)
)
return PaginatedResponse(items=result if isinstance(result, list) else result.items, pagination=None)
).model_dump(),
}
items = result if isinstance(result, list) else result.items
enriched = enrichRowsWithFkLabels(_toDicts(items), TrusteeContract)
return {"items": enriched, "pagination": None}
@router.get("/{instanceId}/contracts/{contractId}", response_model=TrusteeContract)
@ -909,7 +937,7 @@ def delete_contract(
# ===== Document Routes =====
@router.get("/{instanceId}/documents", response_model=PaginatedResponse[TrusteeDocument])
@router.get("/{instanceId}/documents")
@limiter.limit("30/minute")
def get_documents(
request: Request,
@ -918,7 +946,7 @@ def get_documents(
mode: Optional[str] = Query(None, description="'filterValues' for distinct column values, 'ids' for all filtered IDs"),
column: Optional[str] = Query(None, description="Column key (required when mode=filterValues)"),
context: RequestContext = Depends(getRequestContext)
) -> PaginatedResponse[TrusteeDocument]:
):
"""Get all documents (metadata only) with optional pagination."""
mandateId = _validateInstanceAccess(instanceId, context)
@ -929,19 +957,23 @@ def get_documents(
interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
result = interface.getAllDocuments(paginationParams)
def _itemsToDicts(items):
return [r.model_dump() if hasattr(r, 'model_dump') else r for r in items]
if paginationParams and hasattr(result, 'items'):
return PaginatedResponse(
items=result.items,
pagination=PaginationMetadata(
return {
"items": _itemsToDicts(result.items),
"pagination": PaginationMetadata(
currentPage=paginationParams.page or 1,
pageSize=paginationParams.pageSize or 20,
totalItems=result.totalItems,
totalPages=result.totalPages,
sort=paginationParams.sort if paginationParams else [],
filters=paginationParams.filters if paginationParams else None
)
)
return PaginatedResponse(items=result if isinstance(result, list) else result.items, pagination=None)
).model_dump(),
}
items = result if isinstance(result, list) else result.items
return {"items": _itemsToDicts(items), "pagination": None}
def _handleDocumentMode(instanceId, mandateId, mode, column, pagination, context):
@ -1154,7 +1186,7 @@ def delete_document(
# ===== Position Routes =====
@router.get("/{instanceId}/positions", response_model=PaginatedResponse[TrusteePosition])
@router.get("/{instanceId}/positions")
@limiter.limit("30/minute")
def get_positions(
request: Request,
@ -1163,7 +1195,7 @@ def get_positions(
mode: Optional[str] = Query(None, description="'filterValues' for distinct column values, 'ids' for all filtered IDs"),
column: Optional[str] = Query(None, description="Column key (required when mode=filterValues)"),
context: RequestContext = Depends(getRequestContext)
) -> PaginatedResponse[TrusteePosition]:
):
"""Get all positions with optional pagination."""
mandateId = _validateInstanceAccess(instanceId, context)
@ -1174,19 +1206,23 @@ def get_positions(
interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
result = interface.getAllPositions(paginationParams)
def _itemsToDicts(items):
return [r.model_dump() if hasattr(r, 'model_dump') else r for r in items]
if paginationParams and hasattr(result, 'items'):
return PaginatedResponse(
items=result.items,
pagination=PaginationMetadata(
return {
"items": _itemsToDicts(result.items),
"pagination": PaginationMetadata(
currentPage=paginationParams.page or 1,
pageSize=paginationParams.pageSize or 20,
totalItems=result.totalItems,
totalPages=result.totalPages,
sort=paginationParams.sort if paginationParams else [],
filters=paginationParams.filters if paginationParams else None
)
)
return PaginatedResponse(items=result if isinstance(result, list) else result.items, pagination=None)
).model_dump(),
}
items = result if isinstance(result, list) else result.items
return {"items": _itemsToDicts(items), "pagination": None}
def _handlePositionMode(instanceId, mandateId, mode, column, pagination, context):
@ -1347,8 +1383,8 @@ def get_available_accounting_connectors(
) -> List[Dict[str, Any]]:
"""List all available accounting system connectors with their config fields."""
_validateInstanceAccess(instanceId, context)
from .accounting.accountingRegistry import _getAccountingRegistry
return _getAccountingRegistry().getAvailableConnectors()
from .accounting.accountingRegistry import getAccountingRegistry
return getAccountingRegistry().getAvailableConnectors()
# Placeholder returned for secret config fields so frontend can prefill form without sending real secrets.
@ -1357,8 +1393,8 @@ _CONFIG_PLACEHOLDER = "***"
def _getConfigMasked(connectorType: str, plainConfig: Dict[str, Any]) -> Dict[str, str]:
"""Build config with secret values replaced by placeholder for GET response."""
from .accounting.accountingRegistry import _getAccountingRegistry
connector = _getAccountingRegistry().getConnector(connectorType)
from .accounting.accountingRegistry import getAccountingRegistry
connector = getAccountingRegistry().getConnector(connectorType)
if not connector:
return {k: (v if isinstance(v, str) else str(v)) for k, v in (plainConfig or {}).items()}
secretKeys = {f.key for f in connector.getRequiredConfigFields() if f.secret}
@ -2081,13 +2117,13 @@ def _serializeRoleForApi(role) -> Dict[str, Any]:
return payload
@router.get("/{instanceId}/instance-roles", response_model=PaginatedResponse)
@router.get("/{instanceId}/instance-roles")
@limiter.limit("30/minute")
def get_instance_roles(
request: Request,
instanceId: str = Path(..., description="Feature Instance ID"),
context: RequestContext = Depends(getRequestContext)
) -> PaginatedResponse:
):
"""
Get all roles for this feature instance.
Requires feature admin permission.
@ -2095,14 +2131,9 @@ def get_instance_roles(
mandateId = _validateInstanceAdmin(instanceId, context)
rootInterface = getRootInterface()
# Get instance-specific roles (Pydantic models)
roles = rootInterface.getRolesByFeatureCode("trustee", featureInstanceId=instanceId)
return PaginatedResponse(
items=[_serializeRoleForApi(r) for r in roles],
pagination=None
)
return {"items": [_serializeRoleForApi(r) for r in roles], "pagination": None}
@router.get("/{instanceId}/instance-roles/{roleId}", response_model=Dict[str, Any])
@ -2129,14 +2160,14 @@ def get_instance_role(
return _serializeRoleForApi(role)
@router.get("/{instanceId}/instance-roles/{roleId}/rules", response_model=PaginatedResponse)
@router.get("/{instanceId}/instance-roles/{roleId}/rules")
@limiter.limit("30/minute")
def get_instance_role_rules(
request: Request,
instanceId: str = Path(..., description="Feature Instance ID"),
roleId: str = Path(..., description="Role ID"),
context: RequestContext = Depends(getRequestContext)
) -> PaginatedResponse:
):
"""
Get all AccessRules for a specific instance role.
Requires feature admin permission.
@ -2145,18 +2176,13 @@ def get_instance_role_rules(
rootInterface = getRootInterface()
# Verify role belongs to this instance (Pydantic model)
role = rootInterface.getRole(roleId)
if not role or str(role.featureInstanceId) != instanceId:
raise HTTPException(status_code=404, detail=f"Role {roleId} not found in this instance")
# Get AccessRules for this role (Pydantic models)
rules = rootInterface.getAccessRulesByRole(roleId)
return PaginatedResponse(
items=[r.model_dump() for r in rules],
pagination=None
)
return {"items": [r.model_dump() for r in rules], "pagination": None}
@router.post("/{instanceId}/instance-roles/{roleId}/rules", response_model=Dict[str, Any], status_code=201)
@ -2336,6 +2362,7 @@ def _paginatedReadEndpoint(
handleFilterValuesInMemory,
handleIdsInMemory,
parseCrossFilterPagination,
enrichRowsWithFkLabels,
)
from fastapi.responses import JSONResponse
@ -2401,23 +2428,28 @@ def _paginatedReadEndpoint(
featureCode=interface.FEATURE_CODE,
)
def _itemsToDicts(rawItems):
return [r.model_dump() if hasattr(r, "model_dump") else r for r in rawItems]
if paginationParams and hasattr(result, "items"):
return PaginatedResponse(
items=result.items,
pagination=PaginationMetadata(
enriched = enrichRowsWithFkLabels(_itemsToDicts(result.items), modelClass)
return {
"items": enriched,
"pagination": PaginationMetadata(
currentPage=paginationParams.page or 1,
pageSize=paginationParams.pageSize or 20,
totalItems=result.totalItems,
totalPages=result.totalPages,
sort=paginationParams.sort if paginationParams else [],
filters=paginationParams.filters if paginationParams else None,
),
)
).model_dump(),
}
items = result.items if hasattr(result, "items") else result
return PaginatedResponse(items=items, pagination=None)
enriched = enrichRowsWithFkLabels(_itemsToDicts(items), modelClass)
return {"items": enriched, "pagination": None}
@router.get("/{instanceId}/data/accounts", response_model=PaginatedResponse[TrusteeDataAccount])
@router.get("/{instanceId}/data/accounts")
@limiter.limit("30/minute")
def get_data_accounts(
request: Request,
@ -2438,7 +2470,7 @@ def get_data_accounts(
)
@router.get("/{instanceId}/data/journal-entries", response_model=PaginatedResponse[TrusteeDataJournalEntry])
@router.get("/{instanceId}/data/journal-entries")
@limiter.limit("30/minute")
def get_data_journal_entries(
request: Request,
@ -2459,7 +2491,7 @@ def get_data_journal_entries(
)
@router.get("/{instanceId}/data/journal-lines", response_model=PaginatedResponse[TrusteeDataJournalLine])
@router.get("/{instanceId}/data/journal-lines")
@limiter.limit("30/minute")
def get_data_journal_lines(
request: Request,
@ -2480,7 +2512,7 @@ def get_data_journal_lines(
)
@router.get("/{instanceId}/data/contacts", response_model=PaginatedResponse[TrusteeDataContact])
@router.get("/{instanceId}/data/contacts")
@limiter.limit("30/minute")
def get_data_contacts(
request: Request,
@ -2501,7 +2533,7 @@ def get_data_contacts(
)
@router.get("/{instanceId}/data/account-balances", response_model=PaginatedResponse[TrusteeDataAccountBalance])
@router.get("/{instanceId}/data/account-balances")
@limiter.limit("30/minute")
def get_data_account_balances(
request: Request,
@ -2522,7 +2554,7 @@ def get_data_account_balances(
)
@router.get("/{instanceId}/accounting/configs", response_model=PaginatedResponse[TrusteeAccountingConfig])
@router.get("/{instanceId}/accounting/configs")
@limiter.limit("30/minute")
def get_accounting_configs(
request: Request,
@ -2548,7 +2580,7 @@ def get_accounting_configs(
)
@router.get("/{instanceId}/accounting/syncs", response_model=PaginatedResponse[TrusteeAccountingSync])
@router.get("/{instanceId}/accounting/syncs")
@limiter.limit("30/minute")
def get_accounting_syncs(
request: Request,

View file

@ -191,7 +191,7 @@ _SOURCE_TYPE_TO_SERVICE = {
}
def _buildDataSourceContext(chatService, dataSourceIds: List[str]) -> str:
def buildDataSourceContext(chatService, dataSourceIds: List[str]) -> str:
"""Build a description of active data sources for the agent prompt."""
parts = [
"The user has attached the following external data sources to this prompt.",
@ -229,7 +229,7 @@ def _buildDataSourceContext(chatService, dataSourceIds: List[str]) -> str:
return "\n".join(parts) if found else ""
def _buildFeatureDataSourceContext(featureDataSourceIds: List[str]) -> str:
def buildFeatureDataSourceContext(featureDataSourceIds: List[str]) -> str:
"""Build a description of attached feature data sources for the agent prompt."""
from modules.datamodels.datamodelFeatureDataSource import FeatureDataSource
from modules.security.rbacCatalog import getCatalogService
@ -735,12 +735,12 @@ async def _runWorkspaceAgent(
enrichedPrompt = prompt
if dataSourceIds:
dsInfo = _buildDataSourceContext(chatService, dataSourceIds)
dsInfo = buildDataSourceContext(chatService, dataSourceIds)
if dsInfo:
enrichedPrompt = f"{prompt}\n\n[Active Data Sources]\n{dsInfo}"
if featureDataSourceIds:
fdsInfo = _buildFeatureDataSourceContext(featureDataSourceIds)
fdsInfo = buildFeatureDataSourceContext(featureDataSourceIds)
if fdsInfo:
enrichedPrompt = f"{enrichedPrompt}\n\n[Attached Feature Data Sources]\n{fdsInfo}"

View file

@ -139,7 +139,7 @@ def _bootstrapBilling() -> None:
Idempotent: only creates missing settings/accounts.
"""
try:
from modules.interfaces.interfaceDbBilling import _getRootInterface as getBillingRootInterface
from modules.interfaces.interfaceDbBilling import getRootInterface as getBillingRootInterface
billingInterface = getBillingRootInterface()
@ -1968,11 +1968,11 @@ def initRootMandateBilling(mandateId: str) -> None:
Creates mandate pool account and user audit accounts.
"""
try:
from modules.interfaces.interfaceDbBilling import _getRootInterface
from modules.interfaces.interfaceDbBilling import getRootInterface as getBillingRootInterface
from modules.interfaces.interfaceDbApp import getRootInterface as getAppRootInterface
from modules.datamodels.datamodelBilling import BillingSettings
billingInterface = _getRootInterface()
billingInterface = getBillingRootInterface()
appInterface = getAppRootInterface()
existingSettings = billingInterface.getSettings(mandateId)
@ -2012,7 +2012,7 @@ def _initRootMandateSubscription(mandateId: str) -> None:
Called during bootstrap after billing init.
"""
try:
from modules.interfaces.interfaceDbSubscription import _getRootInterface as getSubRootInterface
from modules.interfaces.interfaceDbSubscription import getRootInterface as getSubRootInterface
from modules.datamodels.datamodelSubscription import (
MandateSubscription,
SubscriptionStatusEnum,

View file

@ -15,7 +15,7 @@ from typing import Dict, Any, List, Optional, Union
from passlib.context import CryptContext
import uuid
from modules.connectors.connectorDbPostgre import DatabaseConnector, _get_cached_connector
from modules.connectors.connectorDbPostgre import DatabaseConnector, getCachedConnector
from modules.shared.configuration import APP_CONFIG
from modules.shared.dbRegistry import registerDatabase
from modules.shared.timeUtils import getUtcTimestamp, parseTimestamp
@ -143,7 +143,7 @@ class AppObjects:
dbPassword = APP_CONFIG.get("DB_PASSWORD_SECRET")
dbPort = int(APP_CONFIG.get("DB_PORT", 5432))
self.db = _get_cached_connector(
self.db = getCachedConnector(
dbHost=dbHost,
dbDatabase=dbDatabase,
dbUser=dbUser,
@ -1594,8 +1594,8 @@ class AppObjects:
if not adminRoleId:
raise ValueError(f"No admin role found for mandate {mandateId} — cannot assign user without role")
from modules.interfaces.interfaceDbSubscription import _getRootInterface as _getSubRoot
from modules.interfaces.interfaceDbBilling import _getRootInterface as _getBillingRoot
from modules.interfaces.interfaceDbSubscription import getRootInterface as _getSubRoot
from modules.interfaces.interfaceDbBilling import getRootInterface as _getBillingRoot
from datetime import datetime, timezone, timedelta
now = datetime.now(timezone.utc)
@ -1693,7 +1693,7 @@ class AppObjects:
from modules.datamodels.datamodelSubscription import (
SubscriptionStatusEnum, BUILTIN_PLANS,
)
from modules.interfaces.interfaceDbSubscription import _getRootInterface as _getSubRoot
from modules.interfaces.interfaceDbSubscription import getRootInterface as _getSubRoot
from datetime import datetime, timezone, timedelta
activated = 0
@ -1936,7 +1936,7 @@ class AppObjects:
logger.info(f"Cascade: deleted {len(memberships)} UserMandates for mandate {mandateId}")
# 3. Cancel Stripe subscriptions + delete MandateSubscription records (poweron_billing)
from modules.interfaces.interfaceDbSubscription import _getRootInterface as _getSubRoot
from modules.interfaces.interfaceDbSubscription import getRootInterface as _getSubRoot
subInterface = _getSubRoot()
subs = subInterface.listForMandate(mandateId)
for sub in subs:
@ -1954,7 +1954,7 @@ class AppObjects:
logger.info(f"Cascade: deleted {len(subs)} subscriptions for mandate {mandateId}")
# 3b. Delete Billing data (poweron_billing)
from modules.interfaces.interfaceDbBilling import _getRootInterface as _getBillingRoot
from modules.interfaces.interfaceDbBilling import getRootInterface as _getBillingRoot
billingDb = _getBillingRoot().db
billingAccounts = billingDb.getRecordset(BillingAccount, recordFilter={"mandateId": mandateId})
for acc in billingAccounts:
@ -2202,7 +2202,7 @@ class AppObjects:
Balance is always on the mandate pool (PREPAY_MANDATE). User accounts are for audit trail only.
"""
try:
from modules.interfaces.interfaceDbBilling import _getRootInterface as getBillingRootInterface
from modules.interfaces.interfaceDbBilling import getRootInterface as getBillingRootInterface
billingInterface = getBillingRootInterface()
settings = billingInterface.getSettings(mandateId)

View file

@ -134,7 +134,7 @@ def getInterface(currentUser: User, mandateId: str = None) -> "BillingObjects":
return _billingInterfaces[cacheKey]
def _getRootInterface() -> "BillingObjects":
def getRootInterface() -> "BillingObjects":
"""Get interface with system access for bootstrap operations."""
from modules.security.rootAccess import getRootUser
rootUser = getRootUser()
@ -888,7 +888,7 @@ class BillingObjects:
prev = self._parseSettingsDateTime(settings.get("storagePeriodStartAt"))
if prev is not None and abs((prev - periodStartAt).total_seconds()) < 2:
return
from modules.interfaces.interfaceDbSubscription import _getRootInterface as _getSubRoot
from modules.interfaces.interfaceDbSubscription import getRootInterface as _getSubRoot
usedMB = float(_getSubRoot().getMandateDataVolumeMB(mandateId))
self.updateSettings(
@ -911,13 +911,13 @@ class BillingObjects:
settings = self.getSettings(mandateId)
if not settings:
return None
from modules.interfaces.interfaceDbSubscription import _getRootInterface as _getSubRoot
from modules.datamodels.datamodelSubscription import _getPlan
from modules.interfaces.interfaceDbSubscription import getRootInterface as _getSubRoot
from modules.datamodels.datamodelSubscription import getPlan
subIface = _getSubRoot()
usedMB = float(subIface.getMandateDataVolumeMB(mandateId))
sub = subIface.getOperativeForMandate(mandateId)
plan = _getPlan(sub.get("planKey", "")) if sub else None
plan = getPlan(sub.get("planKey", "")) if sub else None
includedMB = plan.maxDataVolumeMB if plan and plan.maxDataVolumeMB is not None else None
if includedMB is None:
return None
@ -971,13 +971,13 @@ class BillingObjects:
Amount = budgetAiPerUserCHF * activeUsers (dynamic, not the static plan.budgetAiCHF).
Should be called once per billing period (initial activation + each invoice.paid).
Returns the created CREDIT transaction or None if budget is 0."""
from modules.datamodels.datamodelSubscription import _getPlan
from modules.datamodels.datamodelSubscription import getPlan
plan = _getPlan(planKey)
plan = getPlan(planKey)
if not plan or not plan.budgetAiPerUserCHF or plan.budgetAiPerUserCHF <= 0:
return None
from modules.interfaces.interfaceDbSubscription import _getRootInterface as _getSubRoot
from modules.interfaces.interfaceDbSubscription import getRootInterface as _getSubRoot
subRoot = _getSubRoot()
activeUsers = max(subRoot.countActiveUsers(mandateId), 1)
amount = plan.budgetAiPerUserCHF * activeUsers
@ -1027,13 +1027,13 @@ class BillingObjects:
delta > 0: user added -> CREDIT pro-rata portion
delta < 0: user removed -> DEBIT pro-rata portion
"""
from modules.datamodels.datamodelSubscription import _getPlan
from modules.datamodels.datamodelSubscription import getPlan
plan = _getPlan(planKey)
plan = getPlan(planKey)
if not plan or not plan.budgetAiPerUserCHF or plan.budgetAiPerUserCHF <= 0:
return None
from modules.interfaces.interfaceDbSubscription import _getRootInterface as _getSubRoot
from modules.interfaces.interfaceDbSubscription import getRootInterface as _getSubRoot
subRoot = _getSubRoot()
operative = subRoot.getOperativeForMandate(mandateId)
if not operative:
@ -1221,7 +1221,7 @@ class BillingObjects:
if not mandate or not getattr(mandate, "enabled", True):
continue
mandateName = getattr(mandate, 'label', None) or getattr(mandate, 'name', None) or (mandate.get("label") or mandate.get("name", "") if isinstance(mandate, dict) else "")
mandateName = getattr(mandate, 'label', None) or getattr(mandate, 'name', None) or (mandate.get("label") or mandate.get("name", None) if isinstance(mandate, dict) else None) or f"NA({mandateId})"
settings = self.getSettings(mandateId)
if not settings:
@ -1280,13 +1280,12 @@ class BillingObjects:
if not userAccount:
continue
# Get transactions for user's account (all transactions are on user accounts now)
transactions = self.getTransactions(userAccount["id"], limit=limit)
mandate = appInterface.getMandate(mandateId)
mandateName = ""
mandateName = f"NA({mandateId})"
if mandate:
mandateName = getattr(mandate, 'label', None) or getattr(mandate, 'name', None) or (mandate.get("label") or mandate.get("name", "") if isinstance(mandate, dict) else "")
mandateName = getattr(mandate, 'label', None) or getattr(mandate, 'name', None) or (mandate.get("label") or mandate.get("name", None) if isinstance(mandate, dict) else None) or f"NA({mandateId})"
for t in transactions:
t["mandateId"] = mandateId
@ -1333,9 +1332,9 @@ class BillingObjects:
continue
mandate = appInterface.getMandate(mandateId)
mandateName = ""
mandateName = f"NA({mandateId})"
if mandate:
mandateName = getattr(mandate, 'label', None) or getattr(mandate, 'name', None) or (mandate.get("label") or mandate.get("name", "") if isinstance(mandate, dict) else "")
mandateName = getattr(mandate, 'label', None) or getattr(mandate, 'name', None) or (mandate.get("label") or mandate.get("name", None) if isinstance(mandate, dict) else None) or f"NA({mandateId})"
allMandateAccounts = self.db.getRecordset(
BillingAccount,
@ -1387,11 +1386,10 @@ class BillingObjects:
for mandateId in targetMandateIds:
transactions = self.getTransactionsByMandate(mandateId, limit=limit)
# Get mandate name
mandate = appInterface.getMandate(mandateId)
mandateName = ""
mandateName = f"NA({mandateId})"
if mandate:
mandateName = getattr(mandate, 'label', None) or getattr(mandate, 'name', None) or (mandate.get("label") or mandate.get("name", "") if isinstance(mandate, dict) else "")
mandateName = getattr(mandate, 'label', None) or getattr(mandate, 'name', None) or (mandate.get("label") or mandate.get("name", None) if isinstance(mandate, dict) else None) or f"NA({mandateId})"
for t in transactions:
t["mandateId"] = mandateId
@ -1439,7 +1437,6 @@ class BillingObjects:
for s in allSettings:
settingsMap[s.get("mandateId")] = s
# Get user info efficiently
userIds = list(set(acc.get("userId") for acc in allAccounts if acc.get("userId")))
userMap = {}
for userId in userIds:
@ -1447,16 +1444,15 @@ class BillingObjects:
if user:
displayName = getattr(user, 'displayName', None) or (user.get("displayName") if isinstance(user, dict) else None)
username = getattr(user, 'username', None) or (user.get("username") if isinstance(user, dict) else None)
userMap[userId] = displayName or username or userId
userMap[userId] = displayName or username or f"NA({userId})"
# Get mandate info efficiently
mandateMap = {}
mandateIdList = list(set(acc.get("mandateId") for acc in allAccounts if acc.get("mandateId")))
for mandateId in mandateIdList:
mandate = appInterface.getMandate(mandateId)
if mandate:
mandateName = getattr(mandate, 'label', None) or getattr(mandate, 'name', None) or (mandate.get("label") or mandate.get("name", "") if isinstance(mandate, dict) else "")
mandateMap[mandateId] = mandateName
mandateName = getattr(mandate, 'label', None) or getattr(mandate, 'name', None) or (mandate.get("label") or mandate.get("name", None) if isinstance(mandate, dict) else None)
mandateMap[mandateId] = mandateName or f"NA({mandateId})"
for account in allAccounts:
mandateId = account.get("mandateId")
@ -1475,9 +1471,9 @@ class BillingObjects:
balances.append({
"accountId": account.get("id"),
"mandateId": mandateId,
"mandateName": mandateMap.get(mandateId, ""),
"mandateName": mandateMap.get(mandateId) or (f"NA({mandateId})" if mandateId else None),
"userId": userId,
"userName": userMap.get(userId, userId),
"userName": userMap.get(userId) or (f"NA({userId})" if userId else None),
"balance": balance,
"warningThreshold": warningThreshold,
"isWarning": balance <= warningThreshold,
@ -1596,14 +1592,14 @@ class BillingObjects:
if pageUserIds:
users = appInterface.getUsersByIds(list(pageUserIds))
for uid, u in users.items():
dn = getattr(u, "displayName", None) or getattr(u, "username", None) or uid
dn = getattr(u, "displayName", None) or getattr(u, "username", None) or f"NA({uid})"
userMap[uid] = dn
mandateMap: Dict[str, str] = {}
if pageMandateIds:
mandates = appInterface.getMandatesByIds(list(pageMandateIds))
for mid, m in mandates.items():
mandateMap[mid] = getattr(m, "label", None) or getattr(m, "name", None) or mid
mandateMap[mid] = getattr(m, "label", None) or getattr(m, "name", None) or f"NA({mid})"
enriched = []
for t in pageItems:
@ -1613,9 +1609,9 @@ class BillingObjects:
mid = acc.get("mandateId")
txUserId = row.get("createdByUserId") or acc.get("userId")
row["mandateId"] = mid
row["mandateName"] = mandateMap.get(mid, "")
row["mandateName"] = mandateMap.get(mid) or (f"NA({mid})" if mid else None)
row["userId"] = txUserId
row["userName"] = userMap.get(txUserId, txUserId) if txUserId else None
row["userName"] = userMap.get(txUserId) or (f"NA({txUserId})" if txUserId else None)
enriched.append(row)
return PaginatedResult(items=enriched, totalItems=totalItems, totalPages=totalPages)
@ -1639,12 +1635,12 @@ class BillingObjects:
first, then builds a single SQL query with OR-combined conditions.
"""
import math
from modules.connectors.connectorDbPostgre import _get_model_fields, _parseRecordFields
from modules.connectors.connectorDbPostgre import getModelFields, parseRecordFields
from modules.datamodels.datamodelUam import UserInDB
from modules.interfaces.interfaceDbApp import getInterface as getAppInterface
table = BillingTransaction.__name__
fields = _get_model_fields(BillingTransaction)
fields = getModelFields(BillingTransaction)
pattern = f"%{searchTerm}%"
# Resolve matching user / mandate IDs via the app DB (which is separate
@ -1785,7 +1781,7 @@ class BillingObjects:
records = [dict(row) for row in cur.fetchall()]
for rec in records:
_parseRecordFields(rec, fields, f"search table {table}")
parseRecordFields(rec, fields, f"search table {table}")
totalPages = math.ceil(totalItems / pageSize) if totalItems > 0 else 0
return {"items": records, "totalItems": totalItems, "totalPages": totalPages}
@ -2023,7 +2019,7 @@ class BillingObjects:
appInterface = getAppInterface(self.currentUser)
mandates = appInterface.getMandatesByIds(mandateIds)
return sorted(
{getattr(m, "label", None) or getattr(m, "name", "") or mid for mid, m in mandates.items()},
{getattr(m, "label", None) or getattr(m, "name", None) or f"NA({mid})" for mid, m in mandates.items()},
key=lambda v: v.lower(),
)
@ -2035,7 +2031,7 @@ class BillingObjects:
appInterface = getAppInterface(self.currentUser)
users = appInterface.getUsersByIds(values)
return sorted(
{getattr(u, "displayName", None) or getattr(u, "username", None) or uid for uid, u in users.items()},
{getattr(u, "displayName", None) or getattr(u, "username", None) or f"NA({uid})" for uid, u in users.items()},
key=lambda v: v.lower(),
)
@ -2075,7 +2071,6 @@ class BillingObjects:
"userId": acc.get("userId")
}
# Get user info efficiently
userIds = list(set(acc.get("userId") for acc in allAccounts if acc.get("userId")))
userMap = {}
for userId in userIds:
@ -2083,16 +2078,15 @@ class BillingObjects:
if user:
displayName = getattr(user, 'displayName', None) or (user.get("displayName") if isinstance(user, dict) else None)
username = getattr(user, 'username', None) or (user.get("username") if isinstance(user, dict) else None)
userMap[userId] = displayName or username or userId
userMap[userId] = displayName or username or f"NA({userId})"
# Get mandate info efficiently
mandateMap = {}
mandateIdList = list(set(acc.get("mandateId") for acc in allAccounts if acc.get("mandateId")))
for mandateId in mandateIdList:
mandate = appInterface.getMandate(mandateId)
if mandate:
mandateName = getattr(mandate, 'label', None) or getattr(mandate, 'name', None) or (mandate.get("label") or mandate.get("name", "") if isinstance(mandate, dict) else "")
mandateMap[mandateId] = mandateName
mandateName = getattr(mandate, 'label', None) or getattr(mandate, 'name', None) or (mandate.get("label") or mandate.get("name", None) if isinstance(mandate, dict) else None)
mandateMap[mandateId] = mandateName or f"NA({mandateId})"
# Get transactions for all accounts and collect createdByUserIds
rawTransactions = []
@ -2123,18 +2117,16 @@ class BillingObjects:
if user:
displayName = getattr(user, 'displayName', None) or (user.get("displayName") if isinstance(user, dict) else None)
username = getattr(user, 'username', None) or (user.get("username") if isinstance(user, dict) else None)
userMap[uid] = displayName or username or uid
userMap[uid] = displayName or username or f"NA({uid})"
# Enrich transactions
for t in rawTransactions:
mandateId = t.pop("_accountMandateId", None)
accountUserId = t.pop("_accountUserId", None)
t["mandateId"] = mandateId
t["mandateName"] = mandateMap.get(mandateId, "")
# Prefer createdByUserId (per-transaction) over account-derived userId
t["mandateName"] = mandateMap.get(mandateId) or (f"NA({mandateId})" if mandateId else None)
txUserId = t.get("createdByUserId") or accountUserId
t["userId"] = txUserId
t["userName"] = userMap.get(txUserId, txUserId) if txUserId else None
t["userName"] = userMap.get(txUserId) or (f"NA({txUserId})" if txUserId else None)
allTransactions.append(t)
except Exception as e:

View file

@ -62,13 +62,13 @@ def storeDebugMessageAndDocuments(message, currentUser, mandateId=None, featureI
try:
import os
from datetime import datetime, UTC
from modules.shared.debugLogger import _getBaseDebugDir, _ensureDir
from modules.shared.debugLogger import getBaseDebugDir, ensureDir
from modules.interfaces.interfaceDbManagement import getInterface
# Create base debug directory (use base debug dir, not prompts subdirectory)
baseDebugDir = _getBaseDebugDir()
baseDebugDir = getBaseDebugDir()
debug_root = os.path.join(baseDebugDir, 'messages')
_ensureDir(debug_root)
ensureDir(debug_root)
# Generate timestamp
timestamp = datetime.now(UTC).strftime('%Y%m%d-%H%M%S-%f')[:-3]
@ -133,7 +133,7 @@ def storeDebugMessageAndDocuments(message, currentUser, mandateId=None, featureI
safe_label = "default"
label_folder = os.path.join(message_path, safe_label)
_ensureDir(label_folder)
ensureDir(label_folder)
# Store each document
for i, doc in enumerate(docs):

View file

@ -11,7 +11,7 @@ from collections import defaultdict
from datetime import datetime, timezone, timedelta
from typing import Dict, Any, List, Optional
from modules.connectors.connectorDbPostgre import _get_cached_connector
from modules.connectors.connectorDbPostgre import getCachedConnector
from modules.shared.dbRegistry import registerDatabase
from modules.datamodels.datamodelKnowledge import FileContentIndex, ContentChunk, RoundMemory, WorkflowMemory
from modules.datamodels.datamodelUam import User
@ -43,7 +43,7 @@ class KnowledgeObjects:
dbPassword = APP_CONFIG.get("DB_PASSWORD_SECRET")
dbPort = int(APP_CONFIG.get("DB_PORT", 5432))
self.db = _get_cached_connector(
self.db = getCachedConnector(
dbHost=dbHost,
dbDatabase=dbDatabase,
dbUser=dbUser,
@ -103,9 +103,9 @@ class KnowledgeObjects:
ok = self.db.recordDelete(FileContentIndex, fileId)
if ok and mandateId:
try:
from modules.interfaces.interfaceDbBilling import _getRootInterface
from modules.interfaces.interfaceDbBilling import getRootInterface
_getRootInterface().reconcileMandateStorageBilling(str(mandateId))
getRootInterface().reconcileMandateStorageBilling(str(mandateId))
except Exception as ex:
logger.warning("reconcileMandateStorageBilling after delete failed: %s", ex)
return ok

View file

@ -13,7 +13,7 @@ import math
import mimetypes
from typing import Dict, Any, List, Optional, Union
from modules.connectors.connectorDbPostgre import DatabaseConnector, _get_cached_connector
from modules.connectors.connectorDbPostgre import DatabaseConnector, getCachedConnector
from modules.shared.dbRegistry import registerDatabase
from modules.interfaces.interfaceRbac import getRecordsetWithRBAC, getRecordsetPaginatedWithRBAC
from modules.security.rbac import RbacClass
@ -136,7 +136,7 @@ class ComponentObjects:
dbPassword = APP_CONFIG.get("DB_PASSWORD_SECRET")
dbPort = int(APP_CONFIG.get("DB_PORT", 5432))
self.db = _get_cached_connector(
self.db = getCachedConnector(
dbHost=dbHost,
dbDatabase=dbDatabase,
dbUser=dbUser,
@ -992,8 +992,11 @@ class ComponentObjects:
if file.get("neutralize") is None:
file["neutralize"] = False
labelCols = {k: v for k, v in file.items() if k.endswith("Label")}
fileItem = FileItem(**file)
fileItems.append(fileItem)
itemDict = fileItem.model_dump()
itemDict.update(labelCols)
fileItems.append(itemDict)
except Exception as e:
logger.warning(f"Skipping invalid file record: {str(e)}")
continue
@ -1347,8 +1350,8 @@ class ComponentObjects:
folderIds = [f["id"] for f in folders if f.get("id")]
fileCounts: Dict[str, int] = {}
try:
from modules.interfaces.interfaceRbac import _buildFilesScopeWhereClause
scopeClause = _buildFilesScopeWhereClause(
from modules.interfaces.interfaceRbac import buildFilesScopeWhereClause
scopeClause = buildFilesScopeWhereClause(
self.currentUser, "FileItem", self.db,
self.mandateId, self.featureInstanceId,
[], [],

View file

@ -25,7 +25,7 @@ from modules.datamodels.datamodelSubscription import (
TERMINAL_STATUSES,
OPERATIVE_STATUSES,
BUILTIN_PLANS,
_getPlan,
getPlan as getPlanFromCatalog,
_getSelectablePlans,
)
@ -55,7 +55,7 @@ def getInterface(currentUser: User, mandateId: str = None) -> "SubscriptionObjec
return _subscriptionInterfaces[cacheKey]
def _getRootInterface() -> "SubscriptionObjects":
def getRootInterface() -> "SubscriptionObjects":
from modules.security.rootAccess import getRootUser
return SubscriptionObjects(getRootUser(), mandateId=None)
@ -96,7 +96,7 @@ class SubscriptionObjects:
# =========================================================================
def getPlan(self, planKey: str) -> Optional[SubscriptionPlan]:
return _getPlan(planKey)
return getPlanFromCatalog(planKey)
def getSelectablePlans(self) -> List[SubscriptionPlan]:
return _getSelectablePlans()

View file

@ -247,8 +247,8 @@ def getRecordsetWithRBAC(
# Handle JSONB fields and ensure numeric types are correct
# Import the helper function from connector module
from modules.connectors.connectorDbPostgre import _get_model_fields
fields = _get_model_fields(modelClass)
from modules.connectors.connectorDbPostgre import getModelFields
fields = getModelFields(modelClass)
for record in records:
for fieldName, fieldType in fields.items():
# Ensure numeric fields are properly typed
@ -379,8 +379,8 @@ def getRecordsetPaginatedWithRBAC(
whereValues.append(value)
if pagination and pagination.filters:
from modules.connectors.connectorDbPostgre import _get_model_fields
fields = _get_model_fields(modelClass)
from modules.connectors.connectorDbPostgre import getModelFields
fields = getModelFields(modelClass)
validColumns = set(fields.keys())
for key, val in pagination.filters.items():
if key == "search" and isinstance(val, str) and val.strip():
@ -440,8 +440,8 @@ def getRecordsetPaginatedWithRBAC(
orderParts: List[str] = []
if pagination and pagination.sort:
from modules.connectors.connectorDbPostgre import _get_model_fields
validColumns = set(_get_model_fields(modelClass).keys())
from modules.connectors.connectorDbPostgre import getModelFields
validColumns = set(getModelFields(modelClass).keys())
for sf in pagination.sort:
if sf.field in validColumns:
direction = "DESC" if sf.direction.lower() == "desc" else "ASC"
@ -464,10 +464,10 @@ def getRecordsetPaginatedWithRBAC(
cursor.execute(dataSql, whereValues)
records = [dict(row) for row in cursor.fetchall()]
from modules.connectors.connectorDbPostgre import _get_model_fields, _parseRecordFields
fields = _get_model_fields(modelClass)
from modules.connectors.connectorDbPostgre import getModelFields, parseRecordFields
fields = getModelFields(modelClass)
for record in records:
_parseRecordFields(record, fields, f"table {table}")
parseRecordFields(record, fields, f"table {table}")
for fieldName, fieldType in fields.items():
if fieldType == "JSONB" and fieldName in record and record[fieldName] is None:
modelFields = modelClass.model_fields
@ -484,12 +484,15 @@ def getRecordsetPaginatedWithRBAC(
if enrichPermissions:
records = _enrichRecordsWithPermissions(records, permissions, currentUser)
if pagination:
pageSize = pagination.pageSize
totalPages = math.ceil(totalItems / pageSize) if totalItems > 0 else 0
return PaginatedResult(items=records, totalItems=totalItems, totalPages=totalPages)
from modules.routes.routeHelpers import enrichRowsWithFkLabels
enrichRowsWithFkLabels(records, modelClass)
return records
if pagination:
pageSize = pagination.pageSize
totalPages = math.ceil(totalItems / pageSize) if totalItems > 0 else 0
return PaginatedResult(items=records, totalItems=totalItems, totalPages=totalPages)
return records
except Exception as e:
logger.error(f"Error in getRecordsetPaginatedWithRBAC for table {table}: {e}")
return PaginatedResult(items=[], totalItems=0, totalPages=0) if pagination else []
@ -518,8 +521,8 @@ def getDistinctColumnValuesWithRBAC(
if not connector._ensureTableExists(modelClass):
return []
from modules.connectors.connectorDbPostgre import _get_model_fields
fields = _get_model_fields(modelClass)
from modules.connectors.connectorDbPostgre import getModelFields
fields = getModelFields(modelClass)
if column not in fields:
return []
@ -614,21 +617,34 @@ def getDistinctColumnValuesWithRBAC(
whereClause = " WHERE " + " AND ".join(whereConditions) if whereConditions else ""
notNullCond = f'"{column}" IS NOT NULL AND "{column}"::TEXT != \'\''
if whereClause:
whereClause += f" AND {notNullCond}"
nonNullWhere = whereClause + f" AND {notNullCond}"
else:
whereClause = f" WHERE {notNullCond}"
nonNullWhere = f" WHERE {notNullCond}"
sql = f'SELECT DISTINCT "{column}"::TEXT AS val FROM "{table}"{whereClause} ORDER BY val'
sql = f'SELECT DISTINCT "{column}"::TEXT AS val FROM "{table}"{nonNullWhere} ORDER BY val'
with connector.connection.cursor() as cursor:
cursor.execute(sql, whereValues)
return [row["val"] for row in cursor.fetchall()]
result = [row["val"] for row in cursor.fetchall()]
# Include a None entry when NULL/empty rows exist (enables "(Leer)" filter)
emptyCond = f'("{column}" IS NULL OR "{column}"::TEXT = \'\')'
if whereClause:
emptySql = f'SELECT 1 FROM "{table}"{whereClause} AND {emptyCond} LIMIT 1'
else:
emptySql = f'SELECT 1 FROM "{table}" WHERE {emptyCond} LIMIT 1'
with connector.connection.cursor() as cursor:
cursor.execute(emptySql, whereValues)
if cursor.fetchone():
result.append(None)
return result
except Exception as e:
logger.error(f"Error in getDistinctColumnValuesWithRBAC for {table}.{column}: {e}")
return []
def _buildFilesScopeWhereClause(
def buildFilesScopeWhereClause(
currentUser: User,
table: str,
connector,
@ -673,7 +689,7 @@ def _buildFilesScopeWhereClause(
if instances:
effectiveMandateId = instances[0].get("mandateId") or ""
except Exception as e:
logger.warning(f"_buildFilesScopeWhereClause: could not resolve mandate for instance {featureInstanceId}: {e}")
logger.warning(f"buildFilesScopeWhereClause: could not resolve mandate for instance {featureInstanceId}: {e}")
scopeParts: List[str] = []
scopeValues: List = []
@ -757,7 +773,7 @@ def buildRbacWhereClause(
namespaceAll = TABLE_NAMESPACE.get(table, "system")
# Files: scope-based context filtering applies even with ALL access
if namespaceAll == "files":
return _buildFilesScopeWhereClause(
return buildFilesScopeWhereClause(
currentUser, table, connector, mandateId, featureInstanceId,
baseConditions, baseValues,
)
@ -811,7 +827,7 @@ def buildRbacWhereClause(
# - scope='featureInstance' → visible to users with access to that instance
# - scope='personal' → only visible to owner (sysCreatedBy)
if namespace == "files":
return _buildFilesScopeWhereClause(
return buildFilesScopeWhereClause(
currentUser, table, connector, mandateId, featureInstanceId,
baseConditions, baseValues,
)

View file

@ -28,9 +28,9 @@ def listDemoConfigs(
currentUser: User = Depends(requirePlatformAdmin),
) -> dict:
"""List all available demo configurations."""
from modules.demoConfigs import _getAvailableDemoConfigs
from modules.demoConfigs import getAvailableDemoConfigs
configs = _getAvailableDemoConfigs()
configs = getAvailableDemoConfigs()
return {
"configs": [cfg.toDict() for cfg in configs.values()],
}
@ -44,9 +44,9 @@ def loadDemoConfig(
currentUser: User = Depends(requirePlatformAdmin),
) -> dict:
"""Load (create) a demo configuration. Idempotent."""
from modules.demoConfigs import _getDemoConfigByCode
from modules.demoConfigs import getDemoConfigByCode
config = _getDemoConfigByCode(code)
config = getDemoConfigByCode(code)
if not config:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
@ -69,9 +69,9 @@ def removeDemoConfig(
currentUser: User = Depends(requirePlatformAdmin),
) -> dict:
"""Remove all data created by a demo configuration."""
from modules.demoConfigs import _getDemoConfigByCode
from modules.demoConfigs import getDemoConfigByCode
config = _getDemoConfigByCode(code)
config = getDemoConfigByCode(code)
if not config:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,

View file

@ -18,7 +18,7 @@ import json
import math
from pydantic import BaseModel, Field
from modules.datamodels.datamodelPagination import PaginationParams, PaginationMetadata, normalize_pagination_dict
from modules.routes.routeHelpers import _applyFiltersAndSort, handleFilterValuesInMemory, handleIdsInMemory
from modules.routes.routeHelpers import applyFiltersAndSort, handleFilterValuesInMemory, handleIdsInMemory
from modules.auth import limiter, getRequestContext, RequestContext, requirePlatformAdmin
from modules.datamodels.datamodelUam import User, UserInDB
@ -481,7 +481,7 @@ def list_feature_instances(
return handleIdsInMemory(items, pagination)
if paginationParams:
filtered = _applyFiltersAndSort(items, paginationParams)
filtered = applyFiltersAndSort(items, paginationParams)
totalItems = len(filtered)
totalPages = math.ceil(totalItems / paginationParams.pageSize) if totalItems > 0 else 0
startIdx = (paginationParams.page - 1) * paginationParams.pageSize
@ -1019,7 +1019,7 @@ def list_template_roles(
if mode == "ids":
return handleIdsInMemory(enriched, pagination)
filtered = _applyFiltersAndSort(enriched, paginationParams)
filtered = applyFiltersAndSort(enriched, paginationParams)
if paginationParams:
totalItems = len(filtered)
@ -1223,7 +1223,7 @@ def list_feature_instance_users(
raise HTTPException(status_code=400, detail=f"Invalid pagination parameter: {str(e)}")
if paginationParams:
filtered = _applyFiltersAndSort(items, paginationParams)
filtered = applyFiltersAndSort(items, paginationParams)
totalItems = len(filtered)
totalPages = math.ceil(totalItems / paginationParams.pageSize) if totalItems > 0 else 0
startIdx = (paginationParams.page - 1) * paginationParams.pageSize

View file

@ -69,14 +69,24 @@ def _applySortFilterSearch(
return items
def _distinctColumnValues(items: List[Dict[str, Any]], column: str) -> List[str]:
"""Extract sorted distinct non-empty string values for a column."""
def _distinctColumnValues(items: List[Dict[str, Any]], column: str) -> List[Optional[str]]:
"""Extract sorted distinct values for a column.
Includes ``None`` as the last entry when at least one row has a null/empty
    value — this enables the "(Leer)" filter option in the frontend.
"""
vals = set()
hasEmpty = False
for r in items:
v = r.get(column)
if v is not None and v != "":
vals.add(str(v))
return sorted(vals)
if v is None or v == "":
hasEmpty = True
continue
vals.add(str(v))
result: List[Optional[str]] = sorted(vals)
if hasEmpty:
result.append(None)
return result
def _enrichUserAndInstanceLabels(
@ -87,46 +97,32 @@ def _enrichUserAndInstanceLabels(
instanceKey: str = "featureInstanceId",
instanceLabelKey: str = "instanceLabel",
) -> None:
"""Resolve userId → username and featureInstanceId → label in-place."""
userIds = set()
instanceIds = set()
for r in items:
uid = r.get(userKey)
if uid and not r.get(usernameKey):
userIds.add(uid)
iid = r.get(instanceKey)
if iid:
instanceIds.add(iid)
"""Resolve userId -> username and featureInstanceId -> label in-place.
userMap: Dict[str, str] = {}
instanceMap: Dict[str, str] = {}
Uses the central resolvers from routeHelpers. Returns None (not the raw ID)
for unresolvable entries so the frontend can distinguish "resolved" from
"missing".
"""
from modules.routes.routeHelpers import resolveUserLabels, resolveInstanceLabels
try:
from modules.interfaces.interfaceDbApp import getInterface
appIf = getInterface(
context.user,
mandateId=str(context.mandateId) if context.mandateId else None,
)
if userIds:
users = appIf.getUsersByIds(list(userIds))
for uid, u in users.items():
name = getattr(u, "displayName", None) or getattr(u, "email", None) or uid
userMap[uid] = name
if instanceIds:
for iid in instanceIds:
fi = appIf.getFeatureInstance(iid)
if fi:
instanceMap[iid] = getattr(fi, "label", None) or getattr(fi, "featureCode", None) or iid
except Exception as e:
logger.debug("_enrichUserAndInstanceLabels: %s", e)
userIds = list({r.get(userKey) for r in items if r.get(userKey) and not r.get(usernameKey)})
instanceIds = list({r.get(instanceKey) for r in items if r.get(instanceKey)})
userMap: Dict[str, Optional[str]] = {}
instanceMap: Dict[str, Optional[str]] = {}
if userIds:
userMap = resolveUserLabels(userIds)
if instanceIds:
instanceMap = resolveInstanceLabels(instanceIds)
for r in items:
uid = r.get(userKey)
if uid and not r.get(usernameKey) and uid in userMap:
r[usernameKey] = userMap[uid]
iid = r.get(instanceKey)
if iid and iid in instanceMap:
r[instanceLabelKey] = instanceMap[iid]
if iid:
r[instanceLabelKey] = instanceMap.get(iid)
def _requireAuditAccess(context: RequestContext):

View file

@ -20,7 +20,7 @@ from pydantic import BaseModel, Field
from modules.auth import limiter, requirePlatformAdmin, getRequestContext, RequestContext
# Import billing components
from modules.interfaces.interfaceDbBilling import getInterface as getBillingInterface, _getRootInterface
from modules.interfaces.interfaceDbBilling import getInterface as getBillingInterface, getRootInterface
from modules.serviceCenter.services.serviceBilling.mainServiceBilling import getService as getBillingService
import json
import math
@ -140,44 +140,46 @@ def _getBillingDataScope(user) -> BillingDataScope:
def _isAdminOfMandate(ctx: RequestContext, targetMandateId: str) -> bool:
"""Check if user is PlatformAdmin or admin of the specified mandate."""
"""Check if user is PlatformAdmin or admin of the specified mandate.
Fail-loud: any DB/lookup error is logged at ERROR and re-raised. We never
silently return False that would mask infrastructure outages as "no
permission" and produce confusing 403s instead of actionable 500s.
"""
if ctx.isPlatformAdmin:
return True
try:
from modules.interfaces.interfaceDbApp import getRootInterface
rootInterface = getRootInterface()
userMandates = rootInterface.getUserMandates(str(ctx.user.id))
for um in userMandates:
if str(getattr(um, 'mandateId', None)) != str(targetMandateId):
continue
if not getattr(um, 'enabled', True):
continue
umId = str(getattr(um, 'id', ''))
roleIds = rootInterface.getRoleIdsForUserMandate(umId)
for roleId in roleIds:
role = rootInterface.getRole(roleId)
if role and role.roleLabel == "admin" and not role.featureInstanceId:
return True
return False
except Exception:
return False
from modules.interfaces.interfaceDbApp import getRootInterface
rootInterface = getRootInterface()
userMandates = rootInterface.getUserMandates(str(ctx.user.id))
for um in userMandates:
if str(getattr(um, 'mandateId', None)) != str(targetMandateId):
continue
if not getattr(um, 'enabled', True):
continue
umId = str(getattr(um, 'id', ''))
roleIds = rootInterface.getRoleIdsForUserMandate(umId)
for roleId in roleIds:
role = rootInterface.getRole(roleId)
if role and role.roleLabel == "admin" and not role.featureInstanceId:
return True
return False
def _isMemberOfMandate(ctx: RequestContext, targetMandateId: str) -> bool:
"""Check if user has any enabled membership in the specified mandate."""
try:
from modules.interfaces.interfaceDbApp import getRootInterface
rootInterface = getRootInterface()
userMandates = rootInterface.getUserMandates(str(ctx.user.id))
for um in userMandates:
if str(getattr(um, 'mandateId', None)) != str(targetMandateId):
continue
if not getattr(um, 'enabled', True):
continue
return True
return False
except Exception:
return False
"""Check if user has any enabled membership in the specified mandate.
Fail-loud: see _isAdminOfMandate above for the same rationale.
"""
from modules.interfaces.interfaceDbApp import getRootInterface
rootInterface = getRootInterface()
userMandates = rootInterface.getUserMandates(str(ctx.user.id))
for um in userMandates:
if str(getattr(um, 'mandateId', None)) != str(targetMandateId):
continue
if not getattr(um, 'enabled', True):
continue
return True
return False
@ -887,7 +889,7 @@ def confirmCheckoutSession(
if not _isAdminOfMandate(ctx, mandate_id):
raise HTTPException(status_code=403, detail=routeApiMsg("Mandate admin role required"))
root_billing_interface = _getRootInterface()
root_billing_interface = getRootInterface()
return _creditStripeSessionIfNeeded(root_billing_interface, session_dict, eventId=None)
except HTTPException:
raise
@ -957,10 +959,10 @@ async def stripeWebhook(
sessionMode = session.get("mode") if hasattr(session, "get") else getattr(session, "mode", None)
if sessionMode == "subscription":
_handleSubscriptionCheckoutCompleted(session, event_id)
handleSubscriptionCheckoutCompleted(session, event_id)
return {"received": True}
billingInterface = _getRootInterface()
billingInterface = getRootInterface()
if billingInterface.getStripeWebhookEventByEventId(event_id):
logger.info(f"Stripe event {event_id} already processed, skipping")
return {"received": True}
@ -997,11 +999,11 @@ async def stripeWebhook(
return {"received": True}
def _handleSubscriptionCheckoutCompleted(session, eventId: str) -> None:
def handleSubscriptionCheckoutCompleted(session, eventId: str) -> None:
"""Handle checkout.session.completed for mode=subscription.
Resolves the local PENDING record by ID from webhook metadata and transitions it."""
from modules.interfaces.interfaceDbSubscription import _getRootInterface as getSubRootInterface
from modules.datamodels.datamodelSubscription import SubscriptionStatusEnum, _getPlan
from modules.interfaces.interfaceDbSubscription import getRootInterface as getSubRootInterface
from modules.datamodels.datamodelSubscription import SubscriptionStatusEnum, getPlan
from modules.serviceCenter.services.serviceSubscription.mainServiceSubscription import (
getService as getSubscriptionService,
_notifySubscriptionChange,
@ -1033,8 +1035,16 @@ def _handleSubscriptionCheckoutCompleted(session, eventId: str) -> None:
mandateId = metadata.get("mandateId")
planKey = metadata.get("planKey", "")
platformUrl = platformUrl or metadata.get("platformUrl", "")
except Exception:
pass
except Exception as e:
# Stripe lookup is the only way to recover the metadata at this
# point — if it fails we MUST surface it, otherwise the webhook
# later short-circuits with "missing metadata" and the user
# silently gets stuck in PENDING.
logger.error(
"Stripe Subscription.retrieve(%s) failed during checkout "
"metadata recovery: %s", stripeSub, e,
)
raise
stripeSubId = session.get("subscription")
@ -1083,7 +1093,17 @@ def _handleSubscriptionCheckoutCompleted(session, eventId: str) -> None:
elif priceMapping and priceId == priceMapping.stripePriceIdInstances:
stripeData["stripeItemIdInstances"] = item["id"]
except Exception as e:
logger.error("Error retrieving Stripe subscription %s: %s", stripeSubId, e)
# Without these enrichment fields the activation completes anyway
# (status flips to ACTIVE/SCHEDULED below), but periods + Stripe
# item-IDs are missing on the local record, which breaks later
# add-on billing and renewal accounting. Re-raise so the webhook
# is retried by Stripe instead of silently shipping a broken row.
logger.error(
"Error retrieving Stripe subscription %s during checkout "
"completion (will be retried by Stripe): %s",
stripeSubId, e,
)
raise
if stripeData:
subInterface.updateFields(subscriptionRecordId, stripeData)
@ -1136,12 +1156,12 @@ def _handleSubscriptionCheckoutCompleted(session, eventId: str) -> None:
subService.invalidateCache(mandateId)
if toStatus == SubscriptionStatusEnum.ACTIVE:
plan = _getPlan(planKey)
plan = getPlan(planKey)
updatedSub = subInterface.getById(subscriptionRecordId)
_notifySubscriptionChange(mandateId, "activated", plan, subscriptionRecord=updatedSub, platformUrl=platformUrl)
try:
billingIf = _getRootInterface()
billingIf = getRootInterface()
billingIf.creditSubscriptionBudget(mandateId, planKey, periodLabel="Erstaktivierung")
except Exception as ex:
logger.error("creditSubscriptionBudget on activation failed: %s", ex)
@ -1155,8 +1175,8 @@ def _handleSubscriptionCheckoutCompleted(session, eventId: str) -> None:
def _handleSubscriptionWebhook(event) -> None:
"""Process Stripe subscription webhook events.
    All record resolution is by stripeSubscriptionId — no mandate-based guessing."""
from modules.interfaces.interfaceDbSubscription import _getRootInterface as getSubRootInterface
from modules.datamodels.datamodelSubscription import SubscriptionStatusEnum, _getPlan
from modules.interfaces.interfaceDbSubscription import getRootInterface as getSubRootInterface
from modules.datamodels.datamodelSubscription import SubscriptionStatusEnum, getPlan
from modules.serviceCenter.services.serviceSubscription.mainServiceSubscription import (
getService as getSubscriptionService,
_notifySubscriptionChange,
@ -1205,11 +1225,11 @@ def _handleSubscriptionWebhook(event) -> None:
subInterface.transitionStatus(subId, SubscriptionStatusEnum.SCHEDULED, SubscriptionStatusEnum.ACTIVE)
subService.invalidateCache(mandateId)
planKey = sub.get("planKey", "")
plan = _getPlan(planKey)
plan = getPlan(planKey)
refreshedSub = subInterface.getById(subId)
_notifySubscriptionChange(mandateId, "activated", plan, subscriptionRecord=refreshedSub, platformUrl=webhookPlatformUrl)
try:
_getRootInterface().creditSubscriptionBudget(mandateId, planKey, periodLabel="Erstaktivierung")
getRootInterface().creditSubscriptionBudget(mandateId, planKey, periodLabel="Erstaktivierung")
except Exception as ex:
logger.error("creditSubscriptionBudget SCHEDULED->ACTIVE failed: %s", ex)
logger.info("SCHEDULED -> ACTIVE for sub %s (mandate %s)", subId, mandateId)
@ -1245,7 +1265,7 @@ def _handleSubscriptionWebhook(event) -> None:
scheduled["id"], SubscriptionStatusEnum.SCHEDULED, SubscriptionStatusEnum.ACTIVE,
)
subService.invalidateCache(mandateId)
plan = _getPlan(scheduled.get("planKey", ""))
plan = getPlan(scheduled.get("planKey", ""))
refreshedScheduled = subInterface.getById(scheduled["id"])
_notifySubscriptionChange(mandateId, "activated", plan, subscriptionRecord=refreshedScheduled, platformUrl=webhookPlatformUrl)
logger.info("Promoted SCHEDULED sub %s -> ACTIVE (mandate %s)", scheduled["id"], mandateId)
@ -1256,7 +1276,7 @@ def _handleSubscriptionWebhook(event) -> None:
if currentStatus == SubscriptionStatusEnum.ACTIVE:
subInterface.transitionStatus(subId, SubscriptionStatusEnum.ACTIVE, SubscriptionStatusEnum.PAST_DUE)
subService.invalidateCache(mandateId)
plan = _getPlan(sub.get("planKey", ""))
plan = getPlan(sub.get("planKey", ""))
_notifySubscriptionChange(mandateId, "payment_failed", plan, subscriptionRecord=sub, platformUrl=webhookPlatformUrl)
logger.info("Payment failed for sub %s (mandate %s)", subId, mandateId)
@ -1283,7 +1303,7 @@ def _handleSubscriptionWebhook(event) -> None:
period_start_at = datetime.fromtimestamp(int(period_ts), tz=timezone.utc)
periodLabel = period_start_at.strftime("%Y-%m-%d")
try:
billing_if = _getRootInterface()
billing_if = getRootInterface()
billing_if.resetStorageBillingPeriod(mandateId, period_start_at)
billing_if.reconcileMandateStorageBilling(mandateId)
except Exception as ex:
@ -1291,7 +1311,7 @@ def _handleSubscriptionWebhook(event) -> None:
planKey = sub.get("planKey", "")
try:
billing_if = _getRootInterface()
billing_if = getRootInterface()
billing_if.creditSubscriptionBudget(mandateId, planKey, periodLabel=periodLabel or "Periodenverlängerung")
except Exception as ex:
logger.error("creditSubscriptionBudget on invoice.paid failed: %s", ex)
@ -1408,28 +1428,21 @@ def getUsersForMandate(
def _attachCreatedByUserNamesToTransactionRows(rows: List[Dict[str, Any]]) -> None:
    """Resolve createdByUserId to userName using the central FK resolvers.

    Mutates ``rows`` in place, setting ``row["userName"]`` on every row.
    Unresolvable IDs yield ``None`` (not a truncated UUID) so the frontend
    renders an explicit NA() indicator instead of a misleading 8-char snippet.
    """
    from modules.routes.routeHelpers import resolveUserLabels
    # Collect the distinct non-empty user IDs so the resolver is called once.
    userIds = list({r.get("createdByUserId") for r in rows if r.get("createdByUserId")})
    userMap: Dict[str, Optional[str]] = {}
    if userIds:
        userMap = resolveUserLabels(userIds)
    for row in rows:
        uid = row.get("createdByUserId")
        # None for missing/unresolved IDs — never a raw ID fragment.
        row["userName"] = userMap.get(uid) if uid else None
def _enrichTransactionRows(transactions) -> List[Dict[str, Any]]:
@ -1717,18 +1730,13 @@ def getUserViewStatistics(
for acc in allAccounts:
accountToMandate[acc.get("id", "")] = acc.get("mandateId", "")
from modules.interfaces.interfaceDbApp import getInterface as getAppInterface
mandateIdsForLookup = list(set(accountToMandate.values()))
mandateMap: Dict[str, str] = {}
if mandateIdsForLookup:
rootIface = getAppInterface(ctx.user)
mandatesById = rootIface.getMandatesByIds(mandateIdsForLookup)
for mid, m in mandatesById.items():
mandateMap[mid] = getattr(m, "name", mid) or mid
from modules.routes.routeHelpers import resolveMandateLabels
mandateIdsForLookup = list({v for v in accountToMandate.values() if v})
mandateMap: Dict[str, Optional[str]] = resolveMandateLabels(mandateIdsForLookup) if mandateIdsForLookup else {}
def _mandateName(accountId: str) -> str:
mid = accountToMandate.get(accountId, "")
return mandateMap.get(mid, mid or "unknown")
return mandateMap.get(mid) or f"NA({mid})" if mid else "unknown"
costByMandate: Dict[str, float] = {}
for accId, total in agg.get("costByAccountId", {}).items():

View file

@ -127,7 +127,7 @@ def get_auth_authority_options(
# CRUD ENDPOINTS
# ============================================================================
@router.get("/", response_model=PaginatedResponse[UserConnection])
@router.get("/")
@limiter.limit("30/minute")
async def get_connections(
request: Request,
@ -135,7 +135,7 @@ async def get_connections(
mode: Optional[str] = Query(None, description="'filterValues' for distinct column values, 'ids' for all filtered IDs"),
column: Optional[str] = Query(None, description="Column key (required when mode=filterValues)"),
currentUser: User = Depends(getCurrentUser)
) -> PaginatedResponse[UserConnection]:
):
"""Get connections for the current user with optional pagination, sorting, and filtering.
SECURITY: This endpoint is secure - users can only see their own connections.
@ -151,7 +151,7 @@ async def get_connections(
- GET /api/connections/?mode=filterValues&column=status
- GET /api/connections/?mode=ids
"""
from modules.routes.routeHelpers import handleFilterValuesInMemory, handleIdsInMemory
from modules.routes.routeHelpers import handleFilterValuesInMemory, handleIdsInMemory, enrichRowsWithFkLabels
def _buildEnhancedItems():
interface = getInterface(currentUser)
@ -252,27 +252,13 @@ async def get_connections(
}
enhanced_connections_dict.append(connection_dict)
# If no pagination requested, return all items
enrichRowsWithFkLabels(enhanced_connections_dict, UserConnection)
if paginationParams is None:
# Convert back to UserConnection objects (enum strings are already in dict)
items = []
for conn_dict in enhanced_connections_dict:
conn_dict_copy = dict(conn_dict)
if "authority" in conn_dict_copy and isinstance(conn_dict_copy["authority"], str):
try:
conn_dict_copy["authority"] = AuthAuthority(conn_dict_copy["authority"])
except ValueError:
pass
if "status" in conn_dict_copy and isinstance(conn_dict_copy["status"], str):
try:
conn_dict_copy["status"] = ConnectionStatus(conn_dict_copy["status"])
except ValueError:
pass
items.append(UserConnection(**conn_dict_copy))
return PaginatedResponse(
items=items,
pagination=None
)
return {
"items": enhanced_connections_dict,
"pagination": None,
}
# Apply filtering if provided
if paginationParams.filters:
@ -292,43 +278,24 @@ async def get_connections(
paginationParams.sort
)
# Count total items after filters
totalItems = len(enhanced_connections_dict)
totalPages = math.ceil(totalItems / paginationParams.pageSize) if totalItems > 0 else 0
# Apply pagination (skip/limit)
startIdx = (paginationParams.page - 1) * paginationParams.pageSize
endIdx = startIdx + paginationParams.pageSize
paged_connections = enhanced_connections_dict[startIdx:endIdx]
# Convert back to UserConnection objects (convert enum strings back to enums)
items = []
for conn_dict in paged_connections:
# Convert enum strings back to enum objects
conn_dict_copy = dict(conn_dict)
if "authority" in conn_dict_copy and isinstance(conn_dict_copy["authority"], str):
try:
conn_dict_copy["authority"] = AuthAuthority(conn_dict_copy["authority"])
except ValueError:
pass # Keep as string if invalid
if "status" in conn_dict_copy and isinstance(conn_dict_copy["status"], str):
try:
conn_dict_copy["status"] = ConnectionStatus(conn_dict_copy["status"])
except ValueError:
pass # Keep as string if invalid
items.append(UserConnection(**conn_dict_copy))
return PaginatedResponse(
items=items,
pagination=PaginationMetadata(
return {
"items": paged_connections,
"pagination": PaginationMetadata(
currentPage=paginationParams.page,
pageSize=paginationParams.pageSize,
totalItems=totalItems,
totalPages=totalPages,
sort=paginationParams.sort,
filters=paginationParams.filters
)
)
).model_dump(),
}
except HTTPException:
raise

View file

@ -17,6 +17,7 @@ from modules.shared.attributeUtils import getModelAttributeDefinitions
from modules.datamodels.datamodelUam import User
from modules.datamodels.datamodelPagination import PaginationParams, PaginatedResponse, PaginationMetadata, normalize_pagination_dict
from modules.shared.i18nRegistry import apiRouteContext
from modules.routes.routeHelpers import enrichRowsWithFkLabels
routeApiMsg = apiRouteContext("routeDataFiles")
# Configure logger
@ -220,7 +221,7 @@ router = APIRouter(
}
)
@router.get("/list", response_model=PaginatedResponse[FileItem])
@router.get("/list")
@limiter.limit("120/minute")
def get_files(
request: Request,
@ -229,7 +230,7 @@ def get_files(
column: Optional[str] = Query(None, description="Column key (required when mode=filterValues)"),
currentUser: User = Depends(getCurrentUser),
context: RequestContext = Depends(getRequestContext)
) -> PaginatedResponse[FileItem]:
):
"""
Get files with optional pagination, sorting, and filtering.
@ -303,24 +304,27 @@ def get_files(
recordFilter = {"folderId": fVal}
result = managementInterface.getAllFiles(pagination=paginationParams, recordFilter=recordFilter)
def _filesToDicts(items):
return [f.model_dump() if hasattr(f, "model_dump") else (dict(f) if not isinstance(f, dict) else f) for f in items]
if paginationParams:
return PaginatedResponse(
items=result.items,
pagination=PaginationMetadata(
enriched = enrichRowsWithFkLabels(_filesToDicts(result.items), FileItem)
return {
"items": enriched,
"pagination": PaginationMetadata(
currentPage=paginationParams.page,
pageSize=paginationParams.pageSize,
totalItems=result.totalItems,
totalPages=result.totalPages,
sort=paginationParams.sort,
filters=paginationParams.filters
)
)
).model_dump(),
}
else:
return PaginatedResponse(
items=result,
pagination=None
)
items = result if isinstance(result, list) else (result.items if hasattr(result, "items") else [result])
enriched = enrichRowsWithFkLabels(_filesToDicts(items), FileItem)
return {"items": enriched, "pagination": None}
except HTTPException:
raise
except Exception as e:
@ -1019,14 +1023,14 @@ def updateFileNeutralize(
# ── File endpoints with path parameters (catch-all /{fileId}) ─────────────────
@router.get("/{fileId}", response_model=FileItem)
@router.get("/{fileId}")
@limiter.limit("30/minute")
def get_file(
request: Request,
fileId: str = Path(..., description="ID of the file"),
currentUser: User = Depends(getCurrentUser),
context: RequestContext = Depends(getRequestContext)
) -> FileItem:
):
"""Get a file. Resolves the file's mandate/instance scope automatically."""
try:
_mgmt, fileData = _resolveFileWithScope(currentUser, context, fileId)
@ -1036,7 +1040,9 @@ def get_file(
detail=f"File with ID {fileId} not found"
)
return fileData
fileDict = fileData.model_dump() if hasattr(fileData, "model_dump") else dict(fileData)
enriched = enrichRowsWithFkLabels([fileDict], FileItem)
return enriched[0]
except interfaceDbManagement.FileNotFoundError as e:
logger.warning(f"File not found: {str(e)}")

View file

@ -22,7 +22,7 @@ from modules.auth import limiter, requirePlatformAdmin, getRequestContext, getCu
# Import interfaces
import modules.interfaces.interfaceDbApp as interfaceDbApp
from modules.interfaces.interfaceDbBilling import _getRootInterface as _getBillingRootInterface
from modules.interfaces.interfaceDbBilling import getRootInterface as _getBillingRootInterface
from modules.shared.attributeUtils import getModelAttributeDefinitions
from modules.shared.auditLogger import audit_logger
@ -318,7 +318,7 @@ def create_mandate(
from modules.datamodels.datamodelSubscription import (
MandateSubscription, SubscriptionStatusEnum, BUILTIN_PLANS,
)
from modules.interfaces.interfaceDbSubscription import _getRootInterface as _getSubRoot
from modules.interfaces.interfaceDbSubscription import getRootInterface as _getSubRoot
from datetime import datetime, timezone, timedelta
planKey = mandateData.get("planKey", "TRIAL_14D")
@ -660,7 +660,7 @@ def list_mandate_users(
from modules.routes.routeHelpers import (
handleFilterValuesInMemory, handleIdsInMemory,
_applyFiltersAndSort as _sharedApplyFiltersAndSort,
applyFiltersAndSort as _sharedApplyFiltersAndSort,
paginateInMemory,
)
@ -674,13 +674,23 @@ def list_mandate_users(
if paginationParams:
paginationParamsObj = None
try:
paginationDict = json.loads(pagination) if pagination else None
if pagination:
try:
paginationDict = json.loads(pagination)
except json.JSONDecodeError as e:
raise HTTPException(
status_code=400,
detail=f"Invalid 'pagination' query: not valid JSON ({e.msg})",
)
if paginationDict:
paginationDict = normalize_pagination_dict(paginationDict)
paginationParamsObj = PaginationParams(**paginationDict)
except Exception:
pass
try:
paginationDict = normalize_pagination_dict(paginationDict)
paginationParamsObj = PaginationParams(**paginationDict)
except Exception as e:
raise HTTPException(
status_code=400,
detail=f"Invalid 'pagination' payload: {e}",
)
filtered = _sharedApplyFiltersAndSort(result, paginationParamsObj)
totalItems = len(filtered)

View file

@ -44,20 +44,25 @@ def get_prompts(
- filterValues: distinct values for a column (cross-filtered)
- ids: all IDs matching current filters
"""
from modules.routes.routeHelpers import handleFilterValuesInMemory, handleIdsInMemory
from modules.routes.routeHelpers import handleFilterValuesInMemory, handleIdsInMemory, enrichRowsWithFkLabels
def _promptsToEnrichedDicts(promptItems):
dicts = [r.model_dump() if hasattr(r, 'model_dump') else (dict(r) if not isinstance(r, dict) else r) for r in promptItems]
enrichRowsWithFkLabels(dicts, Prompt)
return dicts
if mode == "filterValues":
if not column:
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
managementInterface = interfaceDbManagement.getInterface(currentUser)
result = managementInterface.getAllPrompts(pagination=None)
items = [r.model_dump() if hasattr(r, 'model_dump') else r for r in result]
items = _promptsToEnrichedDicts(result)
return handleFilterValuesInMemory(items, column, pagination)
if mode == "ids":
managementInterface = interfaceDbManagement.getInterface(currentUser)
result = managementInterface.getAllPrompts(pagination=None)
items = [r.model_dump() if hasattr(r, 'model_dump') else r for r in result]
items = _promptsToEnrichedDicts(result)
return handleIdsInMemory(items, pagination)
paginationParams = None
@ -74,22 +79,24 @@ def get_prompts(
result = managementInterface.getAllPrompts(pagination=paginationParams)
if paginationParams:
return PaginatedResponse(
items=result.items,
pagination=PaginationMetadata(
items = _promptsToEnrichedDicts(result.items)
return {
"items": items,
"pagination": PaginationMetadata(
currentPage=paginationParams.page,
pageSize=paginationParams.pageSize,
totalItems=result.totalItems,
totalPages=result.totalPages,
sort=paginationParams.sort,
filters=paginationParams.filters
)
)
).model_dump(),
}
else:
return PaginatedResponse(
items=result,
pagination=None
)
items = _promptsToEnrichedDicts(result)
return {
"items": items,
"pagination": None,
}
@router.post("", response_model=Prompt)

View file

@ -25,12 +25,17 @@ from modules.datamodels.datamodelUam import User, UserInDB, AuthAuthority
from modules.interfaces.interfaceDbApp import getRootInterface
from modules.datamodels.datamodelPagination import PaginationParams, PaginatedResponse, PaginationMetadata, normalize_pagination_dict
from modules.shared.i18nRegistry import apiRouteContext
from modules.routes.routeHelpers import enrichRowsWithFkLabels
routeApiMsg = apiRouteContext("routeDataUsers")
# Configure logger
logger = logging.getLogger(__name__)
def _usersToDicts(items) -> list:
return [u.model_dump() if hasattr(u, "model_dump") else (dict(u) if not isinstance(u, dict) else u) for u in items]
def _isAdminForUser(context: RequestContext, targetUserId: str) -> bool:
"""
Check if the current user has admin rights for the target user.
@ -187,7 +192,7 @@ def get_user_options(
# CRUD ENDPOINTS
# ============================================================================
@router.get("/", response_model=PaginatedResponse[User])
@router.get("/")
@limiter.limit("30/minute")
def get_users(
request: Request,
@ -195,7 +200,7 @@ def get_users(
mode: Optional[str] = Query(None, description="'filterValues' for distinct column values, 'ids' for all filtered IDs"),
column: Optional[str] = Query(None, description="Column key (required when mode=filterValues)"),
context: RequestContext = Depends(getRequestContext)
) -> PaginatedResponse[User]:
):
"""
Get users with optional pagination, sorting, and filtering.
MULTI-TENANT: mandateId from X-Mandate-Id header determines scope.
@ -236,48 +241,44 @@ def get_users(
# Get users for specific mandate using getUsersByMandate
result = appInterface.getUsersByMandate(str(context.mandateId), paginationParams)
# getUsersByMandate returns PaginatedResult if pagination was provided
if paginationParams and hasattr(result, 'items'):
return PaginatedResponse(
items=result.items,
pagination=PaginationMetadata(
enriched = enrichRowsWithFkLabels(_usersToDicts(result.items), User)
return {
"items": enriched,
"pagination": PaginationMetadata(
currentPage=result.currentPage,
pageSize=result.pageSize,
totalItems=result.totalItems,
totalPages=result.totalPages,
sort=paginationParams.sort,
filters=paginationParams.filters
)
)
).model_dump(),
}
else:
# No pagination - result is a list
users = result if isinstance(result, list) else result.items if hasattr(result, 'items') else []
return PaginatedResponse(
items=users,
pagination=None
)
enriched = enrichRowsWithFkLabels(_usersToDicts(users), User)
return {"items": enriched, "pagination": None}
elif context.isPlatformAdmin:
# PlatformAdmin without mandateId — DB-level pagination via interface
result = appInterface.getAllUsers(paginationParams)
if paginationParams and hasattr(result, 'items'):
return PaginatedResponse(
items=result.items,
pagination=PaginationMetadata(
enriched = enrichRowsWithFkLabels(_usersToDicts(result.items), User)
return {
"items": enriched,
"pagination": PaginationMetadata(
currentPage=paginationParams.page,
pageSize=paginationParams.pageSize,
totalItems=result.totalItems,
totalPages=result.totalPages,
sort=paginationParams.sort,
filters=paginationParams.filters
)
)
).model_dump(),
}
else:
users = result if isinstance(result, list) else (result.items if hasattr(result, 'items') else [])
return PaginatedResponse(
items=users,
pagination=None
)
enriched = enrichRowsWithFkLabels(_usersToDicts(users), User)
return {"items": enriched, "pagination": None}
else:
# Non-SysAdmin without mandateId: aggregate users across all admin mandates
rootInterface = getRootInterface()
@ -316,34 +317,30 @@ def get_users(
for u in batchUsers.values()
]
from modules.routes.routeHelpers import _applyFiltersAndSort as _applyFiltersAndSortHelper
from modules.routes.routeHelpers import applyFiltersAndSort as _applyFiltersAndSortHelper
filteredUsers = _applyFiltersAndSortHelper(allUsers, paginationParams)
users = [User(**u) for u in filteredUsers]
enriched = enrichRowsWithFkLabels(filteredUsers, User)
if paginationParams:
import math
totalItems = len(users)
totalItems = len(enriched)
totalPages = math.ceil(totalItems / paginationParams.pageSize) if totalItems > 0 else 0
startIdx = (paginationParams.page - 1) * paginationParams.pageSize
endIdx = startIdx + paginationParams.pageSize
paginatedUsers = users[startIdx:endIdx]
return PaginatedResponse(
items=paginatedUsers,
pagination=PaginationMetadata(
return {
"items": enriched[startIdx:endIdx],
"pagination": PaginationMetadata(
currentPage=paginationParams.page,
pageSize=paginationParams.pageSize,
totalItems=totalItems,
totalPages=totalPages,
sort=paginationParams.sort,
filters=paginationParams.filters
)
)
).model_dump(),
}
else:
return PaginatedResponse(
items=users,
pagination=None
)
return {"items": enriched, "pagination": None}
except HTTPException:
raise
except Exception as e:
@ -753,10 +750,10 @@ def send_password_link(
expiryHours = int(APP_CONFIG.get("Auth_RESET_TOKEN_EXPIRY_HOURS", "24"))
try:
from modules.routes.routeSecurityLocal import _buildAuthEmailHtml, _sendAuthEmail
from modules.routes.routeSecurityLocal import buildAuthEmailHtml, sendAuthEmail
emailSubject = "PowerOn - Passwort setzen"
emailHtml = _buildAuthEmailHtml(
emailHtml = buildAuthEmailHtml(
greeting=f"Hallo {targetUser.fullName or targetUser.username}",
bodyLines=[
"Ein Administrator hat einen Link zum Setzen Ihres Passworts angefordert.",
@ -770,7 +767,7 @@ def send_password_link(
footerText=f"Dieser Link ist {expiryHours} Stunden gültig. Falls Sie diese Anforderung nicht erwartet haben, kontaktieren Sie bitte Ihren Administrator.",
)
emailSent = _sendAuthEmail(
emailSent = sendAuthEmail(
recipient=targetUser.email,
subject=emailSubject,
message="",

View file

@ -12,7 +12,7 @@ Provides unified logic for:
import copy
import json
import logging
from typing import Any, Dict, List, Optional, Callable
from typing import Any, Dict, List, Optional, Callable, Union
from fastapi.responses import JSONResponse
@ -29,64 +29,183 @@ logger = logging.getLogger(__name__)
# Central FK label resolvers (cross-DB)
# ---------------------------------------------------------------------------
def resolveMandateLabels(ids: List[str]) -> Dict[str, Optional[str]]:
    """Resolve mandate IDs to display labels.

    Returns ``None`` (not the ID!) for unresolvable entries so the caller can
    distinguish "resolved" from "missing" and render an explicit indicator
    instead of a misleading raw-ID snippet.
    """
    from modules.interfaces.interfaceDbApp import getRootInterface
    rootIface = getRootInterface()
    mMap = rootIface.getMandatesByIds(ids)
    result: Dict[str, Optional[str]] = {}
    for mid in ids:
        m = mMap.get(mid)
        # Prefer an explicit label; fall back to the mandate name.
        label = (getattr(m, "label", None) or getattr(m, "name", None)) if m else None
        if not label:
            logger.warning("resolveMandateLabels: no label for id=%s (found=%s)", mid, m is not None)
        result[mid] = label or None
    return result
def resolveInstanceLabels(ids: List[str]) -> Dict[str, Optional[str]]:
    """Resolve feature-instance IDs to labels. Returns ``None`` for unresolvable."""
    from modules.interfaces.interfaceDbApp import getRootInterface
    from modules.interfaces.interfaceFeatures import getFeatureInterface
    rootIface = getRootInterface()
    featureIface = getFeatureInterface(rootIface.db)
    result: Dict[str, Optional[str]] = {}
    for iid in ids:
        # One lookup per instance ID; the feature interface has no batch API here.
        fi = featureIface.getFeatureInstance(iid)
        label = fi.label if fi and fi.label else None
        if not label:
            logger.warning("resolveInstanceLabels: no label for id=%s (found=%s)", iid, fi is not None)
        result[iid] = label
    return result
def resolveUserLabels(ids: List[str]) -> Dict[str, Optional[str]]:
    """Resolve user IDs to display names (username, falling back to email).

    Returns ``None`` for unresolvable IDs. If the batched id-filter query
    returns nothing, falls back to a full table scan as a diagnostic path.
    NOTE(review): the full scan is expensive on large user tables — confirm
    it is still needed once the id-filter path is proven reliable.
    """
    from modules.interfaces.interfaceDbApp import getRootInterface
    from modules.datamodels.datamodelUam import User as _User
    rootIface = getRootInterface()
    uniqueIds = list(set(ids))
    users = rootIface.db.getRecordset(
        _User,
        recordFilter={"id": uniqueIds},
    )
    if not users and uniqueIds:
        logger.warning(
            "resolveUserLabels: query returned 0 users for %d ids (db=%s, table=%s). "
            "Attempting full table scan...",
            len(uniqueIds), getattr(rootIface.db, 'dbDatabase', '?'), _User.__name__,
        )
        allUsers = rootIface.db.getRecordset(_User)
        logger.warning(
            "resolveUserLabels: full scan found %d users total. Looking for ids: %s",
            len(allUsers or []), uniqueIds[:3],
        )
        users = [u for u in (allUsers or []) if u.get("id") in set(uniqueIds)]
    result: Dict[str, Optional[str]] = {}
    found: Dict[str, dict] = {}
    for u in (users or []):
        uid = u.get("id", "")
        found[uid] = u
    for uid in ids:
        u = found.get(uid)
        if u:
            result[uid] = u.get("username") or u.get("email") or None
        else:
            logger.warning("resolveUserLabels: user not found for id=%s", uid)
            result[uid] = None
    return result
def resolveRoleLabels(ids: List[str]) -> Dict[str, Optional[str]]:
    """Resolve Role.id to roleLabel. Returns None for unresolvable."""
    if not ids:
        return {}
    from modules.interfaces.interfaceDbApp import getRootInterface
    from modules.datamodels.datamodelRbac import Role as _Role
    rootIface = getRootInterface()
    # Batch-fetch all distinct role records in one query.
    roleRecords = rootIface.db.getRecordset(
        _Role,
        recordFilter={"id": list(set(ids))},
    ) or []
    # Start everything at None so missing roles are explicit, then fill hits.
    labels: Dict[str, Optional[str]] = dict.fromkeys(ids)
    for rec in roleRecords:
        recId = rec.get("id")
        if recId:
            labels[recId] = rec.get("roleLabel") or None
    for roleId in ids:
        if labels.get(roleId) is None:
            logger.warning("resolveRoleLabels: no label for id=%s", roleId)
    return labels
# Registry of builtin FK label resolvers, keyed by the target model name
# used in ``fk_model`` / ``fk_target`` field annotations.
_BUILTIN_FK_RESOLVERS: Dict[str, Callable[[List[str]], Dict[str, Optional[str]]]] = {
    "Mandate": resolveMandateLabels,
    "FeatureInstance": resolveInstanceLabels,
    "User": resolveUserLabels,
    "Role": resolveRoleLabels,
}
def _buildLabelResolversFromModel(modelClass: type) -> Dict[str, Callable[[List[str]], Dict[str, str]]]:
"""
Auto-build labelResolvers dict from fk_model annotations on a Pydantic model.
Maps field names to resolver functions for all fields that have a known fk_model.
Auto-build labelResolvers dict from fk_model / fk_target annotations on a Pydantic model.
Maps field names to resolver functions for all fields that have a known FK target.
Unlike ``_get_fk_sort_meta`` this does NOT require ``fk_label_field`` the
builtin resolvers already know which column to read.
"""
from modules.connectors.connectorDbPostgre import _get_fk_sort_meta
fkMeta = _get_fk_sort_meta(modelClass)
resolvers: Dict[str, Callable[[List[str]], Dict[str, str]]] = {}
for fieldName, meta in fkMeta.items():
fkModelName = meta.get("model", "")
if fkModelName in _BUILTIN_FK_RESOLVERS:
resolvers[fieldName] = _BUILTIN_FK_RESOLVERS[fkModelName]
for name, fieldInfo in modelClass.model_fields.items():
extra = fieldInfo.json_schema_extra
if not extra or not isinstance(extra, dict):
continue
fkModel = extra.get("fk_model")
tgt = extra.get("fk_target")
if not fkModel and isinstance(tgt, dict):
fkModel = tgt.get("table")
if fkModel and fkModel in _BUILTIN_FK_RESOLVERS:
resolvers[name] = _BUILTIN_FK_RESOLVERS[fkModel]
return resolvers
def enrichRowsWithFkLabels(
    rows: List[Dict[str, Any]],
    modelClass: type = None,
    *,
    labelResolvers: Optional[Dict[str, Callable[[List[str]], Dict[str, Optional[str]]]]] = None,
    extraResolvers: Optional[Dict[str, Callable[[List[str]], Dict[str, Optional[str]]]]] = None,
) -> List[Dict[str, Any]]:
    """Add ``{field}Label`` columns to each row for every FK field that has a
    registered resolver.

    ``modelClass`` — if provided, resolvers are auto-built from ``fk_model``
    annotations on the Pydantic model (via ``_buildLabelResolversFromModel``).
    ``labelResolvers`` — explicit resolver map that overrides auto-built ones.
    ``extraResolvers`` — merged on top of auto-built / explicit resolvers. Use
    for ad-hoc fields that are not FK-annotated on the model (e.g.
    ``createdByUserId`` on billing transactions).

    If a label cannot be resolved the ``{field}Label`` value is ``None`` —
    never the raw ID that would reintroduce the silent-truncation bug.
    Mutates and returns ``rows``.
    """
    # Assemble the effective resolver map: explicit map wins over auto-built.
    if labelResolvers is not None:
        activeResolvers: Dict[str, Callable] = dict(labelResolvers)
    elif modelClass is not None:
        activeResolvers = _buildLabelResolversFromModel(modelClass)
    else:
        activeResolvers = {}
    if extraResolvers:
        activeResolvers.update(extraResolvers)
    if not activeResolvers or not rows:
        return rows
    for fkField, resolverFn in activeResolvers.items():
        distinctIds = list({str(row[fkField]) for row in rows if row.get(fkField)})
        if not distinctIds:
            continue
        try:
            resolvedLabels = resolverFn(distinctIds)
        except Exception as e:
            # A failing resolver must not break the response — log and emit None labels.
            logger.error("enrichRowsWithFkLabels: resolver for '%s' raised: %s", fkField, e)
            resolvedLabels = {}
        labelColumn = f"{fkField}Label"
        for row in rows:
            rawId = row.get(fkField)
            row[labelColumn] = resolvedLabels.get(str(rawId)) if rawId else None
    return rows
# ---------------------------------------------------------------------------
# Cross-filter pagination parsing
# ---------------------------------------------------------------------------
@ -210,7 +329,7 @@ def handleIdsMode(
# In-memory helpers (for enriched / non-SQL routes)
# ---------------------------------------------------------------------------
def _applyFiltersAndSort(
def applyFiltersAndSort(
items: List[Dict[str, Any]],
paginationParams: Optional[PaginationParams],
) -> List[Dict[str, Any]]:
@ -364,12 +483,21 @@ def _extractDistinctValues(
items: List[Dict[str, Any]],
columnKey: str,
requestLang: Optional[str] = None,
) -> List[str]:
"""Extract sorted distinct display values for a column from enriched items."""
) -> List[Optional[str]]:
"""Extract sorted distinct display values for a column from enriched items.
Includes ``None`` as the last entry when at least one row has a null/empty
value — this enables the "(Leer)" filter option in the frontend.
"""
_MISSING = object()
values = set()
hasEmpty = False
for item in items:
val = item.get(columnKey)
val = item.get(columnKey, _MISSING)
if val is _MISSING:
continue
if val is None or val == "":
hasEmpty = True
continue
if isinstance(val, bool):
values.add("true" if val else "false")
@ -381,7 +509,10 @@ def _extractDistinctValues(
values.add(text)
else:
values.add(str(val))
return sorted(values, key=lambda v: v.lower())
result: List[Optional[str]] = sorted(values, key=lambda v: v.lower())
if hasEmpty:
result.append(None)
return result
def handleFilterValuesInMemory(
@ -396,7 +527,7 @@ def handleFilterValuesInMemory(
Returns JSONResponse to bypass FastAPI response_model validation.
"""
crossFilterParams = parseCrossFilterPagination(column, paginationJson)
crossFiltered = _applyFiltersAndSort(items, crossFilterParams)
crossFiltered = applyFiltersAndSort(items, crossFilterParams)
return JSONResponse(content=_extractDistinctValues(crossFiltered, column, requestLang))
@ -411,7 +542,7 @@ def handleIdsInMemory(
Returns JSONResponse to bypass FastAPI response_model validation.
"""
pagination = parsePaginationForIds(paginationJson)
filtered = _applyFiltersAndSort(items, pagination)
filtered = applyFiltersAndSort(items, pagination)
ids = []
for item in filtered:
val = item.get(idField)
@ -510,6 +641,7 @@ def getRecordsetPaginatedWithFkSort(
idOrder = {pid: idx for idx, pid in enumerate(pageIds)}
pageItems.sort(key=lambda r: idOrder.get(r.get(idField), 999999))
enrichRowsWithFkLabels(pageItems, modelClass)
totalPages = math.ceil(totalItems / pageSize) if totalItems > 0 else 0
return {"items": pageItems, "totalItems": totalItems, "totalPages": totalPages}

View file

@ -26,7 +26,7 @@ from fastapi.responses import Response
from pydantic import BaseModel, Field
from modules.auth import getCurrentUser, requireSysAdmin, requirePlatformAdmin
from modules.connectors.connectorDbPostgre import _get_cached_connector
from modules.connectors.connectorDbPostgre import getCachedConnector
from modules.datamodels.datamodelAi import (
AiCallOptions,
AiCallRequest,
@ -40,11 +40,11 @@ from modules.datamodels.datamodelRbac import Role
from modules.datamodels.datamodelFeatures import Feature
from modules.datamodels.datamodelNotification import NotificationType
from modules.interfaces.interfaceDbManagement import getInterface as getMgmtInterface
from modules.routes.routeNotifications import _createNotification
from modules.routes.routeNotifications import createNotification
from modules.shared.configuration import APP_CONFIG
from modules.shared.i18nRegistry import (
_enforceSourcePlaceholders,
_loadCache as _reloadI18nCache,
loadCache as _reloadI18nCache,
apiRouteContext,
)
from modules.shared.timeUtils import getUtcTimestamp
@ -109,7 +109,7 @@ _ISO_PRIORITY_CODES: List[str] = ["de", "gsw", "en", "fr", "it"]
# ---------------------------------------------------------------------------
def _publicMgmtDb():
return _get_cached_connector(
return getCachedConnector(
dbHost=APP_CONFIG.get("DB_HOST", "localhost"),
dbDatabase="poweron_management",
dbUser=APP_CONFIG.get("DB_USER"),
@ -729,7 +729,7 @@ async def _run_create_language_job_async(userId: str, code: str, label: str, cur
tmCount = await _translateTextMultilingualFields(db, code, label, billingCb)
_createNotification(
createNotification(
userId,
NotificationType.SYSTEM,
title="Sprachset erstellt",
@ -739,7 +739,7 @@ async def _run_create_language_job_async(userId: str, code: str, label: str, cur
logger.info("i18n create job done: code=%s, translated=%d/%d, tm_fields=%d", code, len(translated), len(xxEntries), tmCount)
except Exception as e:
logger.exception("create language job failed: %s", e)
_createNotification(
createNotification(
userId,
NotificationType.SYSTEM,
title="Sprachset fehlgeschlagen",
@ -790,7 +790,7 @@ async def create_language_set(
db.recordCreate(UiLanguageSet, rec)
background.add_task(_run_create_language_job, uid, code, resolvedLabel, currentUser, mandateId)
_createNotification(
createNotification(
uid,
NotificationType.SYSTEM,
title="Sprachset wird erzeugt",

View file

@ -21,7 +21,7 @@ from pydantic import BaseModel, Field, model_validator
from modules.auth import limiter, getRequestContext, RequestContext, getCurrentUser
from modules.datamodels.datamodelUam import User
from modules.datamodels.datamodelPagination import PaginationParams, PaginationMetadata, normalize_pagination_dict
from modules.routes.routeHelpers import _applyFiltersAndSort, handleFilterValuesInMemory, handleIdsInMemory
from modules.routes.routeHelpers import applyFiltersAndSort, handleFilterValuesInMemory, handleIdsInMemory, enrichRowsWithFkLabels
from modules.datamodels.datamodelInvitation import Invitation
from modules.interfaces.interfaceDbApp import getRootInterface
from modules.shared.timeUtils import getUtcTimestamp
@ -302,8 +302,8 @@ def create_invitation(
emailSubject = f"Einladung zu {mandateName}"
invite_desc = f"dem Mandanten «{mandateName}» beizutreten"
from modules.routes.routeSecurityLocal import _buildAuthEmailHtml
emailBody = _buildAuthEmailHtml(
from modules.routes.routeSecurityLocal import buildAuthEmailHtml
emailBody = buildAuthEmailHtml(
greeting=f"Hallo {display_name}",
bodyLines=[
f"Sie wurden eingeladen, {invite_desc}.",
@ -496,20 +496,22 @@ def list_invitations(
raise HTTPException(status_code=400, detail=f"Invalid pagination parameter: {str(e)}")
if paginationParams:
filtered = _applyFiltersAndSort(result, paginationParams)
filtered = applyFiltersAndSort(result, paginationParams)
totalItems = len(filtered)
totalPages = math.ceil(totalItems / paginationParams.pageSize) if totalItems > 0 else 0
startIdx = (paginationParams.page - 1) * paginationParams.pageSize
endIdx = startIdx + paginationParams.pageSize
enriched = enrichRowsWithFkLabels(filtered[startIdx:endIdx], Invitation)
return {
"items": filtered[startIdx:endIdx],
"items": enriched,
"pagination": PaginationMetadata(
currentPage=paginationParams.page, pageSize=paginationParams.pageSize,
totalItems=totalItems, totalPages=totalPages,
sort=paginationParams.sort, filters=paginationParams.filters,
).model_dump(),
}
return result
enriched = enrichRowsWithFkLabels(result, Invitation)
return {"items": enriched, "pagination": None}
except HTTPException:
raise
@ -809,13 +811,13 @@ def accept_invitation(
if featureInstanceId:
existingAccess = rootInterface.getFeatureAccess(str(currentUser.id), featureInstanceId)
if existingAccess:
# Update existing access with additional roles
# Update existing access with additional roles. addRoleToFeatureAccess
# is already idempotent (returns silently when the role is already
# assigned), so any exception here is a real error and must be
# surfaced — not swallowed.
featureAccessId = str(existingAccess.id)
for roleId in roleIds:
try:
rootInterface.addRoleToFeatureAccess(str(existingAccess.id), roleId)
except Exception:
pass # Role might already be assigned
rootInterface.addRoleToFeatureAccess(str(existingAccess.id), roleId)
message = "Roles updated for existing feature access"
else:
# Create feature access with instance-level roles
@ -828,14 +830,13 @@ def accept_invitation(
featureAccessId = str(featureAccess.id)
message = "Successfully joined feature instance"
else:
# Legacy: mandate-only invitation (no feature instance)
# Legacy: mandate-only invitation (no feature instance).
# addRoleToUserMandate is already idempotent — any exception here
# is a real error (e.g. DB / FK constraint) and must propagate.
existingMembership = rootInterface.getUserMandate(str(currentUser.id), mandateId)
if existingMembership:
for roleId in roleIds:
try:
rootInterface.addRoleToUserMandate(str(existingMembership.id), roleId)
except Exception:
pass
rootInterface.addRoleToUserMandate(str(existingMembership.id), roleId)
message = "Roles updated for existing membership"
else:
rootInterface.createUserMandate(

View file

@ -52,7 +52,7 @@ class UnreadCountResponse(BaseModel):
# Helper Functions
# =============================================================================
def _createNotification(
def createNotification(
userId: str,
notificationType: NotificationType,
title: str,
@ -103,7 +103,7 @@ def create_access_change_notification(
Failures are logged only so RBAC mutations still succeed.
"""
try:
_createNotification(
createNotification(
userId=userId,
notificationType=NotificationType.SYSTEM,
title=title,
@ -132,7 +132,7 @@ def createInvitationNotification(
msg = f"{inviterName} hat Sie zur Feature-Instanz '{featureInstanceName}' eingeladen."
else:
msg = f"{inviterName} hat Sie zu '{mandateName}' eingeladen."
return _createNotification(
return createNotification(
userId=userId,
notificationType=NotificationType.INVITATION,
title="Neue Einladung",

View file

@ -28,7 +28,7 @@ routeApiMsg = apiRouteContext("routeSecurityLocal")
logger = logging.getLogger(__name__)
def _buildAuthEmailHtml(
def buildAuthEmailHtml(
greeting: str,
bodyLines: list,
buttonText: str = None,
@ -118,7 +118,7 @@ def _buildAuthEmailHtml(
</html>'''
def _sendAuthEmail(recipient: str, subject: str, message: str, userId: str = None, htmlOverride: str = None) -> bool:
def sendAuthEmail(recipient: str, subject: str, message: str, userId: str = None, htmlOverride: str = None) -> bool:
"""
Send authentication-related email directly without requiring full Services initialization.
Used for registration, password reset, and other auth flows.
@ -128,7 +128,7 @@ def _sendAuthEmail(recipient: str, subject: str, message: str, userId: str = Non
subject: Email subject
message: Plain text fallback (ignored when htmlOverride is given)
userId: Optional user ID for logging
htmlOverride: Pre-built branded HTML (from _buildAuthEmailHtml)
htmlOverride: Pre-built branded HTML (from buildAuthEmailHtml)
Returns:
bool: True if email was sent successfully
@ -486,7 +486,7 @@ def register_user(
expiryHours = int(APP_CONFIG.get("Auth_RESET_TOKEN_EXPIRY_HOURS", "24"))
emailSubject = "PowerOn Registrierung - Passwort setzen"
emailHtml = _buildAuthEmailHtml(
emailHtml = buildAuthEmailHtml(
greeting=f"Hallo {user.fullName or user.username}",
bodyLines=[
"Vielen Dank für Ihre Registrierung bei PowerOn.",
@ -500,7 +500,7 @@ def register_user(
footerText=f"Dieser Link ist {expiryHours} Stunden gültig. Falls Sie sich nicht registriert haben, können Sie diese E-Mail ignorieren.",
)
emailSent = _sendAuthEmail(
emailSent = sendAuthEmail(
recipient=user.email,
subject=emailSubject,
message="",
@ -787,7 +787,7 @@ def password_reset_request(
# Send email using dedicated auth email function
emailSubject = "PowerOn - Passwort zurücksetzen"
emailHtml = _buildAuthEmailHtml(
emailHtml = buildAuthEmailHtml(
greeting=f"Hallo {user.fullName or user.username}",
bodyLines=[
"Sie haben eine Passwort-Zurücksetzung für Ihren PowerOn Account angefordert.",
@ -801,7 +801,7 @@ def password_reset_request(
footerText=f"Dieser Link ist {expiryHours} Stunden gültig. Falls Sie diese Anforderung nicht gestellt haben, können Sie diese E-Mail ignorieren.",
)
emailSent = _sendAuthEmail(
emailSent = sendAuthEmail(
recipient=user.email,
subject=emailSubject,
message="",

View file

@ -227,7 +227,7 @@ def getSubscriptionInfo(
}
from modules.datamodels.datamodelSubscription import BUILTIN_PLANS
from modules.interfaces.interfaceDbSubscription import _getRootInterface as _getSubRoot
from modules.interfaces.interfaceDbSubscription import getRootInterface as _getSubRoot
subInterface = _getSubRoot()
allSubs = subInterface.listForMandate(mandateId)
@ -342,7 +342,7 @@ def activateStoreFeature(
# ── 1. Resolve subscription & plan ──────────────────────────────
from modules.datamodels.datamodelSubscription import MandateSubscription, BUILTIN_PLANS, SubscriptionStatusEnum
from modules.interfaces.interfaceDbSubscription import _getRootInterface as _getSubRoot
from modules.interfaces.interfaceDbSubscription import getRootInterface as _getSubRoot
subInterface = _getSubRoot()
operative = subInterface.getOperativeForMandate(mandateId)

View file

@ -22,7 +22,7 @@ from pydantic import BaseModel, Field
from modules.auth import limiter, getRequestContext, RequestContext
from modules.datamodels.datamodelPagination import PaginationParams, PaginatedResponse, PaginationMetadata, normalize_pagination_dict
from modules.routes.routeHelpers import _applyFiltersAndSort, handleFilterValuesInMemory, handleIdsInMemory
from modules.routes.routeHelpers import applyFiltersAndSort, handleFilterValuesInMemory, handleIdsInMemory
from modules.shared.i18nRegistry import apiRouteContext, resolveText
routeApiMsg = apiRouteContext("routeSubscription")
@ -46,25 +46,28 @@ def _resolveMandateId(context: RequestContext) -> str:
def _assertMandateAdmin(context: RequestContext, mandateId: str) -> None:
"""Authorize the caller as PlatformAdmin or admin of the given mandate.
Fail-loud: a DB error during role lookup MUST NOT be silently masked as
"no permission", which would produce misleading 403s and hide infra outages.
Any unexpected exception propagates and surfaces as a 500.
"""
if context.isPlatformAdmin:
return
try:
from modules.interfaces.interfaceDbApp import getRootInterface
rootInterface = getRootInterface()
userMandates = rootInterface.getUserMandates(str(context.user.id))
for um in userMandates:
if str(getattr(um, "mandateId", None)) != str(mandateId):
continue
if not getattr(um, "enabled", True):
continue
umId = str(getattr(um, "id", ""))
roleIds = rootInterface.getRoleIdsForUserMandate(umId)
for roleId in roleIds:
role = rootInterface.getRole(roleId)
if role and role.roleLabel == "admin" and not role.featureInstanceId:
return
except Exception:
pass
from modules.interfaces.interfaceDbApp import getRootInterface
rootInterface = getRootInterface()
userMandates = rootInterface.getUserMandates(str(context.user.id))
for um in userMandates:
if str(getattr(um, "mandateId", None)) != str(mandateId):
continue
if not getattr(um, "enabled", True):
continue
umId = str(getattr(um, "id", ""))
roleIds = rootInterface.getRoleIdsForUserMandate(umId)
for roleId in roleIds:
role = rootInterface.getRole(roleId)
if role and role.roleLabel == "admin" and not role.featureInstanceId:
return
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=routeApiMsg("Mandate admin role required"))
@ -309,7 +312,7 @@ def forceCancel(
from modules.serviceCenter.services.serviceSubscription.mainServiceSubscription import (
getService as getSubscriptionService,
)
from modules.interfaces.interfaceDbSubscription import _getRootInterface as getSubRootInterface
from modules.interfaces.interfaceDbSubscription import getRootInterface as getSubRootInterface
sub = getSubRootInterface().getById(data.subscriptionId)
if not sub:
raise HTTPException(status_code=404, detail=routeApiMsg("Subscription not found"))
@ -360,10 +363,10 @@ def verifyCheckout(
if session.get("mode") != "subscription":
raise HTTPException(status_code=400, detail=routeApiMsg("Not a subscription checkout session"))
from modules.routes.routeBilling import _handleSubscriptionCheckoutCompleted
from modules.routes.routeBilling import handleSubscriptionCheckoutCompleted
try:
_handleSubscriptionCheckoutCompleted(session, f"verify-{data.sessionId}")
handleSubscriptionCheckoutCompleted(session, f"verify-{data.sessionId}")
except Exception as e:
logger.warning(
"verifyCheckout: handler raised for session %s mandate %s: %s",
@ -383,7 +386,7 @@ def verifyCheckout(
planKey = operative.get("planKey", "")
if planKey:
try:
from modules.interfaces.interfaceDbBilling import _getRootInterface as _getBillingRoot
from modules.interfaces.interfaceDbBilling import getRootInterface as _getBillingRoot
_getBillingRoot().ensureActivationBudget(mandateId, planKey)
except Exception as ex:
logger.warning("verifyCheckout: ensureActivationBudget failed: %s", ex)
@ -398,23 +401,15 @@ def verifyCheckout(
def _buildEnrichedSubscriptions() -> List[Dict[str, Any]]:
"""Build the full enriched subscription list (shared by list + mode=filterValues)."""
from modules.interfaces.interfaceDbSubscription import _getRootInterface as getSubRootInterface
from modules.interfaces.interfaceDbSubscription import getRootInterface as getSubRootInterface
from modules.datamodels.datamodelSubscription import BUILTIN_PLANS, OPERATIVE_STATUSES
subInterface = getSubRootInterface()
allSubs = subInterface.listAll()
mandateNames: Dict[str, str] = {}
try:
from modules.datamodels.datamodelUam import Mandate
from modules.security.rootAccess import getRootDbAppConnector
appDb = getRootDbAppConnector()
for row in appDb.getRecordset(Mandate):
r = dict(row)
mid = r.get("id", "")
mandateNames[mid] = r.get("label") or r.get("name") or mid[:8]
except Exception as e:
logger.warning("Could not bulk-resolve mandate names: %s", e)
from modules.routes.routeHelpers import resolveMandateLabels
allMandateIds = list({sub.get("mandateId") for sub in allSubs if sub.get("mandateId")})
mandateNames: Dict[str, Optional[str]] = resolveMandateLabels(allMandateIds) if allMandateIds else {}
operativeValues = {s.value for s in OPERATIVE_STATUSES}
@ -452,7 +447,7 @@ def _buildEnrichedSubscriptions() -> List[Dict[str, Any]]:
planKey = sub.get("planKey", "")
plan = BUILTIN_PLANS.get(planKey)
sub["mandateName"] = mandateNames.get(mid, mid[:8])
sub["mandateName"] = mandateNames.get(mid)
sub["planTitle"] = resolveText(plan.title) if plan else planKey
if sub.get("status") in operativeValues:
@ -507,7 +502,7 @@ def getAllSubscriptions(
raise HTTPException(status_code=400, detail=f"Invalid pagination parameter: {str(e)}")
enriched = _buildEnrichedSubscriptions()
filtered = _applyFiltersAndSort(enriched, paginationParams)
filtered = applyFiltersAndSort(enriched, paginationParams)
if paginationParams:
totalItems = len(filtered)
@ -547,7 +542,7 @@ def _getDataVolumeUsage(
from modules.datamodels.datamodelFeatures import FeatureInstance
from modules.interfaces.interfaceDbKnowledge import aggregateMandateRagTotalBytes
from modules.interfaces.interfaceDbManagement import getInterface as getMgmtInterface
from modules.interfaces.interfaceDbSubscription import _getRootInterface as _getSubRootIf
from modules.interfaces.interfaceDbSubscription import getRootInterface as _getSubRootIf
rootIf = getRootInterface()
mandateId = targetMandateId

View file

@ -19,7 +19,7 @@ from slowapi import Limiter
from slowapi.util import get_remote_address
from modules.auth.authentication import getRequestContext, RequestContext
from modules.system.mainSystem import NAVIGATION_SECTIONS, _objectKeyToUiComponent
from modules.system.mainSystem import NAVIGATION_SECTIONS, objectKeyToUiComponent
from modules.shared.i18nRegistry import resolveText, t
from modules.interfaces.interfaceDbApp import getRootInterface
from modules.interfaces.interfaceFeatures import getFeatureInterface
@ -455,7 +455,7 @@ def _buildStaticBlocks(
def _formatBlockItem(item: Dict[str, Any]) -> Dict[str, Any]:
"""Format a navigation item for the API response."""
objectKey = item["objectKey"]
uiComponent = _objectKeyToUiComponent(objectKey)
uiComponent = objectKeyToUiComponent(objectKey)
return {
"uiComponent": uiComponent,

View file

@ -14,7 +14,7 @@ from typing import Any, Dict
from fastapi import APIRouter, Body, Depends, HTTPException, Query, Request, status
from modules.auth import getCurrentUser, limiter
from modules.datamodels.datamodelUam import User, UserVoicePreferences, _normalizeTtsVoiceMap
from modules.datamodels.datamodelUam import User, UserVoicePreferences, normalizeTtsVoiceMap
from modules.interfaces.interfaceDbApp import getRootInterface
from modules.interfaces.interfaceVoiceObjects import getVoiceInterface
from modules.shared.i18nRegistry import apiRouteContext
@ -83,7 +83,7 @@ def updateVoicePreferences(
}
updateData = {k: v for k, v in preferences.items() if k in allowedFields}
if "ttsVoiceMap" in updateData:
updateData["ttsVoiceMap"] = _normalizeTtsVoiceMap(updateData["ttsVoiceMap"])
updateData["ttsVoiceMap"] = normalizeTtsVoiceMap(updateData["ttsVoiceMap"])
if existing:
existingRecord = existing[0]

View file

@ -154,6 +154,35 @@ def _userMayDeleteWorkflow(context: RequestContext, wfMandateId: Optional[str])
return wfMandateId in adminMandateIds
def _parsePaginationOr400(pagination: Optional[str]) -> Optional[PaginationParams]:
"""Parse a JSON pagination query string into PaginationParams.
Returns None when the input is empty/None. Raises HTTPException(400) on any
parse / validation error so the caller can propagate the error to the
client instead of silently falling back to defaults (which used to mask
real frontend bugs).
"""
if not pagination:
return None
try:
paginationDict = json.loads(pagination)
except json.JSONDecodeError as e:
raise HTTPException(
status_code=400,
detail=f"Invalid 'pagination' query: not valid JSON ({e.msg})",
)
if not paginationDict:
return None
try:
paginationDict = normalize_pagination_dict(paginationDict)
return PaginationParams(**paginationDict)
except Exception as e:
raise HTTPException(
status_code=400,
detail=f"Invalid 'pagination' payload: {e}",
)
def _cascadeDeleteAutoWorkflow(db: DatabaseConnector, workflowId: str) -> None:
"""Delete AutoWorkflow and dependent rows (same order as interfaceDbApp._cascadeDeleteGraphicalEditorData)."""
wf_id = workflowId
@ -218,16 +247,7 @@ def get_workflow_runs(
if mandateId:
recordFilter["mandateId"] = mandateId
paginationParams = None
if pagination:
try:
paginationDict = json.loads(pagination)
if paginationDict:
paginationDict = normalize_pagination_dict(paginationDict)
paginationParams = PaginationParams(**paginationDict)
except Exception:
pass
paginationParams = _parsePaginationOr400(pagination)
if not paginationParams:
page = (offset // limit) + 1 if limit > 0 else 1
paginationParams = PaginationParams(
@ -252,30 +272,7 @@ def get_workflow_runs(
for wf in (wfs or []):
wfMap[wf.get("id")] = wf
mandateIds = list({r.get("mandateId") for r in pageRuns if r.get("mandateId")})
instanceIds = list({
wfMap[r.get("workflowId")].get("featureInstanceId")
for r in pageRuns
if r.get("workflowId") in wfMap and wfMap[r.get("workflowId")].get("featureInstanceId")
})
mandateLabelMap: dict = {}
instanceLabelMap: dict = {}
try:
rootIface = getRootInterface()
if mandateIds:
mMap = rootIface.getMandatesByIds(mandateIds)
for mid, m in mMap.items():
mandateLabelMap[mid] = getattr(m, "label", None) or getattr(m, "name", mid) or mid
if instanceIds:
from modules.interfaces.interfaceFeatures import getFeatureInterface
featureIface = getFeatureInterface(rootIface.db)
for iid in instanceIds:
fi = featureIface.getFeatureInstance(iid)
if fi:
instanceLabelMap[iid] = fi.label or iid
except Exception as e:
logger.warning(f"Failed to enrich run labels: {e}")
from modules.routes.routeHelpers import enrichRowsWithFkLabels, resolveMandateLabels, resolveInstanceLabels
runs = []
for r in pageRuns:
@ -286,14 +283,22 @@ def get_workflow_runs(
row.get("label")
or (wf.get("label") if isinstance(wf, dict) else None)
or wfId
or ""
)
row["mandateLabel"] = mandateLabelMap.get(row.get("mandateId"), row.get("mandateId") or "")
fiid = wf.get("featureInstanceId") if isinstance(wf, dict) else None
row["featureInstanceId"] = fiid
row["instanceLabel"] = instanceLabelMap.get(fiid, fiid or "")
runs.append(row)
enrichRowsWithFkLabels(
runs,
labelResolvers={
"mandateId": resolveMandateLabels,
"featureInstanceId": resolveInstanceLabels,
},
)
for row in runs:
row["instanceLabel"] = row.pop("featureInstanceIdLabel", None)
row["mandateLabel"] = row.pop("mandateIdLabel", None)
return {"runs": runs, "total": total, "limit": limit, "offset": offset}
@ -349,18 +354,15 @@ def get_workflow_metrics(
totalRuns = countResult.get("totalItems", 0) if isinstance(countResult, dict) else countResult.totalItems
runsByStatus: dict = {}
try:
statusValues = db.getDistinctColumnValues(AutoRun, "status", recordFilter=runBaseFilter)
for sv in (statusValues or []):
statusFilter = dict(runBaseFilter) if runBaseFilter else {}
statusFilter["status"] = sv
sr = db.getRecordsetPaginated(
AutoRun, pagination=PaginationParams(page=1, pageSize=1),
recordFilter=statusFilter,
)
runsByStatus[sv] = sr.get("totalItems", 0) if isinstance(sr, dict) else sr.totalItems
except Exception as e:
logger.warning(f"Failed to compute runsByStatus: {e}")
statusValues = db.getDistinctColumnValues(AutoRun, "status", recordFilter=runBaseFilter)
for sv in (statusValues or []):
statusFilter = dict(runBaseFilter) if runBaseFilter else {}
statusFilter["status"] = sv
sr = db.getRecordsetPaginated(
AutoRun, pagination=PaginationParams(page=1, pageSize=1),
recordFilter=statusFilter,
)
runsByStatus[sv] = sr.get("totalItems", 0) if isinstance(sr, dict) else sr.totalItems
totalTokens = 0
totalCredits = 0.0
@ -425,16 +427,7 @@ def get_system_workflows(
if mandateId:
recordFilter["mandateId"] = mandateId
paginationParams = None
if pagination:
try:
paginationDict = json.loads(pagination)
if paginationDict:
paginationDict = normalize_pagination_dict(paginationDict)
paginationParams = PaginationParams(**paginationDict)
except Exception:
pass
paginationParams = _parsePaginationOr400(pagination)
if not paginationParams:
paginationParams = PaginationParams(
page=1,
@ -452,28 +445,25 @@ def get_system_workflows(
totalItems = result.get("totalItems", 0) if isinstance(result, dict) else result.totalItems
totalPages = result.get("totalPages", 0) if isinstance(result, dict) else result.totalPages
mandateIds = list({w.get("mandateId") for w in pageItems if w.get("mandateId")})
instanceIds = list({w.get("featureInstanceId") for w in pageItems if w.get("featureInstanceId")})
from modules.routes.routeHelpers import enrichRowsWithFkLabels, resolveMandateLabels, resolveInstanceLabels
mandateLabelMap: dict = {}
instanceLabelMap: dict = {}
# Resolve featureCode in the same pass as instance labels — needs the full FI object
featureCodeMap: dict = {}
try:
rootIface = getRootInterface()
if mandateIds:
mandateMap = rootIface.getMandatesByIds(mandateIds)
for mid, m in mandateMap.items():
mandateLabelMap[mid] = getattr(m, "label", None) or getattr(m, "name", mid) or mid
if instanceIds:
from modules.interfaces.interfaceFeatures import getFeatureInterface
featureIface = getFeatureInterface(rootIface.db)
for iid in instanceIds:
fi = featureIface.getFeatureInstance(iid)
if fi:
instanceLabelMap[iid] = fi.label or iid
featureCodeMap[iid] = fi.featureCode
except Exception as e:
logger.warning(f"Failed to enrich workflow labels: {e}")
def _resolveInstanceLabelsWithFeatureCode(ids):
from modules.interfaces.interfaceDbApp import getRootInterface as _getRI
from modules.interfaces.interfaceFeatures import getFeatureInterface
rootIf = _getRI()
featureIf = getFeatureInterface(rootIf.db)
result = {}
for iid in ids:
fi = featureIf.getFeatureInstance(iid)
if fi:
result[iid] = fi.label or None
featureCodeMap[iid] = fi.featureCode
else:
logger.warning("getSystemWorkflows: feature-instance not found for id=%s", iid)
result[iid] = None
return result
userId = str(context.user.id) if context.user else None
adminMandateIds = []
@ -485,30 +475,23 @@ def get_system_workflows(
activeRunMap: dict = {}
runCountMap: dict = {}
lastStartedMap: dict = {}
if workflowIds:
try:
if db._ensureTableExists(AutoRun):
for wfId in workflowIds:
runs = db.getRecordset(AutoRun, recordFilter={"workflowId": wfId})
runCountMap[wfId] = len(runs)
for r in runs:
rDict = dict(r)
ts = rDict.get("sysCreatedAt")
if ts and (lastStartedMap.get(wfId) is None or ts > lastStartedMap.get(wfId)):
lastStartedMap[wfId] = ts
if rDict.get("status") in ("running", "paused"):
activeRunMap[wfId] = rDict.get("id")
except Exception as e:
logger.warning(f"Failed to enrich workflow run info: {e}")
if workflowIds and db._ensureTableExists(AutoRun):
for wfId in workflowIds:
runs = db.getRecordset(AutoRun, recordFilter={"workflowId": wfId})
runCountMap[wfId] = len(runs)
for r in runs:
rDict = dict(r)
ts = rDict.get("sysCreatedAt")
if ts and (lastStartedMap.get(wfId) is None or ts > lastStartedMap.get(wfId)):
lastStartedMap[wfId] = ts
if rDict.get("status") in ("running", "paused"):
activeRunMap[wfId] = rDict.get("id")
items = []
for w in pageItems:
row = dict(w)
wMandateId = row.get("mandateId")
wfId = row.get("id")
row["mandateLabel"] = mandateLabelMap.get(wMandateId, wMandateId or "")
row["instanceLabel"] = instanceLabelMap.get(row.get("featureInstanceId"), row.get("featureInstanceId") or "")
row["featureCode"] = featureCodeMap.get(row.get("featureInstanceId"), "")
row["isRunning"] = wfId in activeRunMap
row["activeRunId"] = activeRunMap.get(wfId)
row["runCount"] = runCountMap.get(wfId, 0)
@ -528,9 +511,20 @@ def get_system_workflows(
row["canExecute"] = False
row.pop("graph", None)
items.append(row)
enrichRowsWithFkLabels(
items,
labelResolvers={
"mandateId": resolveMandateLabels,
"featureInstanceId": _resolveInstanceLabelsWithFeatureCode,
},
)
for row in items:
row["instanceLabel"] = row.pop("featureInstanceIdLabel", None)
row["mandateLabel"] = row.pop("mandateIdLabel", None)
row["featureCode"] = featureCodeMap.get(row.get("featureInstanceId"))
return {
"items": items,
"pagination": {
@ -572,15 +566,26 @@ def delete_system_workflow(
try:
_cascadeDeleteAutoWorkflow(db, workflowId)
try:
from modules.shared.callbackRegistry import callbackRegistry
callbackRegistry.trigger("graphicalEditor.workflow.changed")
except Exception:
pass
except Exception as e:
logger.error(f"delete_system_workflow cascade failed: {e}")
raise HTTPException(status_code=500, detail=routeApiMsg(str(e)))
# Callback registry: log + propagate so listener bugs are visible.
# Cascade is already committed at this point — failure here is a side-effect
# bug (stale caches, missed notifications), never an "ignore silently" event.
try:
from modules.shared.callbackRegistry import callbackRegistry
callbackRegistry.trigger("graphicalEditor.workflow.changed")
except Exception as e:
logger.error(
f"delete_system_workflow: callbackRegistry.trigger failed for "
f"workflowId={workflowId}: {e}"
)
raise HTTPException(
status_code=500,
detail=routeApiMsg(f"Workflow deleted but post-delete callback failed: {e}"),
)
return {"success": True, "id": workflowId}
@ -591,18 +596,34 @@ def delete_system_workflow(
def _enrichedFilterValues(
db, context: RequestContext, modelClass, scopeFilter, column: str,
):
"""Return distinct filter values (IDs) for FK columns or delegate to DB-level DISTINCT.
FK columns return raw IDs the frontend resolves them to labels via fkCache.
Returns JSONResponse to bypass FastAPI response_model validation."""
"""Return distinct filter values for FormGeneratorTable column filters.
For FK columns (mandateId, featureInstanceId) returns ``{value, label}``
objects so the frontend can display human-readable labels in the dropdown
without a separate source fk fetch. Non-FK columns return ``string | null``.
``null`` is included when rows with NULL/empty values exist (enables the
"(Leer)" filter option).
Returns JSONResponse to bypass FastAPI response_model validation.
"""
from fastapi.responses import JSONResponse
from modules.routes.routeHelpers import resolveMandateLabels, resolveInstanceLabels
if column in ("mandateLabel", "mandateId"):
baseFilter = scopeFilter(context)
recordFilter = dict(baseFilter) if baseFilter else {}
if modelClass == AutoWorkflow:
recordFilter["isTemplate"] = False
items = db.getRecordset(modelClass, recordFilter=recordFilter or None, fieldFilter=["mandateId"]) or []
mandateIds = sorted({r.get("mandateId") for r in items if r.get("mandateId")})
return JSONResponse(content=mandateIds)
allVals = {r.get("mandateId") for r in items}
mandateIds = sorted(v for v in allVals if v)
hasEmpty = None in allVals or "" in allVals
labelMap = resolveMandateLabels(mandateIds) if mandateIds else {}
result = [{"value": mid, "label": labelMap.get(mid) or f"NA({mid})"} for mid in mandateIds]
if hasEmpty:
result.append(None)
return JSONResponse(content=result)
if column in ("instanceLabel", "featureInstanceId"):
baseFilter = scopeFilter(context)
@ -610,15 +631,24 @@ def _enrichedFilterValues(
if modelClass == AutoWorkflow:
recordFilter["isTemplate"] = False
items = db.getRecordset(modelClass, recordFilter=recordFilter or None, fieldFilter=["featureInstanceId"]) or []
instanceIds = sorted({r.get("featureInstanceId") for r in items if r.get("featureInstanceId")})
allVals = {r.get("featureInstanceId") for r in items}
instanceIds = sorted(v for v in allVals if v)
hasEmpty = None in allVals or "" in allVals
else:
items = db.getRecordset(modelClass, recordFilter=recordFilter or None, fieldFilter=["workflowId"]) or []
wfIds = list({r.get("workflowId") for r in items if r.get("workflowId")})
instanceIds = []
hasEmpty = False
if wfIds and db._ensureTableExists(AutoWorkflow):
wfs = db.getRecordset(AutoWorkflow, recordFilter={"id": wfIds}, fieldFilter=["featureInstanceId"]) or []
instanceIds = sorted({w.get("featureInstanceId") for w in wfs if w.get("featureInstanceId")})
return JSONResponse(content=instanceIds)
allVals = {w.get("featureInstanceId") for w in wfs}
instanceIds = sorted(v for v in allVals if v)
hasEmpty = None in allVals or "" in allVals
labelMap = resolveInstanceLabels(instanceIds) if instanceIds else {}
result = [{"value": iid, "label": labelMap.get(iid) or f"NA({iid})"} for iid in instanceIds]
if hasEmpty:
result.append(None)
return JSONResponse(content=result)
if column == "workflowLabel":
baseFilter = scopeFilter(context)
@ -626,9 +656,12 @@ def _enrichedFilterValues(
items = db.getRecordset(modelClass, recordFilter=recordFilter or None, fieldFilter=["workflowId", "label"]) or []
labels = set()
wfIds = set()
hasEmpty = False
for r in items:
if r.get("label"):
labels.add(r["label"])
elif not r.get("workflowId"):
hasEmpty = True
if r.get("workflowId"):
wfIds.add(r["workflowId"])
if wfIds and db._ensureTableExists(AutoWorkflow):
@ -636,7 +669,10 @@ def _enrichedFilterValues(
for wf in wfs:
if wf.get("label"):
labels.add(wf["label"])
return JSONResponse(content=sorted(labels, key=lambda v: v.lower()))
result = sorted(labels, key=lambda v: v.lower())
if hasEmpty:
result.append(None)
return JSONResponse(content=result)
baseFilter = scopeFilter(context)
recordFilter = dict(baseFilter) if baseFilter else {}

View file

@ -592,7 +592,7 @@ _DATA_SOURCE_TOOLS = {"browseDataSource", "searchDataSource", "downloadFromDataS
_DECISION_TOOLS = {"writeFile", "replaceInFile"}
def _classifyToolResult(
def classifyToolResult(
tc: ToolCallRequest, result: ToolResult
) -> Optional[Dict[str, Any]]:
"""Classify a successful tool result into a RoundMemory dict.

View file

@ -578,7 +578,7 @@ class AgentService:
def _createPersistRoundMemoryFn(self, workflowId: str):
"""Create callback that persists RoundMemory entries after tool execution."""
from modules.serviceCenter.services.serviceAgent.agentLoop import _classifyToolResult
from modules.serviceCenter.services.serviceAgent.agentLoop import classifyToolResult
from modules.datamodels.datamodelKnowledge import RoundMemory
async def _persistRoundMemory(
@ -593,7 +593,7 @@ class AgentService:
for tc, result in zip(toolCalls, results):
if not result.success:
continue
classified = _classifyToolResult(tc, result)
classified = classifyToolResult(tc, result)
if not classified:
continue

View file

@ -4,7 +4,7 @@ from typing import List
import logging
from modules.datamodels.datamodelExtraction import ContentExtracted, ContentPart, ExtractionOptions, MergeStrategy
from modules.datamodels.datamodelUdm import _applyUdmOutputDetail
from modules.datamodels.datamodelUdm import applyUdmOutputDetail
from .subUtils import makeId
from .subRegistry import ExtractorRegistry, ChunkerRegistry
@ -54,7 +54,7 @@ def runExtraction(extractorRegistry: ExtractorRegistry, chunkerRegistry: Chunker
{**extractCtx, "extractionId": ec_id},
precomputedParts=parts,
)
extracted.udm = _applyUdmOutputDetail(udm, options.outputDetail)
extracted.udm = applyUdmOutputDetail(udm, options.outputDetail)
return extracted

View file

@ -47,15 +47,15 @@ class Extractor:
precomputedParts: Optional[List[ContentPart]] = None,
) -> "UdmDocument":
"""Build UDM from extracted parts (default: heuristic grouping). Override for format-specific trees."""
from modules.datamodels.datamodelUdm import _contentPartsToUdm, _mimeToUdmSourceType
from modules.datamodels.datamodelUdm import contentPartsToUdm, mimeToUdmSourceType
from modules.datamodels.datamodelExtraction import ContentExtracted
from .subUtils import makeId
parts = precomputedParts if precomputedParts is not None else self.extract(fileBytes, context)
eid = context.get("extractionId") or makeId()
extracted = ContentExtracted(id=eid, parts=parts)
src = _mimeToUdmSourceType(context.get("mimeType", ""), context.get("fileName", ""))
return _contentPartsToUdm(extracted, src, context.get("fileName", ""))
src = mimeToUdmSourceType(context.get("mimeType", ""), context.get("fileName", ""))
return contentPartsToUdm(extracted, src, context.get("fileName", ""))
def getSupportedExtensions(self) -> list[str]:
"""Return list of supported file extensions (including dots)."""

View file

@ -253,9 +253,9 @@ class KnowledgeService:
)
if resolvedMandateId:
try:
from modules.interfaces.interfaceDbBilling import _getRootInterface
from modules.interfaces.interfaceDbBilling import getRootInterface
_getRootInterface().reconcileMandateStorageBilling(str(resolvedMandateId))
getRootInterface().reconcileMandateStorageBilling(str(resolvedMandateId))
except Exception as ex:
logger.warning("reconcileMandateStorageBilling after index failed: %s", ex)
return index

View file

@ -19,7 +19,7 @@ from modules.datamodels.datamodelSubscription import (
SubscriptionStatusEnum,
BillingPeriodEnum,
OPERATIVE_STATUSES,
_getPlan,
getPlan,
_getSelectablePlans,
)
from modules.interfaces.interfaceDbSubscription import (
@ -117,7 +117,7 @@ class SubscriptionService:
return _getSelectablePlans()
def getPlan(self, planKey: str) -> Optional[SubscriptionPlan]:
return _getPlan(planKey)
return getPlan(planKey)
# =========================================================================
# T1/T2: Plan activation (creates PENDING, returns checkout URL)
@ -132,7 +132,7 @@ class SubscriptionService:
Cleans up any existing PENDING/SCHEDULED for this mandate first (by ID)."""
mid = mandateId or self.mandateId
plan = _getPlan(planKey)
plan = getPlan(planKey)
if not plan:
raise ValueError(f"Unknown plan: {planKey}")
@ -488,7 +488,7 @@ class SubscriptionService:
result = self._interface.updateFields(subscriptionId, {"recurring": False})
self.invalidateCache(mandateId)
plan = _getPlan(sub.get("planKey", ""))
plan = getPlan(sub.get("planKey", ""))
_notifySubscriptionChange(mandateId, "cancelled", plan, subscriptionRecord=sub, platformUrl=pUrl)
return result
@ -554,7 +554,7 @@ class SubscriptionService:
mandateId = sub["mandateId"]
self.invalidateCache(mandateId)
plan = _getPlan(sub.get("planKey", ""))
plan = getPlan(sub.get("planKey", ""))
_notifySubscriptionChange(mandateId, "force_cancelled", plan, subscriptionRecord=sub, platformUrl=pUrl)
return result
@ -573,8 +573,8 @@ class SubscriptionService:
)
self.invalidateCache(sub["mandateId"])
plan = _getPlan(sub.get("planKey", ""))
successorPlan = _getPlan(plan.successorPlanKey) if plan and plan.successorPlanKey else None
plan = getPlan(sub.get("planKey", ""))
successorPlan = getPlan(plan.successorPlanKey) if plan and plan.successorPlanKey else None
_notifySubscriptionChange(sub["mandateId"], "trial_expired", successorPlan)
logger.info("Trial expired for subscription %s", subscriptionId)
@ -690,7 +690,7 @@ def _buildInvoiceSummaryHtml(
) -> str:
"""Build an HTML invoice summary block for inclusion in the activation email."""
import html as htmlmod
from modules.interfaces.interfaceDbSubscription import _getRootInterface as getSubRootInterface
from modules.interfaces.interfaceDbSubscription import getRootInterface as getSubRootInterface
subInterface = getSubRootInterface()
userCount = subInterface.countActiveUsers(mandateId)

View file

@ -31,7 +31,7 @@ class AiAuditLogger:
if self._initialized:
return
try:
from modules.connectors.connectorDbPostgre import _get_cached_connector
from modules.connectors.connectorDbPostgre import getCachedConnector
from modules.shared.configuration import APP_CONFIG
from modules.datamodels.datamodelAiAudit import AiAuditLogEntry
@ -40,7 +40,7 @@ class AiAuditLogger:
dbPassword = APP_CONFIG.get("DB_PASSWORD_SECRET")
dbPort = int(APP_CONFIG.get("DB_PORT", 5432))
self._db = _get_cached_connector(
self._db = getCachedConnector(
dbHost=dbHost,
dbDatabase="poweron_app",
dbUser=dbUser,

View file

@ -33,9 +33,9 @@ class AttributeDefinition(BaseModel):
visible: bool = True
order: int = 0
placeholder: Optional[str] = None
fkSource: Optional[str] = None
fkDisplayField: Optional[str] = None
fkModel: Optional[str] = None # DB table / Pydantic model name for server-side FK sort (JOIN)
# Backend adds ``{name}Label`` on rows; FormGeneratorTable reads ``displayField`` (e.g. ``userId`` → ``userIdLabel``).
displayField: Optional[str] = None
fkModel: Optional[str] = None # Pydantic / resolver name (Mandate, User, …) for server-side FK sort + label enrichment
# ------------------------------------------------------------------
# Render hints for the frontend FormGenerator / Tables.
# ``frontendFormat`` is an Excel-style format string the FE applies to numeric,
@ -104,6 +104,16 @@ def _mergedAttributeLabels(modelClass: Type[BaseModel]) -> Dict[str, str]:
return merged
def _mergedFieldJsonExtra(field) -> Dict[str, Any]:
"""Merge Pydantic FieldInfo.extra and json_schema_extra (subclass fields override)."""
merged: Dict[str, Any] = {}
if hasattr(field, "extra") and isinstance(field.extra, dict):
merged.update(field.extra)
if hasattr(field, "json_schema_extra") and isinstance(field.json_schema_extra, dict):
merged.update(field.json_schema_extra)
return merged
def getModelLabel(modelName: str) -> str:
"""Get the label for a model via resolveText()."""
modelData = _getModelLabelEntry(modelName)
@ -145,9 +155,6 @@ def getModelAttributeDefinitions(modelClass: Type[BaseModel] = None, userLanguag
frontend_required = field.is_required()
frontend_options = None
frontend_visible = True # Default visible
frontend_fk_source = None # FK dropdown source (e.g., "/api/users/")
frontend_fk_display_field = None # Which field of the FK target to display (e.g., "username", "name")
fk_model = None # Same as fk_model in json_schema_extra — backend JOIN target table name
# Render hints (cf. AttributeDefinition.frontendFormat / frontendFormatLabels).
# Optional Excel-like format string ("R:#'###.00") plus translatable label tokens
# for boolean/categorical render (e.g. ["Ja","-","Nein"] resolved via @i18nModel).
@ -203,14 +210,6 @@ def getModelAttributeDefinitions(modelClass: Type[BaseModel] = None, userLanguag
# Extract frontend_visible (default True, can be set to False to hide field)
if "frontend_visible" in json_extra:
frontend_visible = json_extra.get("frontend_visible", True)
# Extract frontend_fk_source for FK dropdown references
if "frontend_fk_source" in json_extra:
frontend_fk_source = json_extra.get("frontend_fk_source")
# Extract frontend_fk_display_field - which field of FK target to display
if "frontend_fk_display_field" in json_extra:
frontend_fk_display_field = json_extra.get("frontend_fk_display_field")
if "fk_model" in json_extra:
fk_model = json_extra.get("fk_model")
if frontend_format is None and "frontend_format" in json_extra:
frontend_format = json_extra.get("frontend_format")
if frontend_format_labels is None and "frontend_format_labels" in json_extra:
@ -273,7 +272,6 @@ def getModelAttributeDefinitions(modelClass: Type[BaseModel] = None, userLanguag
pass
# Hide "id" fields by default unless explicitly set to visible
# Also hide fields ending with "Id" that are FK references (unless they have fkSource)
if name == "id":
frontend_visible = False # Never show primary key in forms/tables
@ -291,15 +289,17 @@ def getModelAttributeDefinitions(modelClass: Type[BaseModel] = None, userLanguag
"options": _resolveOptionLabels(frontend_options),
"default": field_default,
}
# Add FK source for dropdown rendering if specified
if frontend_fk_source:
attr_def["fkSource"] = frontend_fk_source
# Also add display field if specified (which field of FK target to show)
if frontend_fk_display_field:
attr_def["fkDisplayField"] = frontend_fk_display_field
if fk_model:
attr_def["fkModel"] = fk_model
mergedExtra = _mergedFieldJsonExtra(field)
fkModelName = mergedExtra.get("fk_model")
fkTarget = mergedExtra.get("fk_target")
if not fkModelName and isinstance(fkTarget, dict) and fkTarget.get("table"):
fkModelName = fkTarget.get("table")
hasFk = bool(fkModelName) or (isinstance(fkTarget, dict) and bool(fkTarget.get("table")))
if hasFk:
attr_def["displayField"] = f"{name}Label"
if fkModelName:
attr_def["fkModel"] = fkModelName
# Render hints (Excel-like format string + i18n-resolved label tokens).
# Labels are resolved server-side via resolveText() so the FE renders them

View file

@ -39,7 +39,7 @@ def registerDatabase(dbName: str, configPrefix: str = "DB") -> None:
logger.debug(f"Database registered: {dbName} (configPrefix={configPrefix})")
def _getRegisteredDatabases() -> Dict[str, str]:
def getRegisteredDatabases() -> Dict[str, str]:
"""Return snapshot of all registered databases {dbName: configPrefix}."""
with _lock:
return dict(_registry)

View file

@ -19,7 +19,7 @@ def _resolveLogDir() -> str:
logDir = os.path.join(gatewayDir, logDir)
return logDir
def _ensureDir(path: str) -> None:
def ensureDir(path: str) -> None:
"""Create directory if it does not exist."""
os.makedirs(path, exist_ok=True)
@ -27,7 +27,7 @@ def _isDebugEnabled() -> bool:
"""Check if debug workflow logging is enabled."""
return APP_CONFIG.get("APP_DEBUG_CHAT_WORKFLOW_ENABLED", False)
def _getBaseDebugDir() -> str:
def getBaseDebugDir() -> str:
"""Get the base debug directory path from configuration."""
# Check if custom debug directory is configured
customDebugDir = APP_CONFIG.get("APP_DEBUG_CHAT_WORKFLOW_DIR", None)
@ -47,7 +47,7 @@ def _getBaseDebugDir() -> str:
def _getDebugDir() -> str:
"""Get the debug prompts directory path from configuration."""
baseDebugDir = _getBaseDebugDir()
baseDebugDir = getBaseDebugDir()
return os.path.join(baseDebugDir, 'prompts')
def _getNextSequenceNumber() -> int:
@ -79,7 +79,7 @@ def writeDebugFile(content: str, fileType: str, documents: Optional[List] = None
return
debugDir = _getDebugDir()
_ensureDir(debugDir)
ensureDir(debugDir)
seqNum = _getNextSequenceNumber()
ts = datetime.now(UTC).strftime('%Y%m%d-%H%M%S')
@ -128,8 +128,8 @@ def debugLogToFile(message: str, context: str = "DEBUG") -> None:
return
# Get debug directory (use base debug dir, not prompts subdirectory)
debug_dir = _getBaseDebugDir()
_ensureDir(debug_dir)
debug_dir = getBaseDebugDir()
ensureDir(debug_dir)
# Create debug file path
debug_file = os.path.join(debug_dir, "debug_workflow.log")

View file

@ -14,8 +14,8 @@ for the *target* side. By collecting all such declarations we know which DB
each table lives in no extra registration step needed.
Usage:
from modules.shared.fkRegistry import _getFkRelationships
rels = _getFkRelationships()
from modules.shared.fkRegistry import getFkRelationships
rels = getFkRelationships()
"""
import importlib
@ -25,7 +25,7 @@ import threading
from dataclasses import dataclass
from typing import Dict, List, Optional
from modules.datamodels.datamodelBase import _MODEL_REGISTRY
from modules.datamodels.datamodelBase import MODEL_REGISTRY
logger = logging.getLogger(__name__)
@ -33,7 +33,7 @@ _modelsLoaded = False
def _ensureModelsLoaded() -> None:
"""Import all datamodel modules so that __init_subclass__ fills _MODEL_REGISTRY.
"""Import all datamodel modules so that __init_subclass__ fills MODEL_REGISTRY.
In a running server the interfaces import the datamodels automatically.
This function makes FK-Discovery work in standalone / test contexts too.
@ -96,7 +96,7 @@ def _buildTableToDbMap() -> Dict[str, str]:
_ensureModelsLoaded()
mapping: Dict[str, str] = {}
for modelCls in _MODEL_REGISTRY.values():
for modelCls in MODEL_REGISTRY.values():
for fieldInfo in modelCls.model_fields.values():
extra = fieldInfo.json_schema_extra
if not isinstance(extra, dict):
@ -109,11 +109,11 @@ def _buildTableToDbMap() -> Dict[str, str]:
if table and db:
mapping[table] = db
unmapped = [name for name in _MODEL_REGISTRY if name not in mapping]
unmapped = [name for name in MODEL_REGISTRY if name not in mapping]
if unmapped:
try:
from modules.shared.dbRegistry import _getRegisteredDatabases
_resolveUnmappedTablesFromCatalog(mapping, unmapped, _getRegisteredDatabases())
from modules.shared.dbRegistry import getRegisteredDatabases
_resolveUnmappedTablesFromCatalog(mapping, unmapped, getRegisteredDatabases())
except Exception as e:
logger.warning(f"Could not resolve unmapped tables from catalog: {e}")
@ -175,7 +175,7 @@ def _discoverFkRelationships() -> List[FkRelationship]:
tableToDb = _buildTableToDbMap()
relationships: List[FkRelationship] = []
for tableName, modelCls in _MODEL_REGISTRY.items():
for tableName, modelCls in MODEL_REGISTRY.items():
sourceDb = tableToDb.get(tableName)
if sourceDb is None:
continue
@ -211,7 +211,7 @@ def _discoverFkRelationships() -> List[FkRelationship]:
return relationships
def _getFkRelationships() -> List[FkRelationship]:
def getFkRelationships() -> List[FkRelationship]:
"""Return the cached list of FK relationships (discovered on first call)."""
global _cachedRelationships
with _lock:

View file

@ -87,7 +87,7 @@ class _I18nRegistryEntry:
_REGISTRY: Dict[str, _I18nRegistryEntry] = {}
# ---------------------------------------------------------------------------
# Translation cache (populated at boot by _loadCache)
# Translation cache (populated at boot by loadCache)
# ---------------------------------------------------------------------------
_CACHE: Dict[str, Dict[str, str]] = {}
@ -245,7 +245,7 @@ def _extractDocstringFirstLine(cls: type) -> str:
# Language setter (called by middleware)
# ---------------------------------------------------------------------------
def _setLanguage(lang: str):
def setLanguage(lang: str):
"""Set the language for the current request context."""
_CURRENT_LANGUAGE.set(lang)
@ -558,13 +558,13 @@ def _registerAccountingConnectorLabels():
"""
added = 0
try:
from modules.features.trustee.accounting.accountingRegistry import _getAccountingRegistry
from modules.features.trustee.accounting.accountingRegistry import getAccountingRegistry
except ImportError:
logger.debug("i18n accounting connectors: registry not importable")
return
try:
registry = _getAccountingRegistry()
registry = getAccountingRegistry()
except Exception as e:
logger.warning("i18n accounting connectors: registry init failed: %s", e)
return
@ -650,7 +650,7 @@ def _registerDatamodelOptionLabels():
# Boot: sync registry to DB
# ---------------------------------------------------------------------------
async def _syncRegistryToDb():
async def syncRegistryToDb():
"""Boot hook: write all registered keys into UiLanguageSet(xx).
1. Scans route files for routeApiMsg("") to eagerly register api.* keys.
@ -674,10 +674,10 @@ async def _syncRegistryToDb():
from modules.datamodels.datamodelUiLanguage import UiLanguageSet
from modules.shared.configuration import APP_CONFIG
from modules.connectors.connectorDbPostgre import _get_cached_connector
from modules.connectors.connectorDbPostgre import getCachedConnector
from modules.shared.timeUtils import getUtcTimestamp
db = _get_cached_connector(
db = getCachedConnector(
dbHost=APP_CONFIG.get("DB_HOST", "localhost"),
dbDatabase="poweron_management",
dbUser=APP_CONFIG.get("DB_USER"),
@ -764,7 +764,7 @@ async def _syncRegistryToDb():
# Boot: load translation cache
# ---------------------------------------------------------------------------
async def _loadCache():
async def loadCache():
"""Boot hook: load all UiLanguageSets into the in-memory cache.
Also persistently repairs placeholder mismatches in the DB:
@ -778,9 +778,9 @@ async def _loadCache():
"""
from modules.datamodels.datamodelUiLanguage import UiLanguageSet
from modules.shared.configuration import APP_CONFIG
from modules.connectors.connectorDbPostgre import _get_cached_connector
from modules.connectors.connectorDbPostgre import getCachedConnector
db = _get_cached_connector(
db = getCachedConnector(
dbHost=APP_CONFIG.get("DB_HOST", "localhost"),
dbDatabase="poweron_management",
dbUser=APP_CONFIG.get("DB_USER"),

View file

@ -109,7 +109,7 @@ def _resolveAllRecipients(mandateId: str) -> List[str]:
# ============================================================================
def _resolveMandateName(mandateId: str) -> str:
def resolveMandateName(mandateId: str) -> str:
"""Return the human-readable mandate name (label or name), falling back to a short ID."""
try:
from modules.datamodels.datamodelUam import Mandate
@ -141,7 +141,7 @@ def _getOperatorInfo() -> Dict[str, str]:
return {"companyName": "", "address": "", "vatNumber": ""}
def _renderHtmlEmail(
def renderHtmlEmail(
headline: str,
bodyParagraphs: List[str],
mandateName: str,
@ -256,8 +256,8 @@ def notifyMandateAdmins(
)
return 0
mandateName = _resolveMandateName(mandateId)
htmlMessage = _renderHtmlEmail(headline, bodyParagraphs, mandateName, footerNote, rawHtmlBlock)
mandateName = resolveMandateName(mandateId)
htmlMessage = renderHtmlEmail(headline, bodyParagraphs, mandateName, footerNote, rawHtmlBlock)
messaging = getMessagingInterface()
successCount = 0

View file

@ -36,7 +36,7 @@ _DEFAULT_REQUEST_TZ = "UTC"
_CURRENT_TIMEZONE: ContextVar[str] = ContextVar("user_tz", default=_DEFAULT_REQUEST_TZ)
def _setRequestTimezone(tzName: str) -> None:
def setRequestTimezone(tzName: str) -> None:
"""Set the current request's user timezone (called by gateway middleware).
Validates against zoneinfo; falls back to UTC for unknown/invalid names so

View file

@ -16,8 +16,8 @@ import psycopg2
import psycopg2.extras
from modules.shared.configuration import APP_CONFIG
from modules.shared.dbRegistry import _getRegisteredDatabases
from modules.shared.fkRegistry import _getFkRelationships, FkRelationship
from modules.shared.dbRegistry import getRegisteredDatabases
from modules.shared.fkRegistry import getFkRelationships, FkRelationship
logger = logging.getLogger(__name__)
@ -94,7 +94,7 @@ class OrphanCleanupRefused(Exception):
def _getConnection(dbName: str):
"""Open a psycopg2 connection to the given registered database."""
registeredDbs = _getRegisteredDatabases()
registeredDbs = getRegisteredDatabases()
configPrefix = registeredDbs.get(dbName)
if configPrefix is None:
raise ValueError(f"Database '{dbName}' is not registered.")
@ -126,7 +126,7 @@ def _getTableStats(dbFilter: Optional[str] = None) -> List[dict]:
Returns a list of TableStats dicts, optionally filtered by database name.
"""
registeredDbs = _getRegisteredDatabases()
registeredDbs = getRegisteredDatabases()
if dbFilter:
registeredDbs = {k: v for k, v in registeredDbs.items() if k == dbFilter}
@ -297,7 +297,7 @@ def _scanOrphans(dbFilter: Optional[str] = None) -> List[dict]:
return [r for r in cached if r["sourceDb"] == dbFilter]
return list(cached)
relationships = _getFkRelationships()
relationships = getFkRelationships()
if dbFilter:
relationships = [r for r in relationships if r.sourceDb == dbFilter]
@ -450,7 +450,7 @@ def _cleanOrphans(db: str, table: str, column: str, force: bool = False) -> int:
These guards prevent catastrophic wipes (e.g. emptying FeatureInstance because
the User table happened to be empty in the wrong DB at scan time).
"""
relationships = _getFkRelationships()
relationships = getFkRelationships()
rel = next(
(r for r in relationships
if r.sourceDb == db and r.sourceTable == table and r.sourceColumn == column),
@ -643,7 +643,7 @@ def _listOrphans(
the SysAdmin UI can present them as a download (CSV/JSON) for review before
the destructive cleanup is triggered.
"""
relationships = _getFkRelationships()
relationships = getFkRelationships()
rel = next(
(r for r in relationships
if r.sourceDb == db and r.sourceTable == table and r.sourceColumn == column),

View file

@ -359,7 +359,7 @@ NAVIGATION_SECTIONS = [
]
def _objectKeyToUiComponent(objectKey: str) -> str:
def objectKeyToUiComponent(objectKey: str) -> str:
"""
Convert objectKey to uiComponent.

View file

@ -26,7 +26,7 @@ from modules.workflows.automation2.executors import (
PauseForHumanTaskError,
PauseForEmailWaitError,
)
from modules.features.graphicalEditor.portTypes import _normalizeToSchema
from modules.features.graphicalEditor.portTypes import normalizeToSchema
from modules.features.graphicalEditor.nodeDefinitions import STATIC_NODE_TYPES
from modules.serviceCenter.services.serviceSubscription.mainServiceSubscription import SubscriptionInactiveException as _SubscriptionInactiveException
from modules.serviceCenter.services.serviceBilling.mainServiceBilling import BillingContextError as _BillingContextError
@ -102,11 +102,11 @@ def _allMergePredecessorsReady(
def _normalizeResult(result: Any, nodeType: str) -> Any:
"""Apply _normalizeToSchema if the node has a declared output schema."""
"""Apply normalizeToSchema if the node has a declared output schema."""
schema = _outputSchemaForNode(nodeType)
if schema and schema != "Transit" and isinstance(result, dict):
try:
return _normalizeToSchema(result, schema)
return normalizeToSchema(result, schema)
except Exception as e:
logger.warning(f"_normalizeResult failed for nodeType={nodeType}, schema={schema}: {e}")
return result
@ -375,7 +375,7 @@ async def executeGraph(
schema = _outputSchemaForNode(resumedType)
if schema and schema != "Transit":
try:
initialNodeOutputs[startAfterNodeId] = _normalizeToSchema(resumedOutput, schema)
initialNodeOutputs[startAfterNodeId] = normalizeToSchema(resumedOutput, schema)
except Exception as valErr:
logger.warning("executeGraph resume: schema validation failed for %s: %s", startAfterNodeId, valErr)
if not runId and automation2_interface and workflowId and not is_resume:
@ -818,8 +818,8 @@ async def executeGraph(
) if _wfObj else {}
_shouldNotify = _wfDict.get("notifyOnFailure", True) if _wfDict else True
if _shouldNotify:
from modules.workflows.scheduler.mainScheduler import _notifyRunFailed
_notifyRunFailed(
from modules.workflows.scheduler.mainScheduler import notifyRunFailed
notifyRunFailed(
workflowId or "", runId or "", str(e),
mandateId=mandateId,
workflowLabel=_wfDict.get("label"),

View file

@ -11,7 +11,7 @@ from typing import Any, Dict, Optional
from modules.features.graphicalEditor.portTypes import (
_normalizeError,
_normalizeToSchema,
normalizeToSchema,
)
from modules.serviceCenter.services.serviceSubscription.mainServiceSubscription import SubscriptionInactiveException as _SubscriptionInactiveException
from modules.serviceCenter.services.serviceBilling.mainServiceBilling import BillingContextError as _BillingContextError
@ -407,7 +407,7 @@ class ActionNodeExecutor:
"count": int(data_dict.get("count", 0)),
}
_attachConnectionProvenance(cr_out, resolvedParams, outputSchema, chatService, self.services)
return _normalizeToSchema(cr_out, outputSchema)
return normalizeToSchema(cr_out, outputSchema)
_attachConnectionProvenance(out, resolvedParams, outputSchema, chatService, self.services)
return _normalizeToSchema(out, outputSchema)
return normalizeToSchema(out, outputSchema)

View file

@ -4,7 +4,7 @@
import logging
from typing import Any, Dict
from modules.features.graphicalEditor.portTypes import _unwrapTransit, _wrapTransit
from modules.features.graphicalEditor.portTypes import unwrapTransit, wrapTransit
logger = logging.getLogger(__name__)
@ -52,7 +52,7 @@ class DataExecutor:
if inp is None:
return {"items": [], "count": 0, "_success": True}
data = _unwrapTransit(inp) if isinstance(inp, dict) and inp.get("_transit") else inp
data = unwrapTransit(inp) if isinstance(inp, dict) and inp.get("_transit") else inp
if mode == "collect":
items = [data] if data is not None else []
@ -77,7 +77,7 @@ class DataExecutor:
) -> Any:
"""Filter items by condition expression and/or UDM content type. Returns Transit envelope."""
inp = self._getInput(inputSources, nodeOutputs)
data = _unwrapTransit(inp) if isinstance(inp, dict) and inp.get("_transit") else inp
data = unwrapTransit(inp) if isinstance(inp, dict) and inp.get("_transit") else inp
params = node.get("parameters") or {}
condition = params.get("condition", "")
udmContentType = params.get("udmContentType", "")
@ -102,7 +102,7 @@ class DataExecutor:
elif isinstance(data, list):
filteredData = filtered
return _wrapTransit(filteredData, {
return wrapTransit(filteredData, {
"originalCount": originalCount,
"filteredCount": len(filtered),
})
@ -116,7 +116,7 @@ class DataExecutor:
) -> Any:
"""Deterministic consolidation: table, concat, merge, csvJoin."""
inp = self._getInput(inputSources, nodeOutputs)
data = _unwrapTransit(inp) if isinstance(inp, dict) and inp.get("_transit") else inp
data = unwrapTransit(inp) if isinstance(inp, dict) and inp.get("_transit") else inp
params = node.get("parameters") or {}
mode = params.get("mode", "table")
separator = params.get("separator", "\n")

View file

@ -4,7 +4,7 @@
import logging
from typing import Any, Dict
from modules.features.graphicalEditor.portTypes import _wrapTransit, _unwrapTransit
from modules.features.graphicalEditor.portTypes import wrapTransit, unwrapTransit
logger = logging.getLogger(__name__)
@ -68,8 +68,8 @@ class FlowExecutor:
condParam = (node.get("parameters") or {}).get("condition")
inp = self._getInputData(nodeId, {nodeId: inputSources}, nodeOutputs)
ok = self._evalConditionParam(condParam, nodeOutputs)
return _wrapTransit(
_unwrapTransit(inp) if inp else inp,
return wrapTransit(
unwrapTransit(inp) if inp else inp,
{"branch": 0 if ok else 1, "conditionResult": ok},
)
@ -215,12 +215,12 @@ class FlowExecutor:
inp = self._getInputData(nodeId, {nodeId: inputSources}, nodeOutputs)
for i, c in enumerate(cases):
if self._evalSwitchCase(value, c):
return _wrapTransit(
_unwrapTransit(inp) if inp else inp,
return wrapTransit(
unwrapTransit(inp) if inp else inp,
{"match": i, "value": value},
)
return _wrapTransit(
_unwrapTransit(inp) if inp else inp,
return wrapTransit(
unwrapTransit(inp) if inp else inp,
{"match": -1, "value": value},
)
@ -339,7 +339,7 @@ class FlowExecutor:
for portIdx, (srcId, srcOut) in inputSources.items():
out = nodeOutputs.get(srcId)
if out is not None:
inputs[portIdx] = _unwrapTransit(out)
inputs[portIdx] = unwrapTransit(out)
first = None
merged: Dict = {}

View file

@ -131,9 +131,9 @@ def parse_graph_defined_schema(node: Dict[str, Any], parameter_key: str) -> Opti
Build a JSON-serializable port schema dict from graph parameters (e.g. form ``fields``).
Used by tooling and future API surfaces; mirrors ``parse_graph_defined_output_schema`` logic.
"""
from modules.features.graphicalEditor.portTypes import _derive_form_payload_schema_from_param
from modules.features.graphicalEditor.portTypes import deriveFormPayloadSchemaFromParam
sch = _derive_form_payload_schema_from_param(node, parameter_key)
sch = deriveFormPayloadSchemaFromParam(node, parameter_key)
if sch is None:
return None
return {
@ -371,8 +371,8 @@ def resolveParameterReferences(value: Any, nodeOutputs: Dict[str, Any]) -> Any:
return resolveParameterReferences(inner, nodeOutputs)
if value.get("type") == "system":
variable = value.get("variable", "")
from modules.features.graphicalEditor.portTypes import _resolveSystemVariable
return _resolveSystemVariable(variable, nodeOutputs.get("_context", {}))
from modules.features.graphicalEditor.portTypes import resolveSystemVariable
return resolveSystemVariable(variable, nodeOutputs.get("_context", {}))
return {k: resolveParameterReferences(v, nodeOutputs) for k, v in value.items()}
if isinstance(value, str):

View file

@ -332,7 +332,7 @@ def _cronToIntervalSeconds(cron: str):
return None
def _notifyRunFailed(workflowId: str, runId: str, error: str, mandateId: str = None, workflowLabel: str = None) -> None:
def notifyRunFailed(workflowId: str, runId: str, error: str, mandateId: str = None, workflowLabel: str = None) -> None:
"""Notify on workflow run failure: emit event, create in-app notification, trigger email subscription."""
try:
eventManager.emit("graphicalEditor.run.failed", {

View file

@ -21,8 +21,8 @@ def db():
@pytest.fixture(scope="session")
def demoConfig():
"""The investor demo config instance."""
from modules.demoConfigs import _getDemoConfigByCode
cfg = _getDemoConfigByCode("investor-demo-2026")
from modules.demoConfigs import getDemoConfigByCode
cfg = getDemoConfigByCode("investor-demo-2026")
assert cfg is not None, "Demo config 'investor-demo-2026' not found — check modules/demoConfigs/"
return cfg

View file

@ -15,25 +15,25 @@ class TestDemoConfigDiscovery:
"""Test the auto-discovery module (no HTTP needed)."""
def test_discoveryFindsInvestorConfig(self):
from modules.demoConfigs import _getAvailableDemoConfigs
configs = _getAvailableDemoConfigs()
from modules.demoConfigs import getAvailableDemoConfigs
configs = getAvailableDemoConfigs()
assert "investor-demo-2026" in configs, f"Available configs: {list(configs.keys())}"
def test_getByCodeReturnsInstance(self):
from modules.demoConfigs import _getDemoConfigByCode
cfg = _getDemoConfigByCode("investor-demo-2026")
from modules.demoConfigs import getDemoConfigByCode
cfg = getDemoConfigByCode("investor-demo-2026")
assert cfg is not None
assert cfg.code == "investor-demo-2026"
assert cfg.label == "Investor Demo April 2026"
def test_getByCodeReturnsNoneForUnknown(self):
from modules.demoConfigs import _getDemoConfigByCode
cfg = _getDemoConfigByCode("nonexistent-config")
from modules.demoConfigs import getDemoConfigByCode
cfg = getDemoConfigByCode("nonexistent-config")
assert cfg is None
def test_toDictHasRequiredFields(self):
from modules.demoConfigs import _getDemoConfigByCode
cfg = _getDemoConfigByCode("investor-demo-2026")
from modules.demoConfigs import getDemoConfigByCode
cfg = getDemoConfigByCode("investor-demo-2026")
d = cfg.toDict()
assert "code" in d
assert "label" in d

View file

@ -39,8 +39,8 @@ pytestmark = [pytest.mark.expensive, pytest.mark.live]
@pytest.fixture(scope="session")
def pwgDemoConfig():
"""Auto-discovered ``PwgDemo2026`` instance."""
from modules.demoConfigs import _getDemoConfigByCode
cfg = _getDemoConfigByCode("pwg-demo-2026")
from modules.demoConfigs import getDemoConfigByCode
cfg = getDemoConfigByCode("pwg-demo-2026")
assert cfg is not None, (
"Demo config 'pwg-demo-2026' not found — check modules/demoConfigs/pwgDemo2026.py"
)

View file

@ -12,7 +12,7 @@ import time
from modules.features.redmine.serviceRedmineStatsCache import (
RedmineStatsCache,
_getStatsCache,
getStatsCache,
)
@ -52,6 +52,6 @@ class TestRedmineStatsCache:
assert c.get(c.buildKey("inst-b", None, None, "week", [])) == "v3"
def test_singletonIsStable(self) -> None:
a = _getStatsCache()
b = _getStatsCache()
a = getStatsCache()
b = getStatsCache()
assert a is b

View file

@ -1,7 +1,7 @@
# Copyright (c) 2025 Patrick Motsch
# All rights reserved.
from modules.datamodels.datamodelExtraction import ContentExtracted, ContentPart
from modules.datamodels.datamodelUdm import _contentPartsToUdm, _udmToContentParts
from modules.datamodels.datamodelUdm import contentPartsToUdm, _udmToContentParts
def test_bridge_pdf_like_pages():
@@ -36,7 +36,7 @@ def test_bridge_pdf_like_pages():
),
]
extracted = ContentExtracted(id="ext1", parts=parts)
udm = _contentPartsToUdm(extracted, "pdf", "a.pdf")
udm = contentPartsToUdm(extracted, "pdf", "a.pdf")
assert udm.sourceType == "pdf"
assert len(udm.children) == 2
assert all(n.role == "page" for n in udm.children)
@@ -45,7 +45,7 @@ def test_bridge_pdf_like_pages():
def test_udm_to_parts_roundtrip_preserves_ids():
udm = _contentPartsToUdm(
udm = contentPartsToUdm(
ContentExtracted(
id="e1",
parts=[

View file

@@ -0,0 +1,59 @@
# Copyright (c) 2025 Patrick Motsch
"""Guardrails for Trustee ``getTemplateWorkflows`` graphs (new instance bootstrap)."""
from __future__ import annotations
import json
from modules.features.trustee.mainTrustee import getTemplateWorkflows
def _receiptTemplateGraph():
    """Locate the ``trustee-receipt-import`` template workflow and return its graph.

    Fails the surrounding test run immediately if the template is missing
    from the registry returned by :func:`getTemplateWorkflows`.
    """
    template = None
    for workflow in getTemplateWorkflows() or []:
        if workflow.get("id") == "trustee-receipt-import":
            template = workflow
            break
    assert template is not None, "template trustee-receipt-import must exist"
    return template.get("graph") or {}
def _materializeInstance(graph: dict, instanceId: str) -> dict:
raw = json.dumps(graph)
raw = raw.replace("{{featureInstanceId}}", instanceId)
return json.loads(raw)
class TestTrusteeTemplateReceiptImport:
    """The receipt-import chain must carry explicit DataRefs (Pick-not-Push).

    An empty ``documentList: []`` would NOT be auto-wired by
    ``materializeConnectionRefs`` — that helper only materializes empty
    ``userConnection`` references — so the refs have to be in the template.
    """
    def test_receiptImportWiresDocumentListRefs(self):
        instanceId = "00000000-0000-0000-0000-000000000001"
        graph = _materializeInstance(_receiptTemplateGraph(), instanceId)
        nodesById = {}
        for node in graph.get("nodes", []):
            if isinstance(node, dict) and node.get("id"):
                nodesById[node["id"]] = node
        processNode = nodesById.get("process")
        syncNode = nodesById.get("sync")
        assert processNode and syncNode
        processRef = (processNode.get("parameters") or {}).get("documentList")
        syncRef = (syncNode.get("parameters") or {}).get("documentList")
        # process.documentList must point at extract.documents
        assert isinstance(processRef, dict)
        assert processRef.get("type") == "ref"
        assert processRef.get("nodeId") == "extract"
        assert processRef.get("path") == ["documents"]
        # sync.documentList must point at process.documents
        assert isinstance(syncRef, dict)
        assert syncRef.get("type") == "ref"
        assert syncRef.get("nodeId") == "process"
        assert syncRef.get("path") == ["documents"]
    def test_receiptImportFeatureInstanceIdSubstituted(self):
        instanceId = "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee"
        graph = _materializeInstance(_receiptTemplateGraph(), instanceId)
        # Every node that carries a featureInstanceId parameter must have
        # received the concrete id, with no placeholder left behind.
        for node in graph.get("nodes", []):
            params = node.get("parameters") or {}
            if "featureInstanceId" in params:
                assert params["featureInstanceId"] == instanceId

View file

@@ -0,0 +1,94 @@
# Copyright (c) 2026 Patrick Motsch
# All rights reserved.
"""Unit tests for the Abacus connector's getAccountBalances aggregation logic."""
from unittest.mock import patch
import pytest
from modules.features.trustee.accounting.connectors.accountingConnectorAbacus import (
AccountingConnectorAbacus,
_isIncomeStatementAccount,
)
class TestIsIncomeStatementAccount:
    """Accounts below 3000 classify as balance sheet; 3000 and above as income statement."""
    @pytest.mark.parametrize(
        "accountNo,isIncomeStatement",
        [
            ("1020", False),
            ("2010", False),
            ("3000", True),
            ("8500", True),
        ],
    )
    def test_classification(self, accountNo, isIncomeStatement):
        assert _isIncomeStatementAccount(accountNo) == isIncomeStatement
class TestAbacusGetAccountBalances:
    """Aggregation of Abacus general-journal entries into period balances.

    HTTP is bypassed entirely: ``_buildAuthHeaders`` and
    ``_fetchAllJournalEntries`` are patched with async fakes, so only the
    aggregation / carry-over logic of ``getAccountBalances`` runs.
    """
    @pytest.mark.asyncio
    async def test_aggregatesFromGeneralJournalEntries(self):
        connector = AccountingConnectorAbacus()
        # Two 2025 entries: 1000 (Jan) and 500 (Dec) debited to 1020,
        # each credited against income-statement account 6000.
        rawEntries = [
            {
                "Id": "e1", "JournalDate": "2025-01-15T00:00:00",
                "Lines": [
                    {"AccountId": "1020", "DebitAmount": 1000.0, "CreditAmount": 0.0},
                    {"AccountId": "6000", "DebitAmount": 0.0, "CreditAmount": 1000.0},
                ],
            },
            {
                "Id": "e2", "JournalDate": "2025-12-20T00:00:00",
                "Lines": [
                    {"AccountId": "1020", "DebitAmount": 500.0, "CreditAmount": 0.0},
                    {"AccountId": "6000", "DebitAmount": 0.0, "CreditAmount": 500.0},
                ],
            },
        ]
        # Unbound async fakes; patch.object installs them as methods on the class.
        async def _fakeAuth(self, config):
            return {"Authorization": "Bearer X"}
        async def _fakeFetch(self, config, headers, dateTo):
            return rawEntries
        with patch.object(AccountingConnectorAbacus, "_buildAuthHeaders", _fakeAuth), \
             patch.object(AccountingConnectorAbacus, "_fetchAllJournalEntries", _fakeFetch):
            balances = await connector.getAccountBalances({"apiBaseUrl": "http://x", "clientName": "y"}, years=[2025])
        # Index by (account, year, month); month 0 is the annual bucket.
        byPeriod = {(b.accountNumber, b.periodYear, b.periodMonth): b for b in balances}
        assert byPeriod[("1020", 2025, 1)].closingBalance == 1000.0
        assert byPeriod[("1020", 2025, 12)].closingBalance == 1500.0
        assert byPeriod[("1020", 2025, 0)].closingBalance == 1500.0
        # 6000 is income statement (3xxx-9xxx) but credit-side -- closing = -1500 (net debit-credit)
        assert byPeriod[("6000", 2025, 0)].closingBalance == -1500.0
        # ER account January opening must be 0 (no prior-year carry)
        assert byPeriod[("6000", 2025, 1)].openingBalance == 0.0
    @pytest.mark.asyncio
    async def test_balanceSheetCarryOverAcrossYears(self):
        connector = AccountingConnectorAbacus()
        # Only a 2024 entry exists while 2025 is requested, so the movement
        # can reach 2025 only via prior-year carry-over of the BS account.
        rawEntries = [
            {
                "Id": "e1", "JournalDate": "2024-06-30T00:00:00",
                "Lines": [
                    {"AccountId": "1020", "DebitAmount": 7000.0, "CreditAmount": 0.0},
                    {"AccountId": "9999", "DebitAmount": 0.0, "CreditAmount": 7000.0},
                ],
            },
        ]
        async def _fakeAuth(self, config):
            return {"Authorization": "Bearer X"}
        async def _fakeFetch(self, config, headers, dateTo):
            return rawEntries
        with patch.object(AccountingConnectorAbacus, "_buildAuthHeaders", _fakeAuth), \
             patch.object(AccountingConnectorAbacus, "_fetchAllJournalEntries", _fakeFetch):
            balances = await connector.getAccountBalances({"apiBaseUrl": "http://x", "clientName": "y"}, years=[2025])
        byPeriod = {(b.accountNumber, b.periodYear, b.periodMonth): b for b in balances}
        # 1020 (BS) opens 2025 with 7000 from prior year, no movements in 2025 -> closes at 7000
        assert byPeriod[("1020", 2025, 1)].openingBalance == 7000.0
        assert byPeriod[("1020", 2025, 0)].openingBalance == 7000.0
        assert byPeriod[("1020", 2025, 0)].closingBalance == 7000.0

View file

@@ -0,0 +1,114 @@
# Copyright (c) 2026 Patrick Motsch
# All rights reserved.
"""Unit tests for the Bexio connector's getAccountBalances aggregation logic."""
from unittest.mock import patch
import pytest
from modules.features.trustee.accounting.connectors.accountingConnectorBexio import (
AccountingConnectorBexio,
_isIncomeStatementAccount,
)
class TestIsIncomeStatementAccount:
    """Balance-sheet numbers (1xxx/2xxx) and blank strings map to False; 3000+ to True."""
    @pytest.mark.parametrize(
        "accountNo,isIncomeStatement",
        [
            ("1020", False),
            ("2010", False),
            ("3000", True),
            ("9999", True),
            ("", False),
        ],
    )
    def test_classification(self, accountNo, isIncomeStatement):
        assert _isIncomeStatementAccount(accountNo) == isIncomeStatement
class TestBexioGetAccountBalances:
    """Aggregation of Bexio journal rows into monthly/annual balances.

    ``_loadRawAccounts`` and ``_fetchAllJournalRows`` are patched with async
    fakes, so only the period aggregation and carry-over logic is exercised.
    """
    @pytest.mark.asyncio
    async def test_aggregatesBalanceSheetAccount_cumulativeAcrossMonths(self):
        connector = AccountingConnectorBexio()
        # Account-id -> account-number mapping used by the connector.
        accounts = [{"id": 100, "account_no": "1020"}, {"id": 200, "account_no": "6000"}]
        # Simulate a clean year for account 1020 (BS): +1000 in Jan, -300 in Feb, +500 in Dec
        rawJournal = [
            {"date": "2025-01-15", "amount": 1000.0, "debit_account_id": 100, "credit_account_id": 200},
            {"date": "2025-02-10", "amount": 300.0, "debit_account_id": 200, "credit_account_id": 100},
            {"date": "2025-12-20", "amount": 500.0, "debit_account_id": 100, "credit_account_id": 200},
        ]
        # Unbound async fakes; patch.object installs them as methods.
        async def _fakeAccounts(self, config):
            return accounts
        async def _fakeJournal(self, config, dateTo):
            return rawJournal
        with patch.object(AccountingConnectorBexio, "_loadRawAccounts", _fakeAccounts), \
             patch.object(AccountingConnectorBexio, "_fetchAllJournalRows", _fakeJournal):
            balances = await connector.getAccountBalances({"accessToken": "x", "apiBaseUrl": "http://x"}, years=[2025])
        # Index by (account, month) for 2025; month 0 is the annual bucket.
        byPeriod = {(b.accountNumber, b.periodMonth): b for b in balances if b.periodYear == 2025}
        # Account 1020 (BS) cumulative: Jan +1000, Feb +1000-300=700, Dec +700+500=1200
        assert byPeriod[("1020", 1)].closingBalance == 1000.0
        assert byPeriod[("1020", 2)].closingBalance == 700.0
        assert byPeriod[("1020", 11)].closingBalance == 700.0
        assert byPeriod[("1020", 12)].closingBalance == 1200.0
        assert byPeriod[("1020", 0)].closingBalance == 1200.0  # annual
        assert byPeriod[("1020", 0)].openingBalance == 0.0
        assert byPeriod[("1020", 1)].openingBalance == 0.0
        assert byPeriod[("1020", 2)].openingBalance == 1000.0  # = previous month's closing
    @pytest.mark.asyncio
    async def test_balanceSheetAccount_carriesPriorYearOpening(self):
        connector = AccountingConnectorBexio()
        accounts = [{"id": 100, "account_no": "1020"}, {"id": 200, "account_no": "6000"}]
        # The 2024 movement establishes the opening balance that 2025 must inherit.
        rawJournal = [
            {"date": "2024-06-01", "amount": 5000.0, "debit_account_id": 100, "credit_account_id": 200},
            {"date": "2025-03-15", "amount": 1000.0, "debit_account_id": 100, "credit_account_id": 200},
        ]
        async def _fakeAccounts(self, config):
            return accounts
        async def _fakeJournal(self, config, dateTo):
            return rawJournal
        with patch.object(AccountingConnectorBexio, "_loadRawAccounts", _fakeAccounts), \
             patch.object(AccountingConnectorBexio, "_fetchAllJournalRows", _fakeJournal):
            balances = await connector.getAccountBalances({"accessToken": "x", "apiBaseUrl": "http://x"}, years=[2025])
        byPeriod = {(b.accountNumber, b.periodMonth): b for b in balances if b.periodYear == 2025}
        # 2025 opening for 1020 = 5000 (carried over from 2024)
        assert byPeriod[("1020", 1)].openingBalance == 5000.0
        assert byPeriod[("1020", 0)].openingBalance == 5000.0
        assert byPeriod[("1020", 12)].closingBalance == 6000.0
        assert byPeriod[("1020", 0)].closingBalance == 6000.0
    @pytest.mark.asyncio
    async def test_incomeStatementAccount_resetsToZeroEachYear(self):
        connector = AccountingConnectorBexio()
        accounts = [{"id": 200, "account_no": "6000"}, {"id": 300, "account_no": "1020"}]
        # A huge 2024 movement on ER account 6000 must NOT leak into 2025.
        rawJournal = [
            {"date": "2024-12-31", "amount": 99999.99, "debit_account_id": 200, "credit_account_id": 300},
            {"date": "2025-06-15", "amount": 250.0, "debit_account_id": 200, "credit_account_id": 300},
        ]
        async def _fakeAccounts(self, config):
            return accounts
        async def _fakeJournal(self, config, dateTo):
            return rawJournal
        with patch.object(AccountingConnectorBexio, "_loadRawAccounts", _fakeAccounts), \
             patch.object(AccountingConnectorBexio, "_fetchAllJournalRows", _fakeJournal):
            balances = await connector.getAccountBalances({"accessToken": "x", "apiBaseUrl": "http://x"}, years=[2025])
        byPeriod = {(b.accountNumber, b.periodMonth): b for b in balances if b.periodYear == 2025}
        # ER account 6000: prior year had 99999.99 movement; 2025 opening MUST be 0
        assert byPeriod[("6000", 1)].openingBalance == 0.0
        assert byPeriod[("6000", 0)].openingBalance == 0.0
        assert byPeriod[("6000", 6)].closingBalance == 250.0
        assert byPeriod[("6000", 12)].closingBalance == 250.0
        assert byPeriod[("6000", 0)].closingBalance == 250.0

View file

@@ -0,0 +1,156 @@
# Copyright (c) 2026 Patrick Motsch
# All rights reserved.
"""Unit tests for the RMA connector's getAccountBalances implementation.
Mocks the `_fetchSaldoRows` low-level call so we exercise the orchestration
logic (period iteration, ER/BS handling, opening/closing carry-over) without
hitting the real RMA HTTP API.
"""
import json
from typing import Dict
from unittest.mock import patch
import pytest
from modules.features.trustee.accounting.connectors.accountingConnectorRma import (
AccountingConnectorRma,
_formatLastDayOfMonth,
_isIncomeStatementAccount,
_parseSaldoBody,
)
class TestParseSaldoBody:
    """``_parseSaldoBody`` must accept JSON and XML saldo tables, and degrade
    to an empty list on empty or unparseable input."""
    def test_jsonRowsParsed(self):
        payload = {
            "row": [
                {"column": ["1020", "Bank UBS", "48507.4100"]},
                {"column": ["6000", "Personalaufwand", "12000.00"]},
            ]
        }
        parsed = _parseSaldoBody(json.dumps(payload))
        assert ("1020", 48507.41) in parsed
        assert ("6000", 12000.0) in parsed
    def test_xmlRowsParsed(self):
        fragments = [
            "<table>",
            "<row><column>1020</column><column>Bank</column><column>48507.41</column></row>",
            "<row><column>2010</column><column>AHV</column><column>-1234.50</column></row>",
            "</table>",
        ]
        parsed = _parseSaldoBody("".join(fragments))
        assert ("1020", 48507.41) in parsed
        assert ("2010", -1234.5) in parsed
    def test_emptyAndMalformedReturnEmpty(self):
        # Not valid JSON, not valid XML, or valid-but-empty -> always [].
        for body in ("", "not even json or xml", '{"row": []}'):
            assert _parseSaldoBody(body) == []
class TestIsIncomeStatementAccount:
    """Empty or non-numeric account numbers are never income statement;
    numeric ones split at 3000 (below -> balance sheet, at/above -> P&L)."""
    @pytest.mark.parametrize(
        "accountNo,isIncomeStatement",
        [
            ("1020", False),
            ("2010", False),
            ("2800", False),
            ("3200", True),
            ("6000", True),
            ("9100", True),
            ("", False),
            ("ABC", False),
        ],
    )
    def test_classification(self, accountNo, isIncomeStatement):
        assert _isIncomeStatementAccount(accountNo) == isIncomeStatement
class TestFormatLastDayOfMonth:
    """``_formatLastDayOfMonth`` must emit ISO dates and honour leap years."""
    def test_january(self):
        assert "2025-01-31" == _formatLastDayOfMonth(2025, 1)
    def test_february_nonLeap(self):
        # 2025 is not a leap year -> February has 28 days.
        assert "2025-02-28" == _formatLastDayOfMonth(2025, 2)
    def test_february_leap(self):
        # 2024 is a leap year -> February has 29 days.
        assert "2024-02-29" == _formatLastDayOfMonth(2024, 2)
    def test_december(self):
        assert "2025-12-31" == _formatLastDayOfMonth(2025, 12)
class TestRmaGetAccountBalances:
    """Reproduces the BuHa SoHa scenario: account 1020 closing balance per
    31.12.2025 = 48'507.41, with prior-year opening 30'927.62.

    ``_fetchSaldoRows`` and ``getChartOfAccounts`` are patched so only the
    period iteration and opening/closing carry-over logic runs.
    """
    @pytest.mark.asyncio
    async def test_buhaSohaScenario_yieldsAuthoritativeBalances(self):
        connector = AccountingConnectorRma()
        priorYearEndSaldo = 30927.62
        decemberSaldo = 48507.41
        # Simplified monthly progression: linear ramp from 31000 -> 48507.41
        monthlySaldos = {
            1: 31200.00, 2: 32500.00, 3: 33800.00, 4: 35200.00,
            5: 36800.00, 6: 38500.00, 7: 40100.00, 8: 41900.00,
            9: 43800.00, 10: 45500.00, 11: 47100.00, 12: decemberSaldo,
        }
        # Saldo fake: answers the 2024 year-end request (opening balance)
        # and per-month 2025 cut-off dates; anything else yields no rows.
        async def _fakeFetchRows(self, config, accno, fromDate, toDate):
            if toDate == "2024-12-31":
                return [("1020", priorYearEndSaldo)]
            if toDate.startswith("2025-"):
                month = int(toDate[5:7])
                return [("1020", monthlySaldos[month])]
            return []
        # Minimal chart-of-accounts stub exposing only .accountNumber.
        async def _fakeChart(self, config, accountType=None):
            return [type("AC", (), {"accountNumber": "1020"})()]
        with patch.object(AccountingConnectorRma, "_fetchSaldoRows", _fakeFetchRows), \
             patch.object(AccountingConnectorRma, "getChartOfAccounts", _fakeChart):
            balances = await connector.getAccountBalances({"clientName": "test", "apiBaseUrl": "http://x", "apiKey": "k"}, years=[2025])
        # Index by (account, year, month); month 0 is the annual bucket.
        byPeriod = {(b.accountNumber, b.periodYear, b.periodMonth): b for b in balances}
        annual = byPeriod[("1020", 2025, 0)]
        assert annual.openingBalance == round(priorYearEndSaldo, 2)
        assert annual.closingBalance == round(decemberSaldo, 2)
        dec = byPeriod[("1020", 2025, 12)]
        assert dec.closingBalance == round(decemberSaldo, 2)
        # December opens at November's closing saldo.
        assert dec.openingBalance == round(monthlySaldos[11], 2)
        nov = byPeriod[("1020", 2025, 11)]
        assert nov.closingBalance == round(monthlySaldos[11], 2)
        jan = byPeriod[("1020", 2025, 1)]
        # January opens at the prior-year-end saldo (BS carry-over).
        assert jan.openingBalance == round(priorYearEndSaldo, 2)
        assert jan.closingBalance == round(monthlySaldos[1], 2)
    @pytest.mark.asyncio
    async def test_incomeStatementAccountResetsToZero(self):
        connector = AccountingConnectorRma()
        # Large 2024 saldo on ER account 6000 must not carry into 2025.
        async def _fakeFetchRows(self, config, accno, fromDate, toDate):
            if toDate == "2024-12-31":
                return [("6000", 99999.99)]
            if toDate == "2025-01-31":
                return [("6000", 5000.00)]
            if toDate == "2025-12-31":
                return [("6000", 60000.00)]
            return []
        async def _fakeChart(self, config, accountType=None):
            return [type("AC", (), {"accountNumber": "6000"})()]
        with patch.object(AccountingConnectorRma, "_fetchSaldoRows", _fakeFetchRows), \
             patch.object(AccountingConnectorRma, "getChartOfAccounts", _fakeChart):
            balances = await connector.getAccountBalances({"clientName": "x", "apiBaseUrl": "http://x", "apiKey": "k"}, years=[2025])
        byPeriod = {(b.accountNumber, b.periodMonth): b for b in balances if b.periodYear == 2025}
        # ER account January opening MUST be 0 (not 99999.99 from prior year)
        assert byPeriod[("6000", 1)].openingBalance == 0.0
        assert byPeriod[("6000", 0)].openingBalance == 0.0  # annual bucket too

Some files were not shown because too many files have changed in this diff Show more