Fixed form generator, trustee, sort and filter

This commit is contained in:
ValueOn AG 2026-04-14 00:15:56 +02:00
parent 19f9aa3674
commit 35209f7f80
42 changed files with 1913 additions and 2040 deletions

View file

@ -121,6 +121,21 @@ def _get_model_fields(model_class) -> Dict[str, str]:
return fields
def _get_fk_sort_meta(model_class) -> Dict[str, Dict[str, str]]:
"""Map FK field name -> {model, labelField} from json_schema_extra (fk_model + frontend_fk_display_field)."""
result: Dict[str, Dict[str, str]] = {}
for name, field_info in model_class.model_fields.items():
extra = field_info.json_schema_extra
if not extra or not isinstance(extra, dict):
continue
fk_model = extra.get("fk_model")
label_field = extra.get("frontend_fk_display_field")
if fk_model and label_field:
result[name] = {"model": str(fk_model), "labelField": str(label_field)}
return result
def _parseRecordFields(record: Dict[str, Any], fields: Dict[str, str], context: str = "") -> None:
"""Parse record fields in-place: numeric typing, vector parsing, JSONB deserialization."""
import json as _json
@ -1011,6 +1026,7 @@ class DatabaseConnector:
"""
fields = _get_model_fields(model_class)
validColumns = set(fields.keys())
where_parts: List[str] = []
values: List[Any] = []
@ -1160,10 +1176,10 @@ class DatabaseConnector:
with self.connection.cursor() as cursor:
countSql = f'SELECT COUNT(*) FROM "{table}"{where_clause}'
dataSql = f'SELECT * FROM "{table}"{where_clause}{order_clause}{limit_clause}'
cursor.execute(countSql, count_values)
totalItems = cursor.fetchone()["count"]
dataSql = f'SELECT * FROM "{table}"{where_clause}{order_clause}{limit_clause}'
cursor.execute(dataSql, values)
records = [dict(row) for row in cursor.fetchall()]
@ -1218,10 +1234,11 @@ class DatabaseConnector:
return []
if pagination:
import copy
pagination = copy.deepcopy(pagination)
if pagination.filters and column in pagination.filters:
import copy
pagination = copy.deepcopy(pagination)
pagination.filters.pop(column, None)
pagination.sort = []
where_clause, _, _, values, _ = \
self._buildPaginationClauses(model_class, pagination, recordFilter)

View file

@ -196,12 +196,13 @@ class ActionResult(BaseModel):
success: bool = Field(description="Whether execution succeeded", json_schema_extra={"label": "Erfolg"})
error: Optional[str] = Field(None, description="Error message if failed", json_schema_extra={"label": "Fehler"})
documents: List[ActionDocument] = Field(default_factory=list, description="Document outputs", json_schema_extra={"label": "Dokumente"})
data: Optional[Dict[str, Any]] = Field(None, description="Structured result data accessible via DataRef", json_schema_extra={"label": "Daten"})
resultLabel: Optional[str] = Field(None,
description="Label for document routing (set by action handler, not by action methods)", json_schema_extra={"label": "Ergebnis-Label"})
@classmethod
def isSuccess(cls, documents: List[ActionDocument] = None) -> "ActionResult":
return cls(success=True, documents=documents or [])
def isSuccess(cls, documents: List[ActionDocument] = None, data: Dict[str, Any] = None) -> "ActionResult":
return cls(success=True, documents=documents or [], data=data)
@classmethod
def isFailure(

View file

@ -25,12 +25,28 @@ class FileItem(PowerOnModel):
mandateId: Optional[str] = Field(
default="",
description="ID of the mandate this file belongs to",
json_schema_extra={"label": "Mandant", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False, "frontend_fk_source": "/api/mandates/", "frontend_fk_display_field": "label"},
json_schema_extra={
"label": "Mandant",
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": False,
"frontend_fk_source": "/api/mandates/",
"frontend_fk_display_field": "label",
"fk_model": "Mandate",
},
)
featureInstanceId: Optional[str] = Field(
default="",
description="ID of the feature instance this file belongs to",
json_schema_extra={"label": "Feature-Instanz", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False, "frontend_fk_source": "/api/features/instances", "frontend_fk_display_field": "label"},
json_schema_extra={
"label": "Feature-Instanz",
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": False,
"frontend_fk_source": "/api/features/instances",
"frontend_fk_display_field": "label",
"fk_model": "FeatureInstance",
},
)
mimeType: str = Field(
description="MIME type of the file",

View file

@ -26,11 +26,27 @@ class UserMandate(PowerOnModel):
)
userId: str = Field(
description="FK → User.id (CASCADE DELETE)",
json_schema_extra={"label": "Benutzer", "frontend_type": "select", "frontend_readonly": False, "frontend_required": True, "frontend_fk_source": "/api/users/", "frontend_fk_display_field": "username"}
json_schema_extra={
"label": "Benutzer",
"frontend_type": "select",
"frontend_readonly": False,
"frontend_required": True,
"frontend_fk_source": "/api/users/",
"frontend_fk_display_field": "username",
"fk_model": "User",
},
)
mandateId: str = Field(
description="FK → Mandate.id (CASCADE DELETE)",
json_schema_extra={"label": "Mandant", "frontend_type": "select", "frontend_readonly": False, "frontend_required": True, "frontend_fk_source": "/api/mandates/", "frontend_fk_display_field": "label"}
json_schema_extra={
"label": "Mandant",
"frontend_type": "select",
"frontend_readonly": False,
"frontend_required": True,
"frontend_fk_source": "/api/mandates/",
"frontend_fk_display_field": "label",
"fk_model": "Mandate",
},
)
enabled: bool = Field(
default=True,

View file

@ -5,8 +5,8 @@ Creates a complete demo environment with two mandates, one user,
and all feature instances needed for the investor live demo.
Mandates:
- HappyLife AG (happylife) workspace, trustee(RMA), graphEditor, chatbot, neutralization
- Alpina Treuhand AG (alpina) workspace, trustee(RMA), graphEditor, neutralization
- HappyLife AG (happylife) Dokumentenablage, Buchhaltung, Automationen, Chatbot, Datenschutz
- Alpina Treuhand AG (alpina) Dokumentenablage, 3x Treuhand-Kunden, Automationen, Datenschutz
User:
- Patrick Helvetia (p.motsch@poweron.swiss) SysAdmin, member of both mandates
@ -37,12 +37,25 @@ _USER = {
"username": "patrick.helvetia",
"email": "p.motsch@poweron.swiss",
"fullName": "Patrick Helvetia",
"password": "patrick.helvetia",
"password": "patrick.helvetia.demo",
"language": "en",
}
_FEATURES_HAPPYLIFE = ["workspace", "trustee", "graphicalEditor", "chatbot", "neutralization"]
_FEATURES_ALPINA = ["workspace", "trustee", "graphicalEditor", "neutralization"]
_FEATURES_HAPPYLIFE = [
{"code": "workspace", "label": "Dokumentenablage"},
{"code": "trustee", "label": "Buchhaltung"},
{"code": "graphicalEditor", "label": "Automationen"},
{"code": "chatbot", "label": "Chatbot"},
{"code": "neutralization", "label": "Datenschutz"},
]
_FEATURES_ALPINA = [
{"code": "workspace", "label": "Dokumentenablage"},
{"code": "trustee", "label": "BUHA Müller Immobilien GmbH"},
{"code": "trustee", "label": "BUHA Schneider Gastro AG"},
{"code": "trustee", "label": "BUHA Weber Consulting"},
{"code": "graphicalEditor", "label": "Automationen"},
{"code": "neutralization", "label": "Datenschutz"},
]
class InvestorDemo2026(_BaseDemoConfig):
@ -64,14 +77,17 @@ class InvestorDemo2026(_BaseDemoConfig):
mandateIdAlpina = self._ensureMandate(db, _MANDATE_ALPINA, summary)
userId = self._ensureUser(db, summary)
self._ensureRootMandateSysAdminRole(db, userId, summary)
if mandateIdHappy:
self._ensureMembership(db, userId, mandateIdHappy, _MANDATE_HAPPYLIFE["label"], summary)
self._ensureFeatures(db, mandateIdHappy, _MANDATE_HAPPYLIFE["label"], _FEATURES_HAPPYLIFE, summary)
self._ensureFeatureAccess(db, userId, mandateIdHappy, _MANDATE_HAPPYLIFE["label"], summary)
if mandateIdAlpina:
self._ensureMembership(db, userId, mandateIdAlpina, _MANDATE_ALPINA["label"], summary)
self._ensureFeatures(db, mandateIdAlpina, _MANDATE_ALPINA["label"], _FEATURES_ALPINA, summary)
self._ensureFeatureAccess(db, userId, mandateIdAlpina, _MANDATE_ALPINA["label"], summary)
self._ensureTrusteeRmaConfig(db, mandateIdHappy, _MANDATE_HAPPYLIFE["label"], summary)
self._ensureTrusteeRmaConfig(db, mandateIdAlpina, _MANDATE_ALPINA["label"], summary)
@ -102,6 +118,7 @@ class InvestorDemo2026(_BaseDemoConfig):
existing = db.getRecordset(Mandate, recordFilter={"name": mandateDef["name"]})
for m in existing:
mid = m.get("id")
self._removeMandateData(db, mid, mandateDef["label"], summary)
db.recordDelete(Mandate, mid)
summary["removed"].append(f"Mandate {mandateDef['label']} ({mid})")
logger.info(f"Removed mandate {mandateDef['label']} ({mid})")
@ -178,6 +195,48 @@ class InvestorDemo2026(_BaseDemoConfig):
summary["created"].append(f"User {_USER['fullName']}")
return uid
def _ensureRootMandateSysAdminRole(self, db, userId: str, summary: Dict):
    """Ensure the demo user is member of the root mandate with the sysadmin role.
    Without this, hasSysAdminRole returns False and admin menus are hidden.

    Idempotent: an existing membership / role assignment is reused and noted
    in summary["skipped"]; missing prerequisites (root mandate, sysadmin role)
    are reported via summary["errors"] instead of raising.
    """
    from modules.datamodels.datamodelUam import Mandate
    from modules.datamodels.datamodelMembership import UserMandate, UserMandateRole
    from modules.datamodels.datamodelRbac import Role
    # The system root mandate anchors global roles; abort early if absent.
    rootMandates = db.getRecordset(Mandate, recordFilter={"name": "root", "isSystem": True})
    if not rootMandates:
        summary["errors"].append("Root mandate not found — cannot assign sysadmin role")
        return
    rootMandateId = rootMandates[0].get("id")
    # Reuse an existing UserMandate membership, otherwise create one.
    existing = db.getRecordset(UserMandate, recordFilter={"userId": userId, "mandateId": rootMandateId})
    if existing:
        userMandateId = existing[0].get("id")
    else:
        um = UserMandate(userId=userId, mandateId=rootMandateId, enabled=True)
        created = db.recordCreate(UserMandate, um)
        userMandateId = created.get("id")
        summary["created"].append("Membership -> root mandate")
        logger.info(f"Created root mandate membership for {_USER['username']}")
    # Look up the sysadmin role scoped to the root mandate.
    sysadminRoles = db.getRecordset(Role, recordFilter={"mandateId": rootMandateId, "roleLabel": "sysadmin"})
    if not sysadminRoles:
        summary["errors"].append("sysadmin role not found in root mandate")
        return
    sysadminRoleId = sysadminRoles[0].get("id")
    # Link the role to the membership only if not already linked.
    existingRole = db.getRecordset(UserMandateRole, recordFilter={
        "userMandateId": userMandateId,
        "roleId": sysadminRoleId,
    })
    if not existingRole:
        umr = UserMandateRole(userMandateId=userMandateId, roleId=sysadminRoleId)
        db.recordCreate(UserMandateRole, umr)
        summary["created"].append("SysAdmin role in root mandate")
        logger.info(f"Assigned sysadmin role in root mandate for {_USER['username']}")
    else:
        summary["skipped"].append("SysAdmin role in root mandate exists")
def _ensureMembership(self, db, userId: str, mandateId: str, mandateLabel: str, summary: Dict):
from modules.datamodels.datamodelMembership import UserMandate, UserMandateRole
from modules.datamodels.datamodelRbac import Role
@ -202,33 +261,76 @@ class InvestorDemo2026(_BaseDemoConfig):
db.recordCreate(UserMandateRole, umr)
logger.info(f"Assigned admin role in {mandateLabel}")
def _ensureFeatures(self, db, mandateId: str, mandateLabel: str, featureCodes: List[str], summary: Dict):
def _ensureFeatures(self, db, mandateId: str, mandateLabel: str, featureDefs: List[Dict], summary: Dict):
from modules.interfaces.interfaceFeatures import getFeatureInterface
fi = getFeatureInterface(db)
existingInstances = fi.getFeatureInstancesForMandate(mandateId)
existingCodes = {
(inst.featureCode if hasattr(inst, "featureCode") else inst.get("featureCode", ""))
existingLabels = {
(inst.label if hasattr(inst, "label") else inst.get("label", ""))
for inst in existingInstances
}
for code in featureCodes:
if code in existingCodes:
summary["skipped"].append(f"Feature {code} in {mandateLabel} exists")
for featureDef in featureDefs:
code = featureDef["code"]
instanceLabel = featureDef["label"]
if instanceLabel in existingLabels:
summary["skipped"].append(f"Feature '{instanceLabel}' in {mandateLabel} exists")
continue
try:
fi.createFeatureInstance(
featureCode=code,
mandateId=mandateId,
label=f"{code} ({mandateLabel})",
label=instanceLabel,
enabled=True,
copyTemplateRoles=True,
)
summary["created"].append(f"Feature {code} in {mandateLabel}")
logger.info(f"Created feature instance {code} in {mandateLabel}")
summary["created"].append(f"Feature '{instanceLabel}' in {mandateLabel}")
logger.info(f"Created feature instance '{instanceLabel}' ({code}) in {mandateLabel}")
except Exception as e:
summary["errors"].append(f"Feature {code} in {mandateLabel}: {e}")
logger.error(f"Failed to create feature {code} in {mandateLabel}: {e}")
summary["errors"].append(f"Feature '{instanceLabel}' in {mandateLabel}: {e}")
logger.error(f"Failed to create feature '{instanceLabel}' ({code}) in {mandateLabel}: {e}")
def _ensureFeatureAccess(self, db, userId: str, mandateId: str, mandateLabel: str, summary: Dict):
    """Grant the demo user admin access to every feature instance in the mandate.

    For each FeatureInstance of the mandate: ensure a FeatureAccess record
    exists for the user, then (if a "<featureCode>-admin" role exists on the
    instance) ensure a FeatureAccessRole linking the access to that role.
    Idempotent — existing records are skipped, not duplicated.
    """
    from modules.datamodels.datamodelFeatures import FeatureInstance
    from modules.datamodels.datamodelMembership import FeatureAccess, FeatureAccessRole
    from modules.datamodels.datamodelRbac import Role
    instances = db.getRecordset(FeatureInstance, recordFilter={"mandateId": mandateId}) or []
    for inst in instances:
        instId = inst.get("id")
        featureCode = inst.get("featureCode", "")
        if not instId:
            # Defensive: skip malformed instance records without an id.
            continue
        # Ensure the FeatureAccess record (user <-> feature instance).
        existing = db.getRecordset(FeatureAccess, recordFilter={"userId": userId, "featureInstanceId": instId})
        if existing:
            featureAccessId = existing[0].get("id")
            summary["skipped"].append(f"FeatureAccess {featureCode} in {mandateLabel} exists")
        else:
            fa = FeatureAccess(userId=userId, featureInstanceId=instId, enabled=True)
            created = db.recordCreate(FeatureAccess, fa)
            featureAccessId = created.get("id")
            summary["created"].append(f"FeatureAccess {featureCode} in {mandateLabel}")
            logger.info(f"Created feature access for {featureCode} in {mandateLabel}")
        # Attach the per-feature admin role, when one is defined on the instance.
        adminRoleLabel = f"{featureCode}-admin"
        adminRoles = db.getRecordset(Role, recordFilter={
            "featureInstanceId": instId,
            "roleLabel": adminRoleLabel,
        })
        if adminRoles:
            adminRoleId = adminRoles[0].get("id")
            existingRole = db.getRecordset(FeatureAccessRole, recordFilter={
                "featureAccessId": featureAccessId,
                "roleId": adminRoleId,
            })
            if not existingRole:
                far = FeatureAccessRole(featureAccessId=featureAccessId, roleId=adminRoleId)
                db.recordCreate(FeatureAccessRole, far)
                logger.info(f"Assigned {adminRoleLabel} role in {mandateLabel}")
def _ensureTrusteeRmaConfig(self, db, mandateId: Optional[str], mandateLabel: str, summary: Dict):
if not mandateId:
@ -336,6 +438,157 @@ class InvestorDemo2026(_BaseDemoConfig):
except Exception as e:
summary["errors"].append(f"Billing for {mandateLabel}: {e}")
def _removeMandateData(self, db, mandateId: str, mandateLabel: str, summary: Dict):
    """Remove all data scoped to a mandate before deleting the mandate itself.

    Deletes, in dependency order: per-feature data (graphical editor, trustee,
    neutralization), chat workflows with their messages/logs, feature access
    records and their roles, the feature instances themselves, user-mandate
    memberships and their roles, mandate roles and their access rules, and
    finally billing settings (best-effort, errors collected in summary).
    """
    from modules.datamodels.datamodelFeatures import FeatureInstance
    from modules.datamodels.datamodelMembership import UserMandate, UserMandateRole, FeatureAccess, FeatureAccessRole
    from modules.datamodels.datamodelRbac import Role, AccessRule
    from modules.datamodels.datamodelChat import ChatWorkflow, ChatMessage, ChatLog
    from modules.datamodels.datamodelBilling import BillingSettings
    instances = db.getRecordset(FeatureInstance, recordFilter={"mandateId": mandateId}) or []
    for inst in instances:
        instId = inst.get("id")
        featureCode = inst.get("featureCode", "")
        if not instId:
            # Defensive: skip malformed instance records without an id.
            continue
        # Feature-specific cleanup hooks run before the instance is deleted.
        if featureCode == "graphicalEditor":
            self._removeGraphicalEditorData(instId, mandateId, mandateLabel, summary)
        if featureCode == "trustee":
            self._removeTrusteeData(db, instId, mandateLabel, summary)
        if featureCode == "neutralization":
            self._removeNeutralizationData(db, instId, mandateLabel, summary)
        # Chat workflows: delete child messages and logs before each workflow.
        chatWorkflows = db.getRecordset(ChatWorkflow, recordFilter={"featureInstanceId": instId}) or []
        for wf in chatWorkflows:
            wfId = wf.get("id")
            if not wfId:
                continue
            for msg in db.getRecordset(ChatMessage, recordFilter={"workflowId": wfId}) or []:
                db.recordDelete(ChatMessage, msg.get("id"))
            for log in db.getRecordset(ChatLog, recordFilter={"workflowId": wfId}) or []:
                db.recordDelete(ChatLog, log.get("id"))
            db.recordDelete(ChatWorkflow, wfId)
        if chatWorkflows:
            summary["removed"].append(f"{len(chatWorkflows)} ChatWorkflows in {mandateLabel}")
        # Feature access records and their role links.
        accesses = db.getRecordset(FeatureAccess, recordFilter={"featureInstanceId": instId}) or []
        for access in accesses:
            for role in db.getRecordset(FeatureAccessRole, recordFilter={"featureAccessId": access.get("id")}) or []:
                db.recordDelete(FeatureAccessRole, role.get("id"))
            db.recordDelete(FeatureAccess, access.get("id"))
        db.recordDelete(FeatureInstance, instId)
        summary["removed"].append(f"FeatureInstance {featureCode} in {mandateLabel}")
        logger.info(f"Removed feature instance {featureCode} ({instId}) in {mandateLabel}")
    # Memberships: delete role links before each membership record.
    memberships = db.getRecordset(UserMandate, recordFilter={"mandateId": mandateId}) or []
    for um in memberships:
        for umr in db.getRecordset(UserMandateRole, recordFilter={"userMandateId": um.get("id")}) or []:
            db.recordDelete(UserMandateRole, umr.get("id"))
        db.recordDelete(UserMandate, um.get("id"))
    if memberships:
        summary["removed"].append(f"{len(memberships)} memberships in {mandateLabel}")
    # Mandate-level roles: delete access rules before each role.
    roles = db.getRecordset(Role, recordFilter={"mandateId": mandateId}) or []
    for role in roles:
        for rule in db.getRecordset(AccessRule, recordFilter={"roleId": role.get("id")}) or []:
            db.recordDelete(AccessRule, rule.get("id"))
        db.recordDelete(Role, role.get("id"))
    if roles:
        summary["removed"].append(f"{len(roles)} roles in {mandateLabel}")
    # Billing lives in the root interface's DB, not the mandate DB; best-effort.
    try:
        from modules.interfaces.interfaceDbBilling import _getRootInterface
        billingDb = _getRootInterface().db
        billingSettings = billingDb.getRecordset(BillingSettings, recordFilter={"mandateId": mandateId}) or []
        for bs in billingSettings:
            billingDb.recordDelete(BillingSettings, bs.get("id"))
        if billingSettings:
            summary["removed"].append(f"BillingSettings in {mandateLabel}")
    except Exception as e:
        summary["errors"].append(f"Billing cleanup for {mandateLabel}: {e}")
def _removeGraphicalEditorData(self, featureInstanceId: str, mandateId: str, mandateLabel: str, summary: Dict):
    """Remove all AutoWorkflow data (workflows, runs, versions, logs, tasks) from the Greenfield DB.

    Opens a dedicated connection to the "poweron_graphicaleditor" database
    (the graphical editor stores its data outside the mandate DB), then
    deletes versions, runs (with their step logs) and tasks before each
    workflow. Best-effort: any failure is recorded in summary["errors"].
    """
    try:
        from modules.features.graphicalEditor.datamodelFeatureGraphicalEditor import (
            AutoWorkflow, AutoVersion, AutoRun, AutoStepLog, AutoTask,
        )
        from modules.connectors.connectorDbPostgre import DatabaseConnector
        from modules.shared.configuration import APP_CONFIG
        # Dedicated connector: graphical editor data lives in its own database.
        geDb = DatabaseConnector(
            dbHost=APP_CONFIG.get("DB_HOST", "localhost"),
            dbDatabase="poweron_graphicaleditor",
            dbUser=APP_CONFIG.get("DB_USER"),
            # Prefer the secret-managed password, fall back to the plain one.
            dbPassword=APP_CONFIG.get("DB_PASSWORD_SECRET") or APP_CONFIG.get("DB_PASSWORD"),
            dbPort=int(APP_CONFIG.get("DB_PORT", 5432)),
            userId=None,
        )
        workflows = geDb.getRecordset(AutoWorkflow, recordFilter={
            "mandateId": mandateId,
            "featureInstanceId": featureInstanceId,
        }) or []
        for wf in workflows:
            wfId = wf.get("id")
            if not wfId:
                continue
            # Delete children (versions, runs + step logs, tasks) before the workflow.
            for version in geDb.getRecordset(AutoVersion, recordFilter={"workflowId": wfId}) or []:
                geDb.recordDelete(AutoVersion, version.get("id"))
            runs = geDb.getRecordset(AutoRun, recordFilter={"workflowId": wfId}) or []
            for run in runs:
                runId = run.get("id")
                for stepLog in geDb.getRecordset(AutoStepLog, recordFilter={"runId": runId}) or []:
                    geDb.recordDelete(AutoStepLog, stepLog.get("id"))
                geDb.recordDelete(AutoRun, runId)
            for task in geDb.getRecordset(AutoTask, recordFilter={"workflowId": wfId}) or []:
                geDb.recordDelete(AutoTask, task.get("id"))
            geDb.recordDelete(AutoWorkflow, wfId)
        if workflows:
            summary["removed"].append(f"{len(workflows)} AutoWorkflows in {mandateLabel}")
            logger.info(f"Removed {len(workflows)} graphical editor workflows for {mandateLabel}")
    except Exception as e:
        summary["errors"].append(f"GraphicalEditor cleanup for {mandateLabel}: {e}")
        logger.error(f"Failed to clean up graphical editor data for {mandateLabel}: {e}")
def _removeTrusteeData(self, db, featureInstanceId: str, mandateLabel: str, summary: Dict):
    """Remove TrusteeAccountingConfig for a feature instance.

    Best-effort: failures are collected in summary["errors"] and never raise.
    """
    try:
        from modules.features.trustee.datamodelFeatureTrustee import TrusteeAccountingConfig
        configs = db.getRecordset(TrusteeAccountingConfig, recordFilter={"featureInstanceId": featureInstanceId}) or []
        for cfg in configs:
            db.recordDelete(TrusteeAccountingConfig, cfg.get("id"))
        if configs:
            summary["removed"].append(f"TrusteeAccountingConfig in {mandateLabel}")
    except Exception as e:
        summary["errors"].append(f"Trustee cleanup for {mandateLabel}: {e}")
def _removeNeutralizationData(self, db, featureInstanceId: str, mandateLabel: str, summary: Dict):
    """Remove DataNeutraliserConfig for a feature instance.

    Best-effort: failures are collected in summary["errors"] and never raise.
    """
    try:
        from modules.features.neutralization.datamodelFeatureNeutralizer import DataNeutraliserConfig
        configs = db.getRecordset(DataNeutraliserConfig, recordFilter={"featureInstanceId": featureInstanceId}) or []
        for cfg in configs:
            db.recordDelete(DataNeutraliserConfig, cfg.get("id"))
        if configs:
            summary["removed"].append(f"DataNeutraliserConfig in {mandateLabel}")
    except Exception as e:
        summary["errors"].append(f"Neutralization cleanup for {mandateLabel}: {e}")
def _removeLanguageSet(self, db, code: str, summary: Dict):
"""Remove a language set if it was created during demo (e.g. 'es' from UC4)."""
try:

View file

@ -63,11 +63,27 @@ class AutoWorkflow(PowerOnModel):
)
mandateId: str = Field(
description="Mandate ID",
json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False, "label": "Mandanten-ID"},
json_schema_extra={
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": False,
"label": "Mandanten-ID",
"frontend_fk_source": "/api/mandates/",
"frontend_fk_display_field": "label",
"fk_model": "Mandate",
},
)
featureInstanceId: str = Field(
description="Feature instance ID",
json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False, "label": "Feature-Instanz-ID"},
json_schema_extra={
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": False,
"label": "Feature-Instanz-ID",
"frontend_fk_source": "/api/features/instances",
"frontend_fk_display_field": "label",
"fk_model": "FeatureInstance",
},
)
label: str = Field(
description="User-friendly workflow name",
@ -206,7 +222,15 @@ class AutoRun(PowerOnModel):
mandateId: Optional[str] = Field(
default=None,
description="Mandate ID for cross-feature querying",
json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False, "label": "Mandanten-ID"},
json_schema_extra={
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": False,
"label": "Mandanten-ID",
"frontend_fk_source": "/api/mandates/",
"frontend_fk_display_field": "label",
"fk_model": "Mandate",
},
)
ownerId: Optional[str] = Field(
default=None,

View file

@ -15,10 +15,16 @@ AI_NODES = [
{"name": "outputFormat", "type": "string", "required": False, "frontendType": "select",
"frontendOptions": {"options": ["text", "json", "emailDraft"]},
"description": t("Ausgabeformat"), "default": "text"},
{"name": "documentList", "type": "string", "required": False, "frontendType": "hidden",
"description": t("Dokumentenliste (via Wire oder DataRef)"), "default": ""},
{"name": "context", "type": "string", "required": False, "frontendType": "hidden",
"description": t("Kontext-Daten (via Wire oder DataRef)"), "default": ""},
{"name": "simpleMode", "type": "boolean", "required": False, "frontendType": "checkbox",
"description": t("Einfacher Modus"), "default": True},
],
"inputs": 1,
"outputs": 1,
"inputPorts": {0: {"accepts": ["Transit"]}},
"inputPorts": {0: {"accepts": ["DocumentList", "AiResult", "TextResult", "Transit"]}},
"outputPorts": {0: {"schema": "AiResult"}},
"meta": {"icon": "mdi-robot", "color": "#9C27B0"},
"_method": "ai",

View file

@ -23,6 +23,8 @@ FILE_NODES = [
{"name": "language", "type": "string", "required": False, "frontendType": "select",
"frontendOptions": {"options": ["de", "en", "fr"]},
"description": t("Sprache"), "default": "de"},
{"name": "context", "type": "string", "required": False, "frontendType": "hidden",
"description": t("Inhalt (via Wire oder DataRef)"), "default": ""},
],
"inputs": 1,
"outputs": 1,

View file

@ -14,7 +14,7 @@ from fastapi import APIRouter, Depends, Path, Query, Body, Request, HTTPExceptio
from fastapi.responses import JSONResponse, StreamingResponse
from modules.auth import limiter, getRequestContext, RequestContext
from modules.datamodels.datamodelPagination import PaginationParams, PaginationMetadata, normalize_pagination_dict
from modules.routes.routeDataUsers import _applyFiltersAndSort
from modules.routes.routeHelpers import _applyFiltersAndSort
from modules.features.graphicalEditor.mainGraphicalEditor import getGraphicalEditorServices
from modules.features.graphicalEditor.nodeRegistry import getNodeTypesForApi

View file

@ -579,12 +579,14 @@ class NeutralizationService:
) -> Dict[str, Any]:
"""Extract -> neutralize -> adapt -> generate for PDF/DOCX/XLSX/PPTX."""
from modules.serviceCenter.services.serviceExtraction.mainServiceExtraction import ExtractionService
from modules.serviceCenter.services.serviceExtraction.subRegistry import ExtractorRegistry, ChunkerRegistry
from modules.serviceCenter.services.serviceExtraction.subPipeline import runExtraction
from modules.datamodels.datamodelExtraction import ExtractionOptions, MergeStrategy
# Ensure registries exist
if ExtractionService._sharedExtractorRegistry is None:
ExtractionService(self.services)
ExtractionService._sharedExtractorRegistry = ExtractorRegistry()
if ExtractionService._sharedChunkerRegistry is None:
ExtractionService._sharedChunkerRegistry = ChunkerRegistry()
registry = ExtractionService._sharedExtractorRegistry
chunker = ExtractionService._sharedChunkerRegistry
opts = ExtractionOptions(prompt="neutralize", mergeStrategy=MergeStrategy(preserveChunks=True))

View file

@ -216,6 +216,8 @@ def get_projects(
request: Request,
instanceId: str = Path(..., description="Feature Instance ID"),
pagination: Optional[str] = Query(None, description="JSON-encoded PaginationParams"),
mode: Optional[str] = Query(None, description="'filterValues' for distinct column values, 'ids' for all filtered IDs"),
column: Optional[str] = Query(None, description="Column key (required when mode=filterValues)"),
context: RequestContext = Depends(getRequestContext)
) -> PaginatedResponse[Projekt]:
"""Get all projects for a feature instance with optional pagination."""
@ -224,6 +226,17 @@ def get_projects(
context.user, mandateId=mandateId, featureInstanceId=instanceId
)
recordFilter = {"featureInstanceId": instanceId}
if mode in ("filterValues", "ids"):
from modules.routes.routeHelpers import handleFilterValuesInMemory, handleIdsInMemory
items = interface.getProjekte(recordFilter=recordFilter)
itemDicts = [i.model_dump() if hasattr(i, 'model_dump') else i for i in items]
if mode == "filterValues":
if not column:
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
return handleFilterValuesInMemory(itemDicts, column, pagination)
return handleIdsInMemory(itemDicts, pagination)
items = interface.getProjekte(recordFilter=recordFilter)
paginationParams = _parsePagination(pagination)
if paginationParams:
@ -254,31 +267,6 @@ def get_projects(
return PaginatedResponse(items=items, pagination=None)
@router.get("/{instanceId}/projects/filter-values")
@limiter.limit("60/minute")
def get_project_filter_values(
request: Request,
instanceId: str = Path(..., description="Feature Instance ID"),
column: str = Query(..., description="Column key"),
pagination: Optional[str] = Query(None, description="JSON-encoded current filters"),
context: RequestContext = Depends(getRequestContext)
) -> list:
"""Return distinct filter values for a column in real estate projects."""
mandateId = _validateInstanceAccess(instanceId, context)
try:
from modules.routes.routeDataUsers import _handleFilterValuesRequest
interface = getRealEstateInterface(
context.user, mandateId=mandateId, featureInstanceId=instanceId
)
recordFilter = {"featureInstanceId": instanceId}
items = interface.getProjekte(recordFilter=recordFilter)
itemDicts = [i.model_dump() if hasattr(i, 'model_dump') else i for i in items]
return _handleFilterValuesRequest(itemDicts, column, pagination)
except Exception as e:
logger.error(f"Error getting filter values for projects: {str(e)}")
raise HTTPException(status_code=500, detail=str(e))
@router.get("/{instanceId}/projects/{projectId}", response_model=Projekt)
@limiter.limit("30/minute")
def get_project_by_id(
@ -373,6 +361,8 @@ def get_parcels(
request: Request,
instanceId: str = Path(..., description="Feature Instance ID"),
pagination: Optional[str] = Query(None, description="JSON-encoded PaginationParams"),
mode: Optional[str] = Query(None, description="'filterValues' for distinct column values, 'ids' for all filtered IDs"),
column: Optional[str] = Query(None, description="Column key (required when mode=filterValues)"),
context: RequestContext = Depends(getRequestContext)
) -> PaginatedResponse[Parzelle]:
"""Get all parcels for a feature instance with optional pagination."""
@ -381,6 +371,17 @@ def get_parcels(
context.user, mandateId=mandateId, featureInstanceId=instanceId
)
recordFilter = {"featureInstanceId": instanceId}
if mode in ("filterValues", "ids"):
from modules.routes.routeHelpers import handleFilterValuesInMemory, handleIdsInMemory
items = interface.getParzellen(recordFilter=recordFilter)
itemDicts = [i.model_dump() if hasattr(i, 'model_dump') else i for i in items]
if mode == "filterValues":
if not column:
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
return handleFilterValuesInMemory(itemDicts, column, pagination)
return handleIdsInMemory(itemDicts, pagination)
items = interface.getParzellen(recordFilter=recordFilter)
paginationParams = _parsePagination(pagination)
if paginationParams:
@ -411,31 +412,6 @@ def get_parcels(
return PaginatedResponse(items=items, pagination=None)
@router.get("/{instanceId}/parcels/filter-values")
@limiter.limit("60/minute")
def get_parcel_filter_values(
request: Request,
instanceId: str = Path(..., description="Feature Instance ID"),
column: str = Query(..., description="Column key"),
pagination: Optional[str] = Query(None, description="JSON-encoded current filters"),
context: RequestContext = Depends(getRequestContext)
) -> list:
"""Return distinct filter values for a column in real estate parcels."""
mandateId = _validateInstanceAccess(instanceId, context)
try:
from modules.routes.routeDataUsers import _handleFilterValuesRequest
interface = getRealEstateInterface(
context.user, mandateId=mandateId, featureInstanceId=instanceId
)
recordFilter = {"featureInstanceId": instanceId}
items = interface.getParzellen(recordFilter=recordFilter)
itemDicts = [i.model_dump() if hasattr(i, 'model_dump') else i for i in items]
return _handleFilterValuesRequest(itemDicts, column, pagination)
except Exception as e:
logger.error(f"Error getting filter values for parcels: {str(e)}")
raise HTTPException(status_code=500, detail=str(e))
@router.get("/{instanceId}/parcels/{parcelId}", response_model=Parzelle)
@limiter.limit("30/minute")
def get_parcel_by_id(

View file

@ -327,14 +327,18 @@ QUICK_ACTIONS = [
# ---------------------------------------------------------------------------
def _buildAnalysisWorkflowGraph(prompt: str) -> Dict[str, Any]:
"""Build a standard analysis graph: trigger → refreshAccountingData → ai.prompt."""
"""Build a standard analysis graph: trigger -> refreshAccountingData -> ai.prompt."""
return {
"nodes": [
{"id": "trigger", "type": "trigger.manual", "label": "Start", "_method": "", "_action": "", "parameters": {}, "position": {"x": 0, "y": 0}},
{"id": "refresh", "type": "trustee.refreshAccountingData", "label": "Daten laden", "_method": "trustee", "_action": "refreshAccountingData",
"parameters": {"featureInstanceId": "{{featureInstanceId}}", "forceRefresh": False}, "position": {"x": 250, "y": 0}},
{"id": "analyse", "type": "ai.prompt", "label": "Analyse", "_method": "ai", "_action": "process",
"parameters": {"prompt": prompt, "simpleMode": False}, "position": {"x": 500, "y": 0}},
"parameters": {
"aiPrompt": prompt,
"context": {"type": "ref", "nodeId": "refresh", "path": ["data", "accountingData"]},
"simpleMode": False,
}, "position": {"x": 500, "y": 0}},
],
"connections": [
{"source": "trigger", "sourcePort": 0, "target": "refresh", "targetPort": 0},
@ -387,15 +391,33 @@ TEMPLATE_WORKFLOWS = [
"label": "Budget-Vergleich",
"description": "Soll/Ist-Vergleich der Buchhaltung mit Budget-Excel",
"tags": ["feature:trustee", "template:trustee-budget-comparison"],
"graph": _buildAnalysisWorkflowGraph(
"Ich möchte einen Budget-Soll/Ist-Vergleich durchführen. Bitte:\n"
"1. Frage mich nach der Budget-Datei (Excel) oder suche im Workspace nach einer Datei mit 'Budget' im Namen\n"
"2. Lade die aktuellen Buchhaltungsdaten (refreshTrusteeData falls nötig)\n"
"3. Vergleiche die Soll-Werte aus dem Budget mit den Ist-Werten aus der Buchhaltung pro Konto\n"
"4. Berechne die Abweichung (absolut und prozentual)\n"
"5. Erstelle ein Abweichungs-Chart (Balkendiagramm: Soll vs. Ist pro Konto)\n"
"6. Markiere kritische Abweichungen (>10%) und gib eine kurze Einschätzung"
),
"graph": {
"nodes": [
{"id": "trigger", "type": "trigger.manual", "label": "Start", "_method": "", "_action": "", "parameters": {}, "position": {"x": 0, "y": 0}},
{"id": "refresh", "type": "trustee.refreshAccountingData", "label": "Daten laden", "_method": "trustee", "_action": "refreshAccountingData",
"parameters": {"featureInstanceId": "{{featureInstanceId}}", "forceRefresh": False}, "position": {"x": 250, "y": 0}},
{"id": "analyse", "type": "ai.prompt", "label": "Budget-Analyse", "_method": "ai", "_action": "process",
"parameters": {
"aiPrompt": (
"Fuehre einen Budget-Soll/Ist-Vergleich durch.\n"
"Die Budget-Datei (Excel) wurde als Dokument uebergeben. "
"Die aktuellen Buchhaltungsdaten sind im Kontext verfuegbar.\n"
"1. Lies die Soll-Werte aus dem uebergebenen Budget-Dokument\n"
"2. Vergleiche sie mit den Ist-Werten aus der Buchhaltung pro Konto\n"
"3. Berechne die Abweichung (absolut und prozentual)\n"
"4. Erstelle ein Abweichungs-Chart (Balkendiagramm: Soll vs. Ist pro Konto)\n"
"5. Markiere kritische Abweichungen (>10%) und gib eine kurze Einschaetzung"
),
"documentList": {"type": "ref", "nodeId": "trigger", "path": ["payload", "documentList"]},
"context": {"type": "ref", "nodeId": "refresh", "path": ["data", "accountingData"]},
"simpleMode": False,
}, "position": {"x": 500, "y": 0}},
],
"connections": [
{"source": "trigger", "sourcePort": 0, "target": "refresh", "targetPort": 0},
{"source": "refresh", "sourcePort": 0, "target": "analyse", "targetPort": 0},
],
},
},
{
"id": "trustee-kpi-dashboard",

View file

@ -897,11 +897,16 @@ def get_documents(
request: Request,
instanceId: str = Path(..., description="Feature Instance ID"),
pagination: Optional[str] = Query(None),
mode: Optional[str] = Query(None, description="'filterValues' for distinct column values, 'ids' for all filtered IDs"),
column: Optional[str] = Query(None, description="Column key (required when mode=filterValues)"),
context: RequestContext = Depends(getRequestContext)
) -> PaginatedResponse[TrusteeDocument]:
"""Get all documents (metadata only) with optional pagination."""
mandateId = _validateInstanceAccess(instanceId, context)
if mode in ("filterValues", "ids"):
return _handleDocumentMode(instanceId, mandateId, mode, column, pagination, context)
paginationParams = _parsePagination(pagination)
interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
result = interface.getAllDocuments(paginationParams)
@ -921,36 +926,18 @@ def get_documents(
return PaginatedResponse(items=result if isinstance(result, list) else result.items, pagination=None)
@router.get("/{instanceId}/documents/filter-values")
@limiter.limit("60/minute")
def get_document_filter_values(
request: Request,
instanceId: str = Path(..., description="Feature Instance ID"),
column: str = Query(..., description="Column key"),
pagination: Optional[str] = Query(None, description="JSON-encoded current filters"),
context: RequestContext = Depends(getRequestContext)
) -> list:
"""Return distinct filter values for a column in trustee documents."""
mandateId = _validateInstanceAccess(instanceId, context)
try:
from modules.interfaces.interfaceRbac import getDistinctColumnValuesWithRBAC
interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
crossFilterPagination = None
if pagination:
try:
paginationDict = json.loads(pagination)
if paginationDict:
paginationDict = normalize_pagination_dict(paginationDict)
filters = paginationDict.get("filters", {})
filters.pop(column, None)
paginationDict["filters"] = filters
paginationDict.pop("sort", None)
crossFilterPagination = PaginationParams(**paginationDict)
except (json.JSONDecodeError, ValueError):
pass
def _handleDocumentMode(instanceId, mandateId, mode, column, pagination, context):
"""Handle mode=filterValues and mode=ids for trustee documents."""
from modules.routes.routeHelpers import handleFilterValuesInMemory, handleIdsInMemory
if mode == "filterValues":
if not column:
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
try:
from modules.interfaces.interfaceRbac import getDistinctColumnValuesWithRBAC
interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
from modules.routes.routeHelpers import parseCrossFilterPagination
crossFilterPagination = parseCrossFilterPagination(column, pagination)
from fastapi.responses import JSONResponse
values = getDistinctColumnValuesWithRBAC(
connector=interface.db,
modelClass=TrusteeDocument,
@ -962,15 +949,17 @@ def get_document_filter_values(
featureInstanceId=interface.featureInstanceId,
featureCode=interface.FEATURE_CODE
)
return sorted(values, key=lambda v: str(v).lower())
return JSONResponse(content=sorted(values, key=lambda v: str(v).lower()))
except Exception:
from modules.routes.routeDataUsers import _handleFilterValuesRequest
interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
result = interface.getAllDocuments(None)
items = [r.model_dump() if hasattr(r, 'model_dump') else r for r in (result.items if hasattr(result, 'items') else result)]
return _handleFilterValuesRequest(items, column, pagination)
except Exception as e:
logger.error(f"Error getting filter values for trustee documents: {str(e)}")
raise HTTPException(status_code=500, detail=str(e))
return handleFilterValuesInMemory(items, column, pagination)
if mode == "ids":
interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
result = interface.getAllDocuments(None)
items = [r.model_dump() if hasattr(r, 'model_dump') else r for r in (result.items if hasattr(result, 'items') else result)]
return handleIdsInMemory(items, pagination)
@router.get("/{instanceId}/documents/{documentId}", response_model=TrusteeDocument)
@ -1153,11 +1142,16 @@ def get_positions(
request: Request,
instanceId: str = Path(..., description="Feature Instance ID"),
pagination: Optional[str] = Query(None),
mode: Optional[str] = Query(None, description="'filterValues' for distinct column values, 'ids' for all filtered IDs"),
column: Optional[str] = Query(None, description="Column key (required when mode=filterValues)"),
context: RequestContext = Depends(getRequestContext)
) -> PaginatedResponse[TrusteePosition]:
"""Get all positions with optional pagination."""
mandateId = _validateInstanceAccess(instanceId, context)
if mode in ("filterValues", "ids"):
return _handlePositionMode(instanceId, mandateId, mode, column, pagination, context)
paginationParams = _parsePagination(pagination)
interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
result = interface.getAllPositions(paginationParams)
@ -1177,36 +1171,18 @@ def get_positions(
return PaginatedResponse(items=result if isinstance(result, list) else result.items, pagination=None)
@router.get("/{instanceId}/positions/filter-values")
@limiter.limit("60/minute")
def get_position_filter_values(
request: Request,
instanceId: str = Path(..., description="Feature Instance ID"),
column: str = Query(..., description="Column key"),
pagination: Optional[str] = Query(None, description="JSON-encoded current filters"),
context: RequestContext = Depends(getRequestContext)
) -> list:
"""Return distinct filter values for a column in trustee positions."""
mandateId = _validateInstanceAccess(instanceId, context)
try:
from modules.interfaces.interfaceRbac import getDistinctColumnValuesWithRBAC
interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
crossFilterPagination = None
if pagination:
try:
paginationDict = json.loads(pagination)
if paginationDict:
paginationDict = normalize_pagination_dict(paginationDict)
filters = paginationDict.get("filters", {})
filters.pop(column, None)
paginationDict["filters"] = filters
paginationDict.pop("sort", None)
crossFilterPagination = PaginationParams(**paginationDict)
except (json.JSONDecodeError, ValueError):
pass
def _handlePositionMode(instanceId, mandateId, mode, column, pagination, context):
"""Handle mode=filterValues and mode=ids for trustee positions."""
from modules.routes.routeHelpers import handleFilterValuesInMemory, handleIdsInMemory
if mode == "filterValues":
if not column:
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
try:
from modules.interfaces.interfaceRbac import getDistinctColumnValuesWithRBAC
interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
from modules.routes.routeHelpers import parseCrossFilterPagination
crossFilterPagination = parseCrossFilterPagination(column, pagination)
from fastapi.responses import JSONResponse
values = getDistinctColumnValuesWithRBAC(
connector=interface.db,
modelClass=TrusteePosition,
@ -1218,15 +1194,17 @@ def get_position_filter_values(
featureInstanceId=interface.featureInstanceId,
featureCode=interface.FEATURE_CODE
)
return sorted(values, key=lambda v: str(v).lower())
return JSONResponse(content=sorted(values, key=lambda v: str(v).lower()))
except Exception:
from modules.routes.routeDataUsers import _handleFilterValuesRequest
interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
result = interface.getAllPositions(None)
items = [r.model_dump() if hasattr(r, 'model_dump') else r for r in (result.items if hasattr(result, 'items') else result)]
return _handleFilterValuesRequest(items, column, pagination)
except Exception as e:
logger.error(f"Error getting filter values for trustee positions: {str(e)}")
raise HTTPException(status_code=500, detail=str(e))
return handleFilterValuesInMemory(items, column, pagination)
if mode == "ids":
interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
result = interface.getAllPositions(None)
items = [r.model_dump() if hasattr(r, 'model_dump') else r for r in (result.items if hasattr(result, 'items') else result)]
return handleIdsInMemory(items, pagination)
@router.get("/{instanceId}/positions/{positionId}", response_model=TrusteePosition)

View file

@ -97,38 +97,8 @@ def initBootstrap(db: DatabaseConnector) -> None:
# Apply multi-tenant database optimizations (indexes, triggers, FKs)
_applyDatabaseOptimizations(db)
# Run root-user migration (one-time, sets completion flag)
migrationDone = False
try:
from modules.migration.migrateRootUsers import migrateRootUsers, _isMigrationCompleted
migrationDone = _isMigrationCompleted(db)
if not migrationDone:
# Create root instances first (needed for migration), then migrate
if mandateId:
initRootMandateFeatures(db, mandateId)
result = migrateRootUsers(db)
migrationDone = result.get("status") != "error"
else:
migrationDone = True
except Exception as e:
logger.error(f"Root user migration failed: {e}")
# Run voice & documents migration (one-time, sets completion flag)
try:
from modules.migration.migrateVoiceAndDocuments import migrateVoiceAndDocuments
migrateVoiceAndDocuments(db)
except Exception as e:
logger.error(f"Voice & documents migration failed: {e}")
# Backfill FileContentIndex scope fields from FileItem (one-time)
try:
from modules.migration.migrateRagScopeFields import runMigration as migrateRagScope
migrateRagScope(appDb=db)
except Exception as e:
logger.error(f"RAG scope fields migration failed: {e}")
# After migration: root mandate is purely technical — no feature instances
if not migrationDone and mandateId:
# Initialize root mandate feature instances
if mandateId:
initRootMandateFeatures(db, mandateId)
# Remove feature instances for features that no longer exist in the codebase
@ -307,10 +277,11 @@ def initRootMandateFeatures(db: DatabaseConnector, mandateId: str) -> None:
for featureName, module in mainModules.items():
if hasattr(module, "getFeatureDefinition"):
try:
from modules.shared.i18nRegistry import resolveText
featureDef = module.getFeatureDefinition()
if featureDef.get("autoCreateInstance", False):
featureCode = featureDef.get("code", featureName)
featureLabel = featureDef.get("label", {}).get("en", featureName)
featureLabel = resolveText(featureDef.get("label", featureName))
featuresToCreate.append({"code": featureCode, "label": featureLabel})
logger.debug(f"Feature '{featureCode}' marked for auto-creation in root mandate")
except Exception as e:

View file

@ -1728,6 +1728,9 @@ class AppObjects:
instances = self.db.getRecordset(FeatureInstance, recordFilter={"mandateId": mandateId})
# 0-pre. Delete AutoWorkflow data in Greenfield DB (poweron_graphicaleditor)
self._cascadeDeleteGraphicalEditorData(mandateId, instances)
# 0. Delete instance-scoped data for each FeatureInstance
for inst in instances:
instId = inst.get("id")
@ -1869,6 +1872,67 @@ class AppObjects:
logger.error(f"Error deleting mandate: {str(e)}")
raise ValueError(f"Failed to delete mandate: {str(e)}")
def _cascadeDeleteGraphicalEditorData(self, mandateId: str, instances) -> None:
    """Delete AutoWorkflow + related data in the Greenfield DB for all graphicalEditor instances.

    Best-effort cascade used during mandate deletion: for every
    'graphicalEditor' FeatureInstance of the mandate, deletes each
    AutoWorkflow together with its AutoVersions, AutoRuns (including
    their AutoStepLogs) and AutoTasks in the separate
    'poweron_graphicaleditor' database. Any failure is logged as a
    warning and swallowed so the surrounding mandate delete proceeds.

    Args:
        mandateId: ID of the mandate being deleted.
        instances: FeatureInstance records of that mandate (dicts or
            model objects; both access styles are handled below).
    """
    try:
        from modules.features.graphicalEditor.datamodelFeatureGraphicalEditor import (
            AutoWorkflow, AutoVersion, AutoRun, AutoStepLog, AutoTask,
        )
        from modules.connectors.connectorDbPostgre import DatabaseConnector
        # Graphical-editor data lives in its own database, so open a
        # dedicated connector instead of reusing this object's connection.
        geDb = DatabaseConnector(
            dbHost=APP_CONFIG.get("DB_HOST", "localhost"),
            dbDatabase="poweron_graphicaleditor",
            dbUser=APP_CONFIG.get("DB_USER"),
            dbPassword=APP_CONFIG.get("DB_PASSWORD_SECRET") or APP_CONFIG.get("DB_PASSWORD"),
            dbPort=int(APP_CONFIG.get("DB_PORT", 5432)),
            userId=None,
        )
        # If the workflow table was never created there is nothing to clean up.
        if not geDb._ensureTableExists(AutoWorkflow):
            return
        # Only graphicalEditor instances own AutoWorkflow data.
        geInstances = [
            inst for inst in instances
            if (inst.get("featureCode") if isinstance(inst, dict) else getattr(inst, "featureCode", "")) == "graphicalEditor"
        ]
        totalDeleted = 0
        for inst in geInstances:
            instId = inst.get("id") if isinstance(inst, dict) else getattr(inst, "id", None)
            if not instId:
                continue
            workflows = geDb.getRecordset(AutoWorkflow, recordFilter={
                "mandateId": mandateId,
                "featureInstanceId": instId,
            }) or []
            for wf in workflows:
                wfId = wf.get("id")
                if not wfId:
                    continue
                # Delete children first: versions, runs (+ their step logs), tasks.
                for v in geDb.getRecordset(AutoVersion, recordFilter={"workflowId": wfId}) or []:
                    geDb.recordDelete(AutoVersion, v.get("id"))
                for run in geDb.getRecordset(AutoRun, recordFilter={"workflowId": wfId}) or []:
                    runId = run.get("id")
                    for sl in geDb.getRecordset(AutoStepLog, recordFilter={"runId": runId}) or []:
                        geDb.recordDelete(AutoStepLog, sl.get("id"))
                    geDb.recordDelete(AutoRun, runId)
                for task in geDb.getRecordset(AutoTask, recordFilter={"workflowId": wfId}) or []:
                    geDb.recordDelete(AutoTask, task.get("id"))
                # Finally remove the workflow record itself.
                geDb.recordDelete(AutoWorkflow, wfId)
                totalDeleted += 1
        if totalDeleted:
            logger.info(f"Cascade: deleted {totalDeleted} AutoWorkflow(s) in Greenfield DB for mandate {mandateId}")
    except Exception as e:
        # Best-effort: never let editor-data cleanup block mandate deletion.
        logger.warning(f"Failed to cascade-delete graphical editor data for mandate {mandateId}: {e}")
def restoreMandate(self, mandateId: str) -> bool:
"""Restore a soft-deleted mandate (undo soft-delete within the 30-day retention window)."""
mandate = self.getMandate(mandateId)

View file

@ -585,7 +585,7 @@ def aggregateMandateRagTotalBytes(mandateId: str) -> int:
# DEPRECATED: file-ID-correlation fallback from poweron_management.
# Only needed for pre-migration data where mandateId/featureInstanceId on the
# FileContentIndex are empty. Remove once migrateRagScopeFields has been run.
# FileContentIndex are empty. Safe to remove once all environments are migrated.
_fallbackCount = 0
try:
from modules.datamodels.datamodelFiles import FileItem

View file

@ -232,7 +232,11 @@ class FeatureInterface:
import importlib
try:
featureModule = importlib.import_module(f"modules.features.{featureCode}.main{featureCode.capitalize()}")
from modules.system.registry import loadFeatureMainModules
mainModules = loadFeatureMainModules()
featureModule = mainModules.get(featureCode)
if not featureModule:
return 0
getTemplateWorkflows = getattr(featureModule, "getTemplateWorkflows", None)
if not getTemplateWorkflows:
return 0

View file

@ -1,114 +0,0 @@
# Copyright (c) 2025 Patrick Motsch
# All rights reserved.
"""
Migration: Backfill FileContentIndex scope fields from FileItem (Single Source of Truth).
Fixes legacy rows in poweron_knowledge where scope/mandateId/featureInstanceId
are empty or default ("personal") despite the corresponding FileItem having correct values.
Idempotent — safe to run multiple times. Uses a DB flag to skip if already completed.
"""
import logging
from modules.shared.configuration import APP_CONFIG
from modules.connectors.connectorDbPostgre import _get_cached_connector
logger = logging.getLogger(__name__)
_MIGRATION_FLAG_KEY = "migration_rag_scope_fields_completed"
def _isMigrationCompleted(appDb) -> bool:
try:
from modules.datamodels.datamodelUam import Mandate
records = appDb.getRecordset(Mandate, recordFilter={"name": _MIGRATION_FLAG_KEY})
return len(records) > 0
except Exception:
return False
def _setMigrationCompleted(appDb) -> None:
    """Persist the completion flag so the migration is skipped on later runs."""
    try:
        from modules.datamodels.datamodelUam import Mandate
        flagRecord = Mandate(name=_MIGRATION_FLAG_KEY, description="RAG scope fields migration completed")
        appDb.recordCreate(Mandate, flagRecord)
    except Exception as e:
        # Non-fatal: the migration itself succeeded, only the marker is missing.
        logger.error("Could not set migration flag: %s", e)
def runMigration(appDb=None) -> dict:
    """Backfill FileContentIndex rows from FileItem metadata.

    For every FileContentIndex row in the knowledge DB, looks up the
    FileItem with the same id in the management DB and copies scope,
    mandateId and featureInstanceId over when they differ. Sets a
    completion flag afterwards so the migration only runs once.

    Args:
        appDb: App database connector; defaults to the root interface DB.

    Returns:
        dict with counts: {total, updated, skipped, orphaned}.
    """
    from modules.datamodels.datamodelKnowledge import FileContentIndex
    from modules.datamodels.datamodelFiles import FileItem
    from modules.interfaces.interfaceDbKnowledge import getInterface as getKnowledgeInterface
    from modules.interfaces.interfaceDbManagement import ComponentObjects
    if appDb is None:
        from modules.interfaces.interfaceDbApp import getRootInterface
        appDb = getRootInterface().db
    # Idempotence guard: flag record marks the migration as done.
    if _isMigrationCompleted(appDb):
        logger.info("migrateRagScopeFields: already completed, skipping")
        return {"total": 0, "updated": 0, "skipped": 0, "orphaned": 0}
    knowDb = getKnowledgeInterface(None).db
    mgmtDb = ComponentObjects().db
    allIndexes = knowDb.getRecordset(FileContentIndex, recordFilter={})
    total = len(allIndexes)
    updated = 0
    skipped = 0
    orphaned = 0
    logger.info("migrateRagScopeFields: processing %d FileContentIndex rows", total)
    for idx in allIndexes:
        idxId = idx.get("id") if isinstance(idx, dict) else getattr(idx, "id", None)
        if not idxId:
            skipped += 1
            continue
        # Looks up the FileItem by the index row's own id — presumably the
        # index was created with the file's id; TODO confirm against writer.
        fileItem = mgmtDb._loadRecord(FileItem, str(idxId))
        if not fileItem:
            orphaned += 1
            continue
        # Uniform accessors: records may be dicts or model objects.
        _get = (lambda k, d="": fileItem.get(k, d)) if isinstance(fileItem, dict) else (lambda k, d="": getattr(fileItem, k, d))
        fiScope = _get("scope") or "personal"
        fiMandateId = str(_get("mandateId") or "")
        fiFeatureInstanceId = str(_get("featureInstanceId") or "")
        idxGet = (lambda k, d="": idx.get(k, d)) if isinstance(idx, dict) else (lambda k, d="": getattr(idx, k, d))
        currentScope = idxGet("scope") or "personal"
        currentMandateId = str(idxGet("mandateId") or "")
        currentFeatureInstanceId = str(idxGet("featureInstanceId") or "")
        # Only write fields that actually differ; never blank out
        # mandateId/featureInstanceId with empty source values.
        updates = {}
        if fiScope != currentScope:
            updates["scope"] = fiScope
        if fiMandateId and fiMandateId != currentMandateId:
            updates["mandateId"] = fiMandateId
        if fiFeatureInstanceId and fiFeatureInstanceId != currentFeatureInstanceId:
            updates["featureInstanceId"] = fiFeatureInstanceId
        if updates:
            try:
                knowDb.recordModify(FileContentIndex, str(idxId), updates)
                updated += 1
                logger.debug("migrateRagScopeFields: updated %s -> %s", idxId, updates)
            except Exception as e:
                # A failed row is counted as skipped, not fatal.
                logger.error("migrateRagScopeFields: failed to update %s: %s", idxId, e)
                skipped += 1
        else:
            skipped += 1
    _setMigrationCompleted(appDb)
    logger.info(
        "migrateRagScopeFields complete: total=%d, updated=%d, skipped=%d, orphaned=%d",
        total, updated, skipped, orphaned,
    )
    return {"total": total, "updated": updated, "skipped": skipped, "orphaned": orphaned}

View file

@ -1,329 +0,0 @@
# Copyright (c) 2025 Patrick Motsch
# All rights reserved.
"""
Migration: clean up the root mandate.
Moves all end-user data from Root mandate shared instances to own mandates.
Called once from bootstrap, sets a DB flag to prevent re-execution.
"""
import logging
from typing import Optional, List, Dict, Any
logger = logging.getLogger(__name__)
_MIGRATION_FLAG_KEY = "migration_root_users_completed"
_DATA_TABLES = [
"ChatWorkflow",
"FileItem",
"DataSource",
"DataNeutralizerAttributes",
"FileContentIndex",
]
def _isMigrationCompleted(db) -> bool:
"""Check if migration has already been executed."""
try:
from modules.datamodels.datamodelUam import Mandate
records = db.getRecordset(Mandate, recordFilter={"name": _MIGRATION_FLAG_KEY})
return len(records) > 0
except Exception:
return False
def _setMigrationCompleted(db) -> None:
    """Set flag that migration is completed (uses a settings-like record)."""
    if _isMigrationCompleted(db):
        # Flag already present — nothing to do.
        return
    try:
        from modules.datamodels.datamodelUam import Mandate
        marker = Mandate(name=_MIGRATION_FLAG_KEY, label="Migration completed", enabled=False, isSystem=True)
        db.recordCreate(Mandate, marker)
        logger.info("Migration flag set: root user migration completed")
    except Exception as e:
        logger.error(f"Failed to set migration flag: {e}")
def _findOrCreateTargetInstance(db, featureInterface, featureCode: str, targetMandateId: str, rootInstance: dict) -> dict:
    """Find existing or create new FeatureInstance in target mandate. Idempotent."""
    from modules.datamodels.datamodelFeatures import FeatureInstance
    # Reuse an existing instance of this feature in the target mandate, if any.
    matches = db.getRecordset(FeatureInstance, recordFilter={
        "featureCode": featureCode,
        "mandateId": targetMandateId,
    })
    if matches:
        logger.debug(f"Target instance already exists for {featureCode} in mandate {targetMandateId}")
        return matches[0]
    # Otherwise create a fresh instance, carrying over the root instance's label.
    created = featureInterface.createFeatureInstance(
        featureCode=featureCode,
        mandateId=targetMandateId,
        label=rootInstance.get("label") or featureCode,
        enabled=True,
        copyTemplateRoles=True,
    )
    # Normalize the return value to a plain dict.
    if isinstance(created, dict):
        return created
    if hasattr(created, "model_dump"):
        return created.model_dump()
    return {"id": created.id}
def _migrateDataRecords(db, oldInstanceId: str, newInstanceId: str, userId: str) -> int:
    """Bulk-update featureInstanceId on all data tables for records owned by userId.

    Re-points rows in each table listed in _DATA_TABLES from the old
    (root) feature instance to the new one, restricted to rows the user
    created (sysCreatedBy). Tables that do not exist or lack the
    expected columns are skipped with a debug log.

    Returns:
        Total number of rows updated across all tables.
    """
    totalMigrated = 0
    db._ensure_connection()
    for tableName in _DATA_TABLES:
        try:
            with db.connection.cursor() as cursor:
                cursor.execute(
                    f'UPDATE "{tableName}" '
                    f'SET "featureInstanceId" = %s '
                    f'WHERE "featureInstanceId" = %s AND "sysCreatedBy" = %s',
                    (newInstanceId, oldInstanceId, userId),
                )
                count = cursor.rowcount
                # Commit per table so one failing table does not undo the others.
                db.connection.commit()
                if count > 0:
                    logger.info(f" Migrated {count} rows in {tableName}: {oldInstanceId} -> {newInstanceId}")
                    totalMigrated += count
        except Exception as e:
            # Roll back the failed statement to keep the connection usable.
            try:
                db.connection.rollback()
            except Exception:
                pass
            logger.debug(f" Table {tableName} skipped (may not exist or no matching column): {e}")
    return totalMigrated
def _grantFeatureAccess(db, userId: str, featureInstanceId: str) -> dict:
    """Create FeatureAccess + admin role on a feature instance. Idempotent.

    If the user already has a FeatureAccess on the instance it is
    returned unchanged. Otherwise a new FeatureAccess is created and
    linked to the instance's feature-specific admin role (a Role whose
    roleLabel ends in '-admin').

    Returns:
        The existing or newly created FeatureAccess record, or {} when
        record creation failed.

    Raises:
        ValueError: when the instance has no '-admin' role.
            NOTE(review): this raises AFTER the FeatureAccess was
            already created, leaving a role-less access record behind —
            confirm this is intentional.
    """
    from modules.datamodels.datamodelMembership import FeatureAccess, FeatureAccessRole
    from modules.datamodels.datamodelRbac import Role
    existing = db.getRecordset(FeatureAccess, recordFilter={
        "userId": userId,
        "featureInstanceId": featureInstanceId,
    })
    if existing:
        logger.debug(f"FeatureAccess already exists for user {userId} on instance {featureInstanceId}")
        return existing[0]
    fa = FeatureAccess(userId=userId, featureInstanceId=featureInstanceId, enabled=True)
    createdFa = db.recordCreate(FeatureAccess, fa.model_dump())
    if not createdFa:
        logger.warning(f"Failed to create FeatureAccess for user {userId} on instance {featureInstanceId}")
        return {}
    # Pick the instance-specific admin role by label convention.
    instanceRoles = db.getRecordset(Role, recordFilter={"featureInstanceId": featureInstanceId})
    adminRoleId = None
    for r in instanceRoles:
        roleLabel = (r.get("roleLabel") or "").lower()
        if roleLabel.endswith("-admin"):
            adminRoleId = r.get("id")
            break
    if not adminRoleId:
        raise ValueError(
            f"No feature-specific admin role for instance {featureInstanceId}. "
            f"Cannot create FeatureAccess without role — even in migration context."
        )
    far = FeatureAccessRole(featureAccessId=createdFa["id"], roleId=adminRoleId)
    db.recordCreate(FeatureAccessRole, far.model_dump())
    return createdFa
def migrateRootUsers(db, dryRun: bool = False) -> dict:
    """
    Migrate all end-user feature data from Root mandate to personal mandates.

    Algorithm:
        STEP 1: For each user with FeatureAccess on Root instances:
            - If user has own mandate: target = existing mandate
            - If not: create personal mandate via _provisionMandateForUser
            - For each FeatureAccess: create new instance in target,
              migrate data, transfer access
        STEP 2: Clean up Root:
            - Delete all FeatureInstances in Root
            - Remove UserMandate for non-sysadmin users

    Args:
        db: Database connector
        dryRun: If True, log actions without making changes

    Returns:
        Summary dict with "status" plus migration statistics counters.
    """
    # Idempotence guard: a flag record marks the migration as done.
    if _isMigrationCompleted(db):
        logger.info("Root user migration already completed, skipping")
        return {"status": "already_completed"}
    from modules.datamodels.datamodelUam import Mandate, User, UserInDB
    from modules.datamodels.datamodelMembership import (
        UserMandate, UserMandateRole, FeatureAccess, FeatureAccessRole,
    )
    from modules.datamodels.datamodelFeatures import FeatureInstance
    from modules.interfaces.interfaceDbApp import getRootInterface
    from modules.interfaces.interfaceFeatures import getFeatureInterface
    rootInterface = getRootInterface()
    featureInterface = getFeatureInterface(db)
    stats = {
        "usersProcessed": 0,
        "mandatesCreated": 0,
        "instancesMigrated": 0,
        "dataRowsMigrated": 0,
        "rootInstancesDeleted": 0,
        "rootMembershipsRemoved": 0,
        "dryRun": dryRun,
    }
    # Find root mandate
    rootMandates = db.getRecordset(Mandate, recordFilter={"name": "root", "isSystem": True})
    if not rootMandates:
        logger.warning("No root mandate found, nothing to migrate")
        return {"status": "no_root_mandate"}
    rootMandateId = rootMandates[0].get("id")
    # Get all feature instances in root
    rootInstances = db.getRecordset(FeatureInstance, recordFilter={"mandateId": rootMandateId})
    if not rootInstances:
        logger.info("No feature instances in root mandate, nothing to migrate")
        if not dryRun:
            _setMigrationCompleted(db)
        return {"status": "no_instances", **stats}
    # Get all FeatureAccess on root instances
    rootInstanceIds = {inst.get("id") for inst in rootInstances}
    # Collect unique users with access on root instances
    usersToMigrate = {}
    for instanceId in rootInstanceIds:
        accesses = db.getRecordset(FeatureAccess, recordFilter={"featureInstanceId": instanceId})
        for access in accesses:
            userId = access.get("userId")
            if userId not in usersToMigrate:
                usersToMigrate[userId] = []
            usersToMigrate[userId].append({
                "featureAccessId": access.get("id"),
                "featureInstanceId": instanceId,
            })
    logger.info(f"Migration: {len(usersToMigrate)} users with {sum(len(v) for v in usersToMigrate.values())} accesses on {len(rootInstances)} root instances")
    # STEP 1: Migrate users
    for userId, accessList in usersToMigrate.items():
        try:
            # Find user
            users = db.getRecordset(UserInDB, recordFilter={"id": userId})
            if not users:
                logger.warning(f"User {userId} not found, skipping")
                continue
            user = users[0]
            username = user.get("username", "unknown")
            # Check if user has own non-root mandate
            userMandates = db.getRecordset(UserMandate, recordFilter={"userId": userId, "enabled": True})
            targetMandateId = None
            for um in userMandates:
                mid = um.get("mandateId")
                if mid != rootMandateId:
                    targetMandateId = mid
                    break
            if not targetMandateId:
                # Create personal mandate
                if dryRun:
                    logger.info(f"[DRY RUN] Would create personal mandate for user {username}")
                    stats["mandatesCreated"] += 1
                else:
                    try:
                        result = rootInterface._provisionMandateForUser(
                            userId=userId,
                            mandateName=f"Home {username}",
                            planKey="TRIAL_14D",
                        )
                        targetMandateId = result["mandateId"]
                        stats["mandatesCreated"] += 1
                        logger.info(f"Created personal mandate {targetMandateId} for user {username}")
                    except Exception as e:
                        # Without a target mandate this user cannot be migrated.
                        logger.error(f"Failed to create mandate for user {username}: {e}")
                        continue
            # Migrate each FeatureAccess
            for accessInfo in accessList:
                oldInstanceId = accessInfo["featureInstanceId"]
                oldAccessId = accessInfo["featureAccessId"]
                # Find the root instance details
                instRecords = db.getRecordset(FeatureInstance, recordFilter={"id": oldInstanceId})
                if not instRecords:
                    continue
                featureCode = instRecords[0].get("featureCode")
                if dryRun:
                    logger.info(f"[DRY RUN] Would migrate {featureCode} for {username} to mandate {targetMandateId}")
                    stats["instancesMigrated"] += 1
                else:
                    targetInstance = _findOrCreateTargetInstance(
                        db, featureInterface, featureCode, targetMandateId, instRecords[0],
                    )
                    newInstanceId = targetInstance.get("id")
                    if not newInstanceId:
                        logger.error(f"Failed to obtain target instance for {featureCode} in mandate {targetMandateId}")
                        continue
                    # Move data rows first, then grant access on the new instance.
                    migratedCount = _migrateDataRecords(db, oldInstanceId, newInstanceId, userId)
                    _grantFeatureAccess(db, userId, newInstanceId)
                    try:
                        db.recordDelete(FeatureAccess, oldAccessId)
                    except Exception as delErr:
                        logger.warning(f"Could not remove old FeatureAccess {oldAccessId}: {delErr}")
                    logger.info(
                        f"Migrated {featureCode} for {username}: "
                        f"instance {oldInstanceId} -> {newInstanceId}, {migratedCount} data rows moved"
                    )
                    stats["instancesMigrated"] += 1
                    stats["dataRowsMigrated"] += migratedCount
            stats["usersProcessed"] += 1
        except Exception as e:
            # Per-user isolation: one failing user must not abort the migration.
            logger.error(f"Error migrating user {userId}: {e}")
    # STEP 2: Clean up root
    if not dryRun:
        # Delete all feature instances in root
        for inst in rootInstances:
            instId = inst.get("id")
            try:
                # First delete all FeatureAccess on this instance
                accesses = db.getRecordset(FeatureAccess, recordFilter={"featureInstanceId": instId})
                for access in accesses:
                    db.recordDelete(FeatureAccess, access.get("id"))
                db.recordDelete(FeatureInstance, instId)
                stats["rootInstancesDeleted"] += 1
            except Exception as e:
                logger.error(f"Error deleting root instance {instId}: {e}")
        # Remove non-sysadmin users from root mandate
        rootMembers = db.getRecordset(UserMandate, recordFilter={"mandateId": rootMandateId})
        for membership in rootMembers:
            membUserId = membership.get("userId")
            userRecords = db.getRecordset(UserInDB, recordFilter={"id": membUserId})
            if userRecords and userRecords[0].get("isSysAdmin"):
                continue
            try:
                db.recordDelete(UserMandate, membership.get("id"))
                stats["rootMembershipsRemoved"] += 1
            except Exception as e:
                logger.error(f"Error removing root membership for {membUserId}: {e}")
        _setMigrationCompleted(db)
    logger.info(f"Migration completed: {stats}")
    return {"status": "completed", **stats}

View file

@ -1,316 +0,0 @@
# Copyright (c) 2025 Patrick Motsch
# All rights reserved.
"""
Migration: Voice settings consolidation and CoachingDocument scope-tagging.
Moves VoiceSettings (workspace DB) and CoachingUserProfile voice fields (commcoach DB)
into the unified UserVoicePreferences model, and tags CoachingDocument files with
featureInstance scope before deleting the legacy records.
Called once from bootstrap, sets a DB flag to prevent re-execution.
"""
import logging
import uuid
from typing import Dict, List, Optional
from modules.connectors.connectorDbPostgre import DatabaseConnector
from modules.shared.configuration import APP_CONFIG
from modules.datamodels.datamodelUam import UserVoicePreferences
logger = logging.getLogger(__name__)
_MIGRATION_FLAG_KEY = "migration_voice_documents_completed"
def _isMigrationCompleted(db) -> bool:
    """Return True when the one-shot migration flag record already exists."""
    try:
        from modules.datamodels.datamodelUam import Mandate
        # Presence of at least one flag record means the migration already ran.
        matches = db.getRecordset(Mandate, recordFilter={"name": _MIGRATION_FLAG_KEY})
        return bool(matches)
    except Exception:
        # Any lookup failure is treated as "not completed" so bootstrap can retry.
        return False
def _setMigrationCompleted(db) -> None:
    """Persist the one-shot flag so the migration never runs twice."""
    if _isMigrationCompleted(db):
        return  # flag already present — nothing to do
    try:
        from modules.datamodels.datamodelUam import Mandate
        # A disabled system Mandate record doubles as a settings-like flag.
        flagRecord = Mandate(name=_MIGRATION_FLAG_KEY, label="Migration completed", enabled=False, isSystem=True)
        db.recordCreate(Mandate, flagRecord)
        logger.info("Migration flag set: voice & documents migration completed")
    except Exception as e:
        # Best-effort: a failed flag write only means the migration may re-run.
        logger.error(f"Failed to set migration flag: {e}")
def _getRawRows(connector: DatabaseConnector, tableName: str, columns: List[str]) -> List[Dict]:
    """Fetch every row of *tableName* (selected *columns* only) via raw SQL.

    Returns an empty list when the table does not exist or the query fails;
    on error the open transaction is rolled back so the connection stays usable.
    """
    try:
        connector._ensure_connection()
        quotedColumns = ", ".join(f'"{c}"' for c in columns)
        with connector.connection.cursor() as cursor:
            # Probe information_schema first: legacy tables may never have existed.
            cursor.execute(
                "SELECT COUNT(*) FROM information_schema.tables "
                "WHERE LOWER(table_name) = LOWER(%s) AND table_schema = 'public'",
                (tableName,),
            )
            if cursor.fetchone()["count"] == 0:
                logger.info(f"Table '{tableName}' does not exist, skipping")
                return []
            cursor.execute(f'SELECT {quotedColumns} FROM "{tableName}"')
            return [dict(row) for row in cursor.fetchall()]
    except Exception as e:
        logger.warning(f"Raw query on '{tableName}' failed: {e}")
        try:
            connector.connection.rollback()
        except Exception:
            pass  # rollback is best-effort on a broken connection
        return []
def _deleteRawRow(connector: DatabaseConnector, tableName: str, rowId: str) -> bool:
    """Remove one row (matched on its "id" column) via raw SQL; True on success."""
    try:
        connector._ensure_connection()
        with connector.connection.cursor() as cursor:
            cursor.execute(f'DELETE FROM "{tableName}" WHERE "id" = %s', (rowId,))
        connector.connection.commit()
        return True
    except Exception as e:
        logger.warning(f"Failed to delete row {rowId} from '{tableName}': {e}")
        try:
            connector.connection.rollback()
        except Exception:
            pass  # rollback is best-effort on a broken connection
        return False
def _createDbConnector(dbName: str) -> Optional[DatabaseConnector]:
    """Create a DatabaseConnector for a named database, returns None on failure.

    Host/credentials come from APP_CONFIG; only the database name varies.
    """
    try:
        return DatabaseConnector(
            dbHost=APP_CONFIG.get("DB_HOST"),
            dbDatabase=dbName,
            dbUser=APP_CONFIG.get("DB_USER"),
            dbPassword=APP_CONFIG.get("DB_PASSWORD_SECRET"),
            dbPort=int(APP_CONFIG.get("DB_PORT", 5432)),
        )
    except Exception as e:
        logger.warning(f"Could not connect to database '{dbName}': {e}")
        return None
# ─── Part A ───────────────────────────────────────────────────────────────────
def _migrateVoiceSettings(db, wsDb: DatabaseConnector, dryRun: bool, stats: Dict) -> None:
    """Migrate VoiceSettings records from poweron_workspace into UserVoicePreferences.

    For each legacy row: if the user already has UserVoicePreferences the row is
    counted as skipped (and the legacy row deleted); otherwise a new preferences
    record is created and the legacy row deleted. Failures increment stats["errors"].

    Args:
        db: Target database connector holding UserVoicePreferences
        wsDb: Source poweron_workspace connector
        dryRun: If True, only log what would happen
        stats: Mutable counters dict updated in place
    """
    import json  # hoisted: previously re-imported on every migrated row

    rows = _getRawRows(wsDb, "VoiceSettings", [
        "id", "userId", "mandateId", "ttsVoiceMap", "sttLanguage", "ttsLanguage", "ttsVoice",
    ])
    if not rows:
        logger.info("Part A: No VoiceSettings records found, skipping")
        return
    for row in rows:
        userId = row.get("userId")
        if not userId:
            # Orphan rows without a user cannot be migrated; leave them untouched.
            continue
        existing = db.getRecordset(UserVoicePreferences, recordFilter={"userId": userId})
        if existing:
            # Target record already present: count as skipped, drop the legacy row.
            stats["voiceSettingsSkipped"] += 1
            if not dryRun:
                _deleteRawRow(wsDb, "VoiceSettings", row["id"])
            continue
        if dryRun:
            logger.info(f"[DRY RUN] Would create UserVoicePreferences for user {userId} from VoiceSettings")
            stats["voiceSettingsCreated"] += 1
            continue
        try:
            ttsVoiceMap = row.get("ttsVoiceMap")
            if isinstance(ttsVoiceMap, str):
                # JSONB columns may come back as text depending on the driver.
                try:
                    ttsVoiceMap = json.loads(ttsVoiceMap)
                except (json.JSONDecodeError, TypeError):
                    ttsVoiceMap = None
            # NOTE(review): dict.get defaults apply only when the key is absent —
            # a NULL sttLanguage/ttsLanguage column still yields None. Confirm the
            # model supplies its own defaults if that matters.
            prefs = UserVoicePreferences(
                userId=userId,
                mandateId=row.get("mandateId"),
                ttsVoiceMap=ttsVoiceMap,
                sttLanguage=row.get("sttLanguage", "de-DE"),
                ttsLanguage=row.get("ttsLanguage", "de-DE"),
                ttsVoice=row.get("ttsVoice"),
            )
            db.recordCreate(UserVoicePreferences, prefs)
            stats["voiceSettingsCreated"] += 1
            _deleteRawRow(wsDb, "VoiceSettings", row["id"])
        except Exception as e:
            logger.error(f"Part A: Failed to migrate VoiceSettings {row['id']}: {e}")
            stats["errors"] += 1
# ─── Part B ───────────────────────────────────────────────────────────────────
def _migrateCoachingProfileVoice(db, ccDb: DatabaseConnector, dryRun: bool, stats: Dict) -> None:
    """Copy preferredLanguage/preferredVoice from CoachingUserProfile rows into UserVoicePreferences."""
    profileRows = _getRawRows(ccDb, "CoachingUserProfile", [
        "id", "userId", "mandateId", "preferredLanguage", "preferredVoice",
    ])
    if not profileRows:
        logger.info("Part B: No CoachingUserProfile records with voice data found, skipping")
        return
    for profile in profileRows:
        userId = profile.get("userId")
        language = profile.get("preferredLanguage")
        voice = profile.get("preferredVoice")
        # Nothing to migrate without a user or without any voice data at all.
        if not userId or (not language and not voice):
            continue
        if db.getRecordset(UserVoicePreferences, recordFilter={"userId": userId}):
            # Preferences already exist (e.g. created by Part A) — leave them alone.
            stats["coachingProfileSkipped"] += 1
            continue
        if dryRun:
            logger.info(f"[DRY RUN] Would create UserVoicePreferences for user {userId} from CoachingUserProfile")
            stats["coachingProfileCreated"] += 1
            continue
        try:
            newPrefs = UserVoicePreferences(
                userId=userId,
                mandateId=profile.get("mandateId"),
                sttLanguage=language or "de-DE",
                ttsLanguage=language or "de-DE",
                ttsVoice=voice,
            )
            db.recordCreate(UserVoicePreferences, newPrefs)
            stats["coachingProfileCreated"] += 1
        except Exception as e:
            logger.error(f"Part B: Failed to migrate CoachingUserProfile {profile['id']}: {e}")
            stats["errors"] += 1
# ─── Part C ───────────────────────────────────────────────────────────────────
def _migrateCoachingDocuments(ccDb: DatabaseConnector, dryRun: bool, stats: Dict) -> None:
    """Tag FileItem/FileContentIndex with featureInstance scope for each CoachingDocument.

    Spans three databases: reads legacy rows from commcoach (ccDb), tags the
    FileItem in poweron_management, mirrors the tag onto FileContentIndex in
    poweron_knowledge (best-effort), then deletes the legacy CoachingDocument row.

    Args:
        ccDb: Source poweron_commcoach connector
        dryRun: If True, only log what would happen
        stats: Mutable counters dict updated in place ("documentsTagged", "errors")
    """
    from modules.datamodels.datamodelFiles import FileItem
    from modules.datamodels.datamodelKnowledge import FileContentIndex
    rows = _getRawRows(ccDb, "CoachingDocument", [
        "id", "fileRef", "instanceId",
    ])
    if not rows:
        logger.info("Part C: No CoachingDocument records found, skipping")
        return
    mgmtDb = _createDbConnector("poweron_management")
    knowledgeDb = _createDbConnector("poweron_knowledge")
    if not mgmtDb:
        # FileItem lives in management; without it nothing can be tagged.
        logger.error("Part C: Cannot connect to poweron_management, aborting document migration")
        return
    for row in rows:
        fileRef = row.get("fileRef")
        instanceId = row.get("instanceId")
        docId = row.get("id")
        if not fileRef:
            # A document without a file reference is dead weight — just delete it.
            if not dryRun:
                _deleteRawRow(ccDb, "CoachingDocument", docId)
            continue
        if dryRun:
            logger.info(f"[DRY RUN] Would tag FileItem {fileRef} with featureInstanceId={instanceId}")
            stats["documentsTagged"] += 1
            continue
        try:
            fileRecords = mgmtDb.getRecordset(FileItem, recordFilter={"id": fileRef})
            if fileRecords:
                updateData = {"scope": "featureInstance"}
                if instanceId:
                    updateData["featureInstanceId"] = instanceId
                mgmtDb.recordModify(FileItem, fileRef, updateData)
                stats["documentsTagged"] += 1
            else:
                # Tagging target missing; still fall through to mirror + delete below.
                logger.warning(f"Part C: FileItem {fileRef} not found in management DB")
            if knowledgeDb:
                # Best-effort mirror: knowledge DB being unavailable is not fatal.
                fciRecords = knowledgeDb.getRecordset(FileContentIndex, recordFilter={"id": fileRef})
                if fciRecords:
                    fciUpdate = {"scope": "featureInstance"}
                    if instanceId:
                        fciUpdate["featureInstanceId"] = instanceId
                    knowledgeDb.recordModify(FileContentIndex, fileRef, fciUpdate)
            # Legacy row is only removed after tagging succeeded (no exception above).
            _deleteRawRow(ccDb, "CoachingDocument", docId)
        except Exception as e:
            logger.error(f"Part C: Failed to migrate CoachingDocument {docId}: {e}")
            stats["errors"] += 1
# ─── Main entry ───────────────────────────────────────────────────────────────
def migrateVoiceAndDocuments(db, dryRun: bool = False) -> dict:
    """
    Migrate VoiceSettings + CoachingUserProfile voice fields into UserVoicePreferences,
    and tag CoachingDocument files with featureInstance scope.

    Args:
        db: Root database connector (poweron_app)
        dryRun: If True, log actions without making changes

    Returns:
        Summary dict with migration statistics
    """
    if _isMigrationCompleted(db):
        logger.info("Voice & documents migration already completed, skipping")
        return {"status": "already_completed"}

    stats = {
        "voiceSettingsCreated": 0,
        "voiceSettingsSkipped": 0,
        "coachingProfileCreated": 0,
        "coachingProfileSkipped": 0,
        "documentsTagged": 0,
        "errors": 0,
        "dryRun": dryRun,
    }

    workspaceDb = _createDbConnector("poweron_workspace")
    commcoachDb = _createDbConnector("poweron_commcoach")

    # Part A: legacy VoiceSettings (workspace DB) -> UserVoicePreferences
    if workspaceDb:
        _migrateVoiceSettings(db, workspaceDb, dryRun, stats)
    else:
        logger.warning("Skipping Part A: poweron_workspace DB unavailable")

    # Part B: CoachingUserProfile voice fields (commcoach DB) -> UserVoicePreferences
    if commcoachDb:
        _migrateCoachingProfileVoice(db, commcoachDb, dryRun, stats)
    else:
        logger.warning("Skipping Part B: poweron_commcoach DB unavailable")

    # Part C: tag CoachingDocument files with featureInstance scope
    if commcoachDb:
        _migrateCoachingDocuments(commcoachDb, dryRun, stats)
    else:
        logger.warning("Skipping Part C: poweron_commcoach DB unavailable")

    # The completion flag is persisted only for real runs; dry runs stay repeatable.
    if not dryRun:
        _setMigrationCompleted(db)
    logger.info(f"Voice & documents migration completed: {stats}")
    return {"status": "completed", **stats}

View file

@ -18,7 +18,7 @@ import json
import math
from pydantic import BaseModel, Field
from modules.datamodels.datamodelPagination import PaginationParams, PaginationMetadata, normalize_pagination_dict
from modules.routes.routeDataUsers import _applyFiltersAndSort, _extractDistinctValues
from modules.routes.routeHelpers import _applyFiltersAndSort, handleFilterValuesInMemory, handleIdsInMemory
from modules.auth import limiter, getRequestContext, RequestContext, requireSysAdminRole
from modules.datamodels.datamodelUam import User, UserInDB
@ -405,6 +405,8 @@ def list_feature_instances(
request: Request,
featureCode: Optional[str] = Query(None, description="Filter by feature code"),
pagination: Optional[str] = Query(None, description="JSON-encoded PaginationParams object"),
mode: Optional[str] = Query(None, description="'filterValues' for distinct column values, 'ids' for all filtered IDs"),
column: Optional[str] = Query(None, description="Column key (required when mode=filterValues)"),
context: RequestContext = Depends(getRequestContext)
):
"""
@ -454,6 +456,14 @@ def list_feature_instances(
items = [inst.model_dump() for inst in instances]
if mode == "filterValues":
if not column:
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
return handleFilterValuesInMemory(items, column, pagination)
if mode == "ids":
return handleIdsInMemory(items, pagination)
if paginationParams:
filtered = _applyFiltersAndSort(items, paginationParams)
totalItems = len(filtered)
@ -484,35 +494,6 @@ def list_feature_instances(
)
@router.get("/instances/filter-values")
@limiter.limit("60/minute")
def get_feature_instance_filter_values(
request: Request,
column: str = Query(..., description="Column key"),
pagination: Optional[str] = Query(None, description="JSON-encoded current filters"),
featureCode: Optional[str] = Query(None, description="Filter by feature code"),
context: RequestContext = Depends(getRequestContext)
) -> list:
"""Return distinct filter values for a column in feature instances."""
if not context.mandateId:
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=routeApiMsg("X-Mandate-Id header is required"))
try:
from modules.routes.routeDataUsers import _handleFilterValuesRequest
rootInterface = getRootInterface()
featureInterface = getFeatureInterface(rootInterface.db)
instances = featureInterface.getFeatureInstancesForMandate(
mandateId=str(context.mandateId),
featureCode=featureCode
)
items = [inst.model_dump() for inst in instances]
return _handleFilterValuesRequest(items, column, pagination)
except HTTPException:
raise
except Exception as e:
logger.error(f"Error getting filter values for feature instances: {e}")
raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(e))
@router.get("/instances/{instanceId}", response_model=Dict[str, Any])
@limiter.limit("60/minute")
def get_feature_instance(
@ -860,6 +841,115 @@ def sync_instance_roles(
)
class SyncWorkflowsResult(BaseModel):
    """Response model for workflow synchronization"""
    added: int    # number of template workflows copied to the instance in this call
    skipped: int  # templates already present on the instance (matched by templateSourceId)
    total: int    # total number of template workflows defined for the feature
@router.post("/instances/{instanceId}/sync-workflows", response_model=SyncWorkflowsResult)
@limiter.limit("10/minute")
def _syncInstanceWorkflows(
request: Request,
instanceId: str,
context: RequestContext = Depends(getRequestContext)
) -> SyncWorkflowsResult:
"""
Synchronize template workflows for a feature instance.
Copies missing template workflows to the instance. Workflows that already
exist (matched by templateSourceId) are skipped. This is useful for
instances created before template workflows were defined, or when
the initial copy failed silently.
SysAdmin only.
"""
try:
requireSysAdminRole(context.user)
rootInterface = getRootInterface()
featureInterface = getFeatureInterface(rootInterface.db)
instance = featureInterface.getFeatureInstance(instanceId)
if not instance:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail=f"Feature instance '{instanceId}' not found"
)
featureCode = instance.get("featureCode") if isinstance(instance, dict) else instance.featureCode
mandateId = instance.get("mandateId") if isinstance(instance, dict) else instance.mandateId
from modules.system.registry import loadFeatureMainModules
mainModules = loadFeatureMainModules()
featureModule = mainModules.get(featureCode)
if not featureModule:
return SyncWorkflowsResult(added=0, skipped=0, total=0)
getTemplateWorkflows = getattr(featureModule, "getTemplateWorkflows", None)
if not getTemplateWorkflows:
return SyncWorkflowsResult(added=0, skipped=0, total=0)
templateWorkflows = getTemplateWorkflows()
if not templateWorkflows:
return SyncWorkflowsResult(added=0, skipped=0, total=0)
from modules.features.graphicalEditor.interfaceFeatureGraphicalEditor import getGraphicalEditorInterface
from modules.security.rootAccess import getRootUser
rootUser = getRootUser()
geInterface = getGraphicalEditorInterface(rootUser, mandateId, instanceId)
existingWorkflows = geInterface.getWorkflows() or []
existingSourceIds = set()
for w in existingWorkflows:
sourceId = w.get("templateSourceId") if isinstance(w, dict) else getattr(w, "templateSourceId", None)
if sourceId:
existingSourceIds.add(sourceId)
added = 0
skipped = 0
for template in templateWorkflows:
if template["id"] in existingSourceIds:
skipped += 1
continue
import json as _json
graphJson = _json.dumps(template.get("graph", {}))
graphJson = graphJson.replace("{{featureInstanceId}}", instanceId)
graph = _json.loads(graphJson)
label = resolveText(template.get("label"))
geInterface.createWorkflow({
"label": label,
"graph": graph,
"tags": template.get("tags", [f"feature:{featureCode}"]),
"isTemplate": False,
"templateSourceId": template["id"],
"templateScope": "instance",
"active": True,
})
added += 1
logger.info(
f"User {context.user.id} synced workflows for instance {instanceId} "
f"({featureCode}): added={added}, skipped={skipped}"
)
return SyncWorkflowsResult(added=added, skipped=skipped, total=len(templateWorkflows))
except HTTPException:
raise
except Exception as e:
logger.error(f"Error syncing workflows for instance {instanceId}: {e}")
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"Failed to sync workflows: {str(e)}"
)
# =============================================================================
# Template Role Endpoints (SysAdmin only)
# =============================================================================
@ -883,6 +973,8 @@ def list_template_roles(
request: Request,
featureCode: Optional[str] = Query(None, description="Filter by feature code"),
pagination: Optional[str] = Query(None, description="JSON-encoded PaginationParams"),
mode: Optional[str] = Query(None, description="'filterValues' for distinct column values, 'ids' for all filtered IDs"),
column: Optional[str] = Query(None, description="Column key (required when mode=filterValues)"),
sysAdmin: User = Depends(requireSysAdminRole),
):
"""List global template roles with pagination support."""
@ -898,6 +990,15 @@ def list_template_roles(
raise HTTPException(status_code=400, detail=f"Invalid pagination parameter: {str(e)}")
enriched = _buildTemplateRolesList(featureCode)
if mode == "filterValues":
if not column:
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
return handleFilterValuesInMemory(enriched, column, pagination)
if mode == "ids":
return handleIdsInMemory(enriched, pagination)
filtered = _applyFiltersAndSort(enriched, paginationParams)
if paginationParams:
@ -927,39 +1028,6 @@ def list_template_roles(
)
@router.get("/templates/roles/filter-values")
@limiter.limit("60/minute")
def get_template_role_filter_values(
request: Request,
column: str = Query(..., description="Column key"),
featureCode: Optional[str] = Query(None, description="Filter by feature code"),
pagination: Optional[str] = Query(None, description="JSON-encoded current filters"),
sysAdmin: User = Depends(requireSysAdminRole),
):
"""Return distinct filter values for a column in template roles."""
try:
crossFilterParams: Optional[PaginationParams] = None
if pagination:
try:
paginationDict = json.loads(pagination)
if paginationDict:
paginationDict = normalize_pagination_dict(paginationDict)
filters = paginationDict.get("filters", {})
filters.pop(column, None)
paginationDict["filters"] = filters
paginationDict.pop("sort", None)
crossFilterParams = PaginationParams(**paginationDict)
except (json.JSONDecodeError, ValueError):
pass
enriched = _buildTemplateRolesList(featureCode)
crossFiltered = _applyFiltersAndSort(enriched, crossFilterParams)
return _extractDistinctValues(crossFiltered, column)
except Exception as e:
logger.error(f"Error getting filter values: {e}")
raise HTTPException(status_code=500, detail=str(e))
@router.post("/templates/roles", response_model=Dict[str, Any])
@limiter.limit("10/minute")
def create_template_role(
@ -1051,6 +1119,8 @@ def list_feature_instance_users(
request: Request,
instanceId: str,
pagination: Optional[str] = Query(None, description="JSON-encoded PaginationParams object"),
mode: Optional[str] = Query(None, description="'filterValues' for distinct column values, 'ids' for all filtered IDs"),
column: Optional[str] = Query(None, description="Column key (required when mode=filterValues)"),
context: RequestContext = Depends(getRequestContext)
):
"""
@ -1114,6 +1184,14 @@ def list_feature_instance_users(
items = [r.model_dump() for r in result]
if mode == "filterValues":
if not column:
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
return handleFilterValuesInMemory(items, column, pagination)
if mode == "ids":
return handleIdsInMemory(items, pagination)
paginationParams = None
if pagination:
try:
@ -1150,56 +1228,6 @@ def list_feature_instance_users(
)
@router.get("/instances/{instanceId}/users/filter-values")
@limiter.limit("60/minute")
def get_feature_instance_users_filter_values(
request: Request,
instanceId: str,
column: str = Query(..., description="Column key"),
pagination: Optional[str] = Query(None, description="JSON-encoded current filters"),
context: RequestContext = Depends(getRequestContext)
) -> list:
"""Return distinct filter values for a column in feature instance users."""
try:
from modules.routes.routeDataUsers import _handleFilterValuesRequest
rootInterface = getRootInterface()
featureInterface = getFeatureInterface(rootInterface.db)
instance = featureInterface.getFeatureInstance(instanceId)
if not instance:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=f"Feature instance '{instanceId}' not found")
if context.mandateId and str(instance.mandateId) != str(context.mandateId):
if not context.hasSysAdminRole:
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=routeApiMsg("Access denied to this feature instance"))
featureAccesses = rootInterface.getFeatureAccessesByInstance(instanceId)
result = []
for fa in featureAccesses:
user = rootInterface.getUser(str(fa.userId))
if not user:
continue
roleIds = rootInterface.getRoleIdsForFeatureAccess(str(fa.id))
roleLabels = []
for roleId in roleIds:
role = rootInterface.getRole(roleId)
if role:
roleLabels.append(role.roleLabel)
result.append({
"id": str(fa.id),
"userId": str(fa.userId),
"username": user.username,
"email": user.email,
"fullName": user.fullName,
"roleIds": roleIds,
"roleLabels": roleLabels,
"enabled": fa.enabled
})
return _handleFilterValuesRequest(result, column, pagination)
except HTTPException:
raise
except Exception as e:
logger.error(f"Error getting filter values for feature instance users: {e}")
raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(e))
@router.post("/instances/{instanceId}/users", response_model=Dict[str, Any])
@limiter.limit("30/minute")
def add_user_to_feature_instance(

View file

@ -810,6 +810,8 @@ def list_roles(
includeTemplates: bool = Query(False, description="Include feature template roles"),
mandateId: Optional[str] = Query(None, description="Include mandate-specific roles for this mandate"),
scopeFilter: Optional[str] = Query(None, description="Filter by scope: 'all', 'mandate', 'global', 'system'"),
mode: Optional[str] = Query(None, description="'filterValues' for distinct column values, 'ids' for all filtered IDs"),
column: Optional[str] = Query(None, description="Column key (required when mode=filterValues)"),
reqContext: RequestContext = Depends(getRequestContext)
) -> PaginatedResponse:
"""
@ -924,6 +926,16 @@ def list_roles(
if not isSysAdmin:
result = [r for r in result if r.get("mandateId") and str(r["mandateId"]) in adminMandateIds]
if mode == "filterValues":
if not column:
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
from modules.routes.routeHelpers import handleFilterValuesInMemory
return handleFilterValuesInMemory(result, column, pagination)
if mode == "ids":
from modules.routes.routeHelpers import handleIdsInMemory
return handleIdsInMemory(result, pagination)
# Apply search, filtering and sorting if pagination requested
if paginationParams:
# Apply search (if search term provided in filters)
@ -987,77 +999,6 @@ def list_roles(
)
@router.get("/roles/filter-values")
@limiter.limit("60/minute")
def get_roles_filter_values(
request: Request,
column: str = Query(..., description="Column key"),
pagination: Optional[str] = Query(None, description="JSON-encoded current filters"),
includeTemplates: bool = Query(False, description="Include feature template roles"),
mandateId: Optional[str] = Query(None, description="Include mandate-specific roles for this mandate"),
scopeFilter: Optional[str] = Query(None, description="Filter by scope: 'all', 'mandate', 'global', 'system'"),
reqContext: RequestContext = Depends(getRequestContext)
) -> list:
"""Return distinct filter values for a column in roles."""
try:
from modules.routes.routeDataUsers import _handleFilterValuesRequest
isSysAdmin = reqContext.hasSysAdminRole
adminMandateIds = [] if isSysAdmin else _getAdminMandateIds(reqContext)
if not isSysAdmin and not adminMandateIds:
raise HTTPException(status_code=403, detail=routeApiMsg("Admin role required"))
interface = getRootInterface()
dbRoles = interface.getAllRoles(pagination=None)
roleCounts = interface.countRoleAssignments()
def _computeScopeType(role) -> str:
if role.mandateId:
return "mandate"
if role.isSystemRole:
return "system"
return "global"
result = []
for role in dbRoles:
if role.featureInstanceId is not None:
continue
if mandateId:
if role.mandateId != mandateId:
continue
else:
if role.mandateId is not None:
continue
if not includeTemplates and role.featureCode is not None:
continue
scopeType = _computeScopeType(role)
if scopeFilter and scopeFilter != 'all':
if scopeFilter == 'mandate' and scopeType != 'mandate':
continue
if scopeFilter == 'global' and scopeType not in ('global', 'system'):
continue
if scopeFilter == 'system' and scopeType != 'system':
continue
result.append({
"id": role.id,
"roleLabel": role.roleLabel,
"description": resolveText(role.description),
"mandateId": role.mandateId,
"featureInstanceId": role.featureInstanceId,
"featureCode": role.featureCode,
"userCount": roleCounts.get(str(role.id), 0),
"isSystemRole": role.isSystemRole,
"scopeType": scopeType
})
if not isSysAdmin:
result = [r for r in result if r.get("mandateId") and str(r["mandateId"]) in adminMandateIds]
return _handleFilterValuesRequest(result, column, pagination)
except HTTPException:
raise
except Exception as e:
logger.error(f"Error getting filter values for roles: {str(e)}")
raise HTTPException(status_code=500, detail=str(e))
@router.post("/roles", response_model=Dict[str, Any])
@limiter.limit("30/minute")
def create_role(

View file

@ -25,7 +25,6 @@ from modules.serviceCenter.services.serviceBilling.mainServiceBilling import get
import json
import math
from modules.datamodels.datamodelPagination import PaginationParams, PaginatedResponse, PaginationMetadata, normalize_pagination_dict
from modules.routes.routeDataUsers import _applyFiltersAndSort, _extractDistinctValues, _handleFilterValuesRequest
from modules.datamodels.datamodelBilling import (
BillingAccount,
BillingTransaction,
@ -1707,6 +1706,8 @@ def getUserViewTransactions(
scope: str = Query(default="all", description="Scope: 'personal' (own costs only), 'mandate' (filter by mandateId), 'all' (RBAC-filtered)"),
mandateId: Optional[str] = Query(None, description="Mandate ID filter (used with scope='mandate')"),
onlyMine: Optional[bool] = Query(None, description="Additional filter: restrict to current user's transactions within the selected scope"),
mode: Optional[str] = Query(None, description="'filterValues' for distinct column values, 'ids' for all filtered IDs"),
column: Optional[str] = Query(None, description="Column key (required when mode=filterValues)"),
ctx: RequestContext = Depends(getRequestContext)
) -> PaginatedResponse[UserTransactionResponse]:
"""
@ -1726,16 +1727,10 @@ def getUserViewTransactions(
- mandateId: required when scope='mandate'
- onlyMine: true to restrict to current user's data within the scope
"""
from modules.routes.routeHelpers import parseCrossFilterPagination
try:
billingInterface = getBillingInterface(ctx.user, ctx.mandateId)
paginationParams = None
if pagination:
import json
paginationDict = json.loads(pagination)
paginationDict = normalize_pagination_dict(paginationDict)
paginationParams = PaginationParams(**paginationDict)
rbacScope = _getBillingDataScope(ctx.user)
if rbacScope.isGlobalAdmin:
@ -1743,14 +1738,54 @@ def getUserViewTransactions(
else:
loadMandateIds = rbacScope.adminMandateIds + rbacScope.memberMandateIds
if not loadMandateIds:
if mode:
return []
return PaginatedResponse(items=[], pagination=None)
if scope == "mandate" and mandateId:
loadMandateIds = [mandateId]
effectiveScope = scope
personalUserId = str(ctx.user.id) if (scope == "personal" or onlyMine) else None
if mode == "filterValues":
if not column:
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
from fastapi.responses import JSONResponse
crossFilterParams = parseCrossFilterPagination(column, pagination)
values = billingInterface.getTransactionDistinctValues(
mandateIds=loadMandateIds,
column=column,
pagination=crossFilterParams,
scope=scope,
userId=personalUserId,
)
return JSONResponse(content=values)
if mode == "ids":
from fastapi.responses import JSONResponse
paginationParams = None
if pagination:
import json as _json
paginationDict = _json.loads(pagination)
paginationDict = normalize_pagination_dict(paginationDict)
paginationParams = PaginationParams(**paginationDict)
ids = billingInterface.getTransactionIds(
mandateIds=loadMandateIds,
pagination=paginationParams,
scope=scope,
userId=personalUserId,
) if hasattr(billingInterface, 'getTransactionIds') else []
return JSONResponse(content=ids)
paginationParams = None
if pagination:
import json as _json
paginationDict = _json.loads(pagination)
paginationDict = normalize_pagination_dict(paginationDict)
paginationParams = PaginationParams(**paginationDict)
effectiveScope = scope
if not paginationParams:
paginationParams = PaginationParams(page=1, pageSize=50)
@ -1800,58 +1835,3 @@ def getUserViewTransactions(
except Exception as e:
logger.error(f"Error getting user view transactions: {e}")
raise HTTPException(status_code=500, detail=str(e))
@router.get("/view/users/transactions/filter-values")
@limiter.limit("60/minute")
def getUserViewTransactionsFilterValues(
request: Request,
column: str = Query(..., description="Column key"),
pagination: Optional[str] = Query(None, description="JSON-encoded current filters"),
scope: str = Query(default="all", description="Scope: 'personal', 'mandate', 'all'"),
mandateId: Optional[str] = Query(None, description="Mandate ID filter (used with scope='mandate')"),
onlyMine: Optional[bool] = Query(None, description="Additional filter: restrict to current user's data within the selected scope"),
ctx: RequestContext = Depends(getRequestContext)
):
"""Return distinct filter values for a column in user transactions (SQL DISTINCT)."""
try:
billingInterface = getBillingInterface(ctx.user, ctx.mandateId)
rbacScope = _getBillingDataScope(ctx.user)
if rbacScope.isGlobalAdmin:
loadMandateIds = None
else:
loadMandateIds = rbacScope.adminMandateIds + rbacScope.memberMandateIds
if not loadMandateIds:
return []
if scope == "mandate" and mandateId:
loadMandateIds = [mandateId]
crossFilterParams = None
if pagination:
try:
import json
paginationDict = json.loads(pagination)
if paginationDict:
paginationDict = normalize_pagination_dict(paginationDict)
filters = paginationDict.get("filters", {})
filters.pop(column, None)
paginationDict["filters"] = filters
paginationDict.pop("sort", None)
crossFilterParams = PaginationParams(**paginationDict)
except (json.JSONDecodeError, ValueError):
pass
personalUserId = str(ctx.user.id) if (scope == "personal" or onlyMine) else None
return billingInterface.getTransactionDistinctValues(
mandateIds=loadMandateIds,
column=column,
pagination=crossFilterParams,
scope=scope,
userId=personalUserId,
)
except Exception as e:
logger.error(f"Error getting filter values for user transactions: {e}")
raise HTTPException(status_code=500, detail=str(e))

View file

@ -132,6 +132,8 @@ def get_auth_authority_options(
async def get_connections(
request: Request,
pagination: Optional[str] = Query(None, description="JSON-encoded PaginationParams object"),
mode: Optional[str] = Query(None, description="'filterValues' for distinct column values, 'ids' for all filtered IDs"),
column: Optional[str] = Query(None, description="Column key (required when mode=filterValues)"),
currentUser: User = Depends(getCurrentUser)
) -> PaginatedResponse[UserConnection]:
"""Get connections for the current user with optional pagination, sorting, and filtering.
@ -146,7 +148,49 @@ async def get_connections(
- GET /api/connections/ (no pagination - returns all items)
- GET /api/connections/?pagination={"page":1,"pageSize":10,"sort":[]}
- GET /api/connections/?pagination={"page":1,"pageSize":10,"filters":{"status":"active"}}
- GET /api/connections/?mode=filterValues&column=status
- GET /api/connections/?mode=ids
"""
from modules.routes.routeHelpers import handleFilterValuesInMemory, handleIdsInMemory
def _buildEnhancedItems():
interface = getInterface(currentUser)
connections = interface.getUserConnections(currentUser.id)
items = []
for connection in connections:
tokenStatus, tokenExpiresAt = getTokenStatusForConnection(interface, connection.id)
items.append({
"id": connection.id,
"userId": connection.userId,
"authority": connection.authority.value if hasattr(connection.authority, 'value') else str(connection.authority),
"externalId": connection.externalId,
"externalUsername": connection.externalUsername or "",
"externalEmail": connection.externalEmail,
"status": connection.status.value if hasattr(connection.status, 'value') else str(connection.status),
"connectedAt": connection.connectedAt,
"lastChecked": connection.lastChecked,
"expiresAt": connection.expiresAt,
"tokenStatus": tokenStatus,
"tokenExpiresAt": tokenExpiresAt
})
return items
if mode == "filterValues":
if not column:
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
try:
return handleFilterValuesInMemory(_buildEnhancedItems(), column, pagination)
except Exception as e:
logger.error(f"Error getting filter values for connections: {str(e)}")
raise HTTPException(status_code=500, detail=str(e))
if mode == "ids":
try:
return handleIdsInMemory(_buildEnhancedItems(), pagination)
except Exception as e:
logger.error(f"Error getting IDs for connections: {str(e)}")
raise HTTPException(status_code=500, detail=str(e))
try:
interface = getInterface(currentUser)
@ -295,42 +339,6 @@ async def get_connections(
detail=f"Failed to get connections: {str(e)}"
)
@router.get("/filter-values")
@limiter.limit("60/minute")
def get_connection_filter_values(
    request: Request,
    column: str = Query(..., description="Column key"),
    pagination: Optional[str] = Query(None, description="JSON-encoded current filters"),
    currentUser: User = Depends(getCurrentUser)
) -> List[str]:
    """Return distinct filter values for a column in connections."""
    try:
        from modules.routes.routeDataUsers import _handleFilterValuesRequest

        def _enumValue(raw):
            # Enum members expose .value; anything else is stringified.
            return raw.value if hasattr(raw, 'value') else str(raw)

        interface = getInterface(currentUser)

        def _asRow(conn):
            # Flatten a connection (plus its token state) into a plain dict row.
            tokenStatus, tokenExpiresAt = getTokenStatusForConnection(interface, conn.id)
            return {
                "id": conn.id,
                "userId": conn.userId,
                "authority": _enumValue(conn.authority),
                "externalId": conn.externalId,
                "externalUsername": conn.externalUsername or "",
                "externalEmail": conn.externalEmail,
                "status": _enumValue(conn.status),
                "connectedAt": conn.connectedAt,
                "lastChecked": conn.lastChecked,
                "expiresAt": conn.expiresAt,
                "tokenStatus": tokenStatus,
                "tokenExpiresAt": tokenExpiresAt
            }

        rows = [_asRow(c) for c in interface.getUserConnections(currentUser.id)]
        return _handleFilterValuesRequest(rows, column, pagination)
    except Exception as e:
        logger.error(f"Error getting filter values for connections: {str(e)}")
        raise HTTPException(status_code=500, detail=str(e))
@router.post("/", response_model=UserConnection)
@limiter.limit("10/minute")
def create_connection(

View file

@ -177,6 +177,8 @@ router = APIRouter(
def get_files(
request: Request,
pagination: Optional[str] = Query(None, description="JSON-encoded PaginationParams object"),
mode: Optional[str] = Query(None, description="'filterValues' for distinct column values, 'ids' for all filtered IDs"),
column: Optional[str] = Query(None, description="Column key (required when mode=filterValues)"),
currentUser: User = Depends(getCurrentUser),
context: RequestContext = Depends(getRequestContext)
) -> PaginatedResponse[FileItem]:
@ -207,20 +209,45 @@ def get_files(
detail=f"Invalid pagination parameter: {str(e)}"
)
recordFilter = None
if paginationParams and paginationParams.filters and "folderId" in paginationParams.filters:
fVal = paginationParams.filters.pop("folderId")
recordFilter = {"folderId": fVal}
from modules.routes.routeHelpers import (
handleFilterValuesInMemory,
handleIdsMode,
parseCrossFilterPagination,
)
managementInterface = interfaceDbManagement.getInterface(
currentUser,
mandateId=str(context.mandateId) if context.mandateId else None,
featureInstanceId=str(context.featureInstanceId) if context.featureInstanceId else None
)
if mode == "filterValues":
if not column:
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
crossPagination = parseCrossFilterPagination(column, pagination)
recordFilter = {"sysCreatedBy": managementInterface.userId}
try:
from fastapi.responses import JSONResponse
values = managementInterface.db.getDistinctColumnValues(
FileItem, column, crossPagination, recordFilter
)
return JSONResponse(content=sorted(values, key=lambda v: str(v).lower()))
except Exception:
result = managementInterface.getAllFiles(pagination=None)
items = [r.model_dump() if hasattr(r, 'model_dump') else r for r in result]
return handleFilterValuesInMemory(items, column, pagination)
if mode == "ids":
recordFilter = {"sysCreatedBy": managementInterface.userId}
return handleIdsMode(managementInterface.db, FileItem, pagination, recordFilter)
recordFilter = None
if paginationParams and paginationParams.filters and "folderId" in paginationParams.filters:
fVal = paginationParams.filters.pop("folderId")
recordFilter = {"folderId": fVal}
result = managementInterface.getAllFiles(pagination=paginationParams, recordFilter=recordFilter)
# If pagination was requested, result is PaginatedResult
# If no pagination, result is List[FileItem]
if paginationParams:
return PaginatedResponse(
items=result.items,
@ -247,55 +274,6 @@ def get_files(
detail=f"Failed to get files: {str(e)}"
)
@router.get("/list/filter-values")
@limiter.limit("60/minute")
def get_file_filter_values(
    request: Request,
    column: str = Query(..., description="Column key"),
    pagination: Optional[str] = Query(None, description="JSON-encoded current filters"),
    currentUser: User = Depends(getCurrentUser),
    context: RequestContext = Depends(getRequestContext)
) -> list:
    """Return distinct filter values for a column in files."""
    try:
        managementInterface = interfaceDbManagement.getInterface(
            currentUser,
            mandateId=str(context.mandateId) if context.mandateId else None,
            featureInstanceId=str(context.featureInstanceId) if context.featureInstanceId else None
        )

        def _crossFilterParams():
            # Rebuild the pagination object without the requested column's own
            # filter (cross-filtering) and without any sort order; malformed
            # JSON simply yields no cross-filtering.
            if not pagination:
                return None
            try:
                parsed = json.loads(pagination)
                if not parsed:
                    return None
                parsed = normalize_pagination_dict(parsed)
                activeFilters = parsed.get("filters", {})
                activeFilters.pop(column, None)
                parsed["filters"] = activeFilters
                parsed.pop("sort", None)
                return PaginationParams(**parsed)
            except (json.JSONDecodeError, ValueError):
                return None

        # Parse eagerly so unexpected parse errors surface via the outer handler.
        crossFilterPagination = _crossFilterParams()
        try:
            # Fast path: SQL DISTINCT scoped to the caller's own records.
            ownRecords = {"sysCreatedBy": managementInterface.userId}
            distinct = managementInterface.db.getDistinctColumnValues(
                FileItem, column, crossFilterPagination, ownRecords
            )
            return sorted(distinct, key=lambda v: str(v).lower())
        except Exception:
            # Fallback: load all files and extract values in memory.
            from modules.routes.routeDataUsers import _handleFilterValuesRequest
            allFiles = managementInterface.getAllFiles(pagination=None)
            rows = [f.model_dump() if hasattr(f, 'model_dump') else f for f in allFiles]
            return _handleFilterValuesRequest(rows, column, pagination)
    except Exception as e:
        logger.error(f"Error getting filter values for files: {str(e)}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=str(e)
        )
@router.post("/upload", status_code=status.HTTP_201_CREATED)
@limiter.limit("10/minute")

View file

@ -84,6 +84,8 @@ router = APIRouter(
def get_mandates(
request: Request,
pagination: Optional[str] = Query(None, description="JSON-encoded PaginationParams object"),
mode: Optional[str] = Query(None, description="'filterValues' for distinct column values, 'ids' for all filtered IDs"),
column: Optional[str] = Query(None, description="Column key (required when mode=filterValues)"),
context: RequestContext = Depends(getRequestContext)
) -> PaginatedResponse[Mandate]:
"""
@ -122,13 +124,50 @@ def get_mandates(
detail=f"Invalid pagination parameter: {str(e)}"
)
from modules.routes.routeHelpers import (
handleFilterValuesInMemory, handleIdsInMemory,
handleFilterValuesMode, handleIdsMode,
parseCrossFilterPagination,
)
appInterface = interfaceDbApp.getRootInterface()
if mode == "filterValues":
if not column:
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
if isSysAdmin:
crossPagination = parseCrossFilterPagination(column, pagination)
try:
from fastapi.responses import JSONResponse
values = appInterface.db.getDistinctColumnValues(Mandate, column, crossPagination)
return JSONResponse(content=sorted(values, key=lambda v: str(v).lower()))
except Exception:
result = appInterface.getAllMandates(pagination=None)
items = result if isinstance(result, list) else (result.items if hasattr(result, 'items') else result)
items = [i.model_dump() if hasattr(i, 'model_dump') else i for i in items]
return handleFilterValuesInMemory(items, column, pagination)
else:
mandateItems = []
for mid in adminMandateIds:
m = appInterface.getMandate(mid)
if m and getattr(m, "enabled", True):
mandateItems.append(m.model_dump() if hasattr(m, 'model_dump') else m if isinstance(m, dict) else vars(m))
return handleFilterValuesInMemory(mandateItems, column, pagination)
if mode == "ids":
if isSysAdmin:
return handleIdsMode(appInterface.db, Mandate, pagination)
else:
mandateItems = []
for mid in adminMandateIds:
m = appInterface.getMandate(mid)
if m and getattr(m, "enabled", True):
mandateItems.append(m.model_dump() if hasattr(m, 'model_dump') else m if isinstance(m, dict) else vars(m))
return handleIdsInMemory(mandateItems, pagination)
if isSysAdmin:
# SysAdmin: all mandates
result = appInterface.getAllMandates(pagination=paginationParams)
else:
# MandateAdmin: only their enabled mandates
allMandates = []
for mandateId in adminMandateIds:
mandate = appInterface.getMandate(mandateId)
@ -136,10 +175,8 @@ def get_mandates(
mandateDict = mandate if isinstance(mandate, dict) else mandate.model_dump() if hasattr(mandate, 'model_dump') else vars(mandate)
allMandates.append(mandateDict)
result = allMandates
paginationParams = None # Client-side pagination for filtered results
paginationParams = None
# If pagination was requested, result is PaginatedResult
# If no pagination, result is List[Mandate]
if paginationParams and hasattr(result, 'items'):
return PaginatedResponse(
items=result.items,
@ -167,65 +204,6 @@ def get_mandates(
detail=f"Failed to get mandates: {str(e)}"
)
@router.get("/filter-values")
@limiter.limit("60/minute")
def get_mandate_filter_values(
    request: Request,
    column: str = Query(..., description="Column key"),
    pagination: Optional[str] = Query(None, description="JSON-encoded current filters"),
    context: RequestContext = Depends(getRequestContext)
) -> list:
    """Return distinct filter values for a column in mandates.

    SysAdmins see values across all mandates (SQL DISTINCT when possible,
    in-memory fallback otherwise); mandate admins only see values from the
    mandates they administer.
    """
    try:
        from modules.routes.routeDataUsers import _handleFilterValuesRequest
        isSysAdmin = context.hasSysAdminRole
        if not isSysAdmin:
            # Non-sysadmins must administer at least one mandate to use this endpoint.
            adminMandateIds = _getAdminMandateIds(context)
            if not adminMandateIds:
                raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=routeApiMsg("Admin role required"))
        appInterface = interfaceDbApp.getRootInterface()
        if isSysAdmin:
            # SysAdmin: try SQL DISTINCT for DB columns
            # Cross-filtering: keep every active filter except the one on the
            # requested column, and drop any sort order.
            crossFilterPagination = None
            if pagination:
                try:
                    paginationDict = json.loads(pagination)
                    if paginationDict:
                        paginationDict = normalize_pagination_dict(paginationDict)
                        filters = paginationDict.get("filters", {})
                        filters.pop(column, None)
                        paginationDict["filters"] = filters
                        paginationDict.pop("sort", None)
                        crossFilterPagination = PaginationParams(**paginationDict)
                except (json.JSONDecodeError, ValueError):
                    # Malformed pagination JSON: ignore and return uncross-filtered values.
                    pass
            try:
                values = appInterface.db.getDistinctColumnValues(
                    Mandate, column, crossFilterPagination
                )
                return sorted(values, key=lambda v: v.lower())
            except Exception:
                # DISTINCT failed (e.g. column not a plain DB column):
                # load all mandates and extract the values in memory.
                result = appInterface.getAllMandates(pagination=None)
                items = result if isinstance(result, list) else (result.items if hasattr(result, 'items') else result)
                items = [i.model_dump() if hasattr(i, 'model_dump') else i for i in items]
                return _handleFilterValuesRequest(items, column, pagination)
        else:
            # MandateAdmin: in-memory (small set of individual mandate lookups)
            result = []
            for mid in adminMandateIds:
                mandate = appInterface.getMandate(mid)
                if mandate:
                    # Normalize to a plain dict regardless of the returned shape.
                    result.append(mandate if isinstance(mandate, dict) else mandate.model_dump() if hasattr(mandate, 'model_dump') else vars(mandate))
            items = [i.model_dump() if hasattr(i, 'model_dump') else i for i in result]
            return _handleFilterValuesRequest(items, column, pagination)
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error getting filter values for mandates: {str(e)}")
        raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(e))
@router.get("/{targetMandateId}", response_model=Mandate)
@limiter.limit("30/minute")
@ -475,6 +453,8 @@ def list_mandate_users(
request: Request,
targetMandateId: str = Path(..., description="ID of the mandate"),
pagination: Optional[str] = Query(None, description="JSON-encoded PaginationParams object"),
mode: Optional[str] = Query(None, description="'filterValues' for distinct column values, 'ids' for all filtered IDs"),
column: Optional[str] = Query(None, description="Column key (required when mode=filterValues)"),
context: RequestContext = Depends(getRequestContext)
):
"""
@ -556,7 +536,7 @@ def list_mandate_users(
continue
result.append({
"id": str(um.id), # UserMandate ID as primary key
"id": str(um.id),
"userId": str(user.id),
"username": user.username,
"email": user.email,
@ -566,57 +546,40 @@ def list_mandate_users(
"enabled": um.enabled
})
# Apply search, filtering, and sorting if pagination requested
from modules.routes.routeHelpers import (
handleFilterValuesInMemory, handleIdsInMemory,
_applyFiltersAndSort as _sharedApplyFiltersAndSort,
paginateInMemory,
)
if mode == "filterValues":
if not column:
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
return handleFilterValuesInMemory(result, column, pagination)
if mode == "ids":
return handleIdsInMemory(result, pagination)
if paginationParams:
# Apply search (if search term provided)
searchTerm = paginationParams.get('search', '').lower() if paginationParams.get('search') else ''
if searchTerm:
searchedResult = []
for item in result:
username = (item.get("username") or "").lower()
email = (item.get("email") or "").lower()
fullName = (item.get("fullName") or "").lower()
roleLabelsStr = " ".join(item.get("roleLabels") or []).lower()
if searchTerm in username or searchTerm in email or searchTerm in fullName or searchTerm in roleLabelsStr:
searchedResult.append(item)
result = searchedResult
# Apply filters (if filters provided)
filters = paginationParams.get('filters')
if filters:
for fieldName, filterValue in filters.items():
if filterValue is not None and filterValue != '':
filterValueLower = str(filterValue).lower()
result = [
item for item in result
if str(item.get(fieldName, '')).lower() == filterValueLower
]
# Apply sorting
sortFields = paginationParams.get('sort')
if sortFields:
for sortItem in reversed(sortFields):
field = sortItem.get('field')
direction = sortItem.get('direction', 'asc')
if field:
result = sorted(
result,
key=lambda x: str(x.get(field, '') or '').lower(),
reverse=(direction == 'desc')
)
# Apply pagination
page = paginationParams.get('page', 1)
pageSize = paginationParams.get('pageSize', 25)
totalItems = len(result)
paginationParamsObj = None
try:
paginationDict = json.loads(pagination) if pagination else None
if paginationDict:
paginationDict = normalize_pagination_dict(paginationDict)
paginationParamsObj = PaginationParams(**paginationDict)
except Exception:
pass
filtered = _sharedApplyFiltersAndSort(result, paginationParamsObj)
totalItems = len(filtered)
page = paginationParams.get('page', 1) if isinstance(paginationParams, dict) else 1
pageSize = paginationParams.get('pageSize', 25) if isinstance(paginationParams, dict) else 25
totalPages = (totalItems + pageSize - 1) // pageSize if totalItems > 0 else 0
startIdx = (page - 1) * pageSize
endIdx = startIdx + pageSize
paginatedResult = result[startIdx:endIdx]
return {
"items": paginatedResult,
"items": filtered[startIdx:endIdx],
"pagination": {
"currentPage": page,
"pageSize": pageSize,
@ -625,7 +588,6 @@ def list_mandate_users(
}
}
# No pagination - return all users as list
return result
except HTTPException:
@ -638,63 +600,6 @@ def list_mandate_users(
)
@router.get("/{targetMandateId}/users/filter-values")
@limiter.limit("60/minute")
def get_mandate_users_filter_values(
    request: Request,
    targetMandateId: str = Path(..., description="ID of the mandate"),
    column: str = Query(..., description="Column key"),
    pagination: Optional[str] = Query(None, description="JSON-encoded current filters"),
    context: RequestContext = Depends(getRequestContext)
) -> list:
    """Return distinct filter values for a column in mandate users.

    Requires mandate-admin (for the target mandate) or sysadmin role.
    Builds the same enriched user rows as the mandate-users listing, then
    delegates cross-filtered value extraction to _handleFilterValuesRequest.
    """
    if not _hasMandateAdminRole(context, targetMandateId) and not context.hasSysAdminRole:
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=routeApiMsg("Mandate-Admin role required"))
    try:
        from modules.routes.routeDataUsers import _handleFilterValuesRequest
        rootInterface = interfaceDbApp.getRootInterface()
        mandate = rootInterface.getMandate(targetMandateId)
        if not mandate:
            raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=f"Mandate {targetMandateId} not found")
        userMandates = rootInterface.getUserMandatesByMandate(targetMandateId)
        result = []
        for um in userMandates:
            user = rootInterface.getUser(str(um.userId))
            if not user:
                # Skip memberships whose user record no longer resolves.
                continue
            roleIds = rootInterface.getRoleIdsForUserMandate(str(um.id))
            roleLabels = []
            filteredRoleIds = []
            seenLabels = set()
            for roleId in roleIds:
                role = rootInterface.getRole(roleId)
                if role:
                    # Feature-instance-scoped roles are excluded from this view.
                    if role.featureInstanceId:
                        continue
                    filteredRoleIds.append(roleId)
                    # Deduplicate labels while preserving first-seen order.
                    if role.roleLabel not in seenLabels:
                        roleLabels.append(role.roleLabel)
                        seenLabels.add(role.roleLabel)
            result.append({
                "id": str(um.id),  # UserMandate ID as primary key
                "userId": str(user.id),
                "username": user.username,
                "email": user.email,
                "fullName": user.fullName,
                "roleIds": filteredRoleIds,
                "roleLabels": roleLabels,
                "enabled": um.enabled
            })
        return _handleFilterValuesRequest(result, column, pagination)
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error getting filter values for mandate users: {str(e)}")
        raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(e))
@router.post("/{targetMandateId}/users", response_model=UserMandateResponse)
@limiter.limit("30/minute")
def add_user_to_mandate(

View file

@ -27,44 +27,52 @@ router = APIRouter(
responses={404: {"description": "Not found"}}
)
@router.get("", response_model=PaginatedResponse[Prompt])
@router.get("")
@limiter.limit("30/minute")
def get_prompts(
request: Request,
pagination: Optional[str] = Query(None, description="JSON-encoded PaginationParams object"),
mode: Optional[str] = Query(None, description="'filterValues' for distinct column values, 'ids' for all filtered IDs"),
column: Optional[str] = Query(None, description="Column key (required when mode=filterValues)"),
currentUser: User = Depends(getCurrentUser)
) -> PaginatedResponse[Prompt]:
):
"""
Get prompts with optional pagination, sorting, and filtering.
Query Parameters:
- pagination: JSON-encoded PaginationParams object, or None for no pagination
Examples:
- GET /api/prompts (no pagination - returns all items)
- GET /api/prompts?pagination={"page":1,"pageSize":10,"sort":[]}
- GET /api/prompts?pagination={"page":2,"pageSize":20,"sort":[{"field":"name","direction":"asc"}]}
Modes:
- None: paginated list (default)
- filterValues: distinct values for a column (cross-filtered)
- ids: all IDs matching current filters
"""
# Parse pagination parameter
from modules.routes.routeHelpers import handleFilterValuesInMemory, handleIdsInMemory
if mode == "filterValues":
if not column:
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
managementInterface = interfaceDbManagement.getInterface(currentUser)
result = managementInterface.getAllPrompts(pagination=None)
items = [r.model_dump() if hasattr(r, 'model_dump') else r for r in result]
return handleFilterValuesInMemory(items, column, pagination)
if mode == "ids":
managementInterface = interfaceDbManagement.getInterface(currentUser)
result = managementInterface.getAllPrompts(pagination=None)
items = [r.model_dump() if hasattr(r, 'model_dump') else r for r in result]
return handleIdsInMemory(items, pagination)
paginationParams = None
if pagination:
try:
paginationDict = json.loads(pagination)
if paginationDict:
# Normalize pagination dict (handles top-level "search" field)
paginationDict = normalize_pagination_dict(paginationDict)
paginationParams = PaginationParams(**paginationDict)
except (json.JSONDecodeError, ValueError) as e:
raise HTTPException(
status_code=400,
detail=f"Invalid pagination parameter: {str(e)}"
)
raise HTTPException(status_code=400, detail=f"Invalid pagination parameter: {str(e)}")
managementInterface = interfaceDbManagement.getInterface(currentUser)
result = managementInterface.getAllPrompts(pagination=paginationParams)
# If pagination was requested, result is PaginatedResult
# If no pagination, result is List[Prompt]
if paginationParams:
return PaginatedResponse(
items=result.items,
@ -83,28 +91,6 @@ def get_prompts(
pagination=None
)
@router.get("/filter-values")
@limiter.limit("60/minute")
def get_prompt_filter_values(
    request: Request,
    column: str = Query(..., description="Column key"),
    pagination: Optional[str] = Query(None, description="JSON-encoded current filters"),
    currentUser: User = Depends(getCurrentUser)
) -> list:
    """Distinct values for one prompts column, honouring the caller's visibility.

    SQL DISTINCT is deliberately not used here: the own-plus-system
    visibility rules for regular users require the recordset to be
    pre-filtered before values are extracted.
    """
    try:
        from modules.routes.routeDataUsers import _handleFilterValuesRequest
        managementInterface = interfaceDbManagement.getInterface(currentUser)
        prompts = managementInterface.getAllPrompts(pagination=None)
        rows = [p.model_dump() if hasattr(p, 'model_dump') else p for p in prompts]
        return _handleFilterValuesRequest(rows, column, pagination)
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
@router.post("", response_model=Prompt)
@limiter.limit("10/minute")

View file

@ -24,7 +24,7 @@ from modules.auth import limiter, getRequestContext, RequestContext
from modules.datamodels.datamodelUam import User, UserInDB, AuthAuthority
from modules.interfaces.interfaceDbApp import getRootInterface
from modules.datamodels.datamodelPagination import PaginationParams, PaginatedResponse, PaginationMetadata, normalize_pagination_dict
from modules.shared.i18nRegistry import apiRouteContext, resolveText
from modules.shared.i18nRegistry import apiRouteContext
routeApiMsg = apiRouteContext("routeDataUsers")
# Configure logger
@ -71,206 +71,72 @@ def _isAdminForUser(context: RequestContext, targetUserId: str) -> bool:
return False
def _extractDistinctValues(
items: List[Dict[str, Any]],
columnKey: str,
requestLang: Optional[str] = None,
) -> List[str]:
"""Extract sorted distinct display values for a column from enriched items."""
values = set()
for item in items:
val = item.get(columnKey)
if val is None or val == "":
continue
if isinstance(val, bool):
values.add("true" if val else "false")
elif isinstance(val, (int, float)):
values.add(str(val))
elif isinstance(val, dict):
text = resolveText(val, requestLang)
if text:
values.add(text)
else:
values.add(str(val))
return sorted(values, key=lambda v: v.lower())
def _getUserFilterOrIds(context, paginationJson, column=None, idsMode=False):
"""Unified handler for mode=filterValues and mode=ids across all user scoping branches."""
from modules.routes.routeHelpers import (
handleFilterValuesInMemory, handleIdsInMemory,
handleFilterValuesMode, handleIdsMode,
parseCrossFilterPagination,
)
try:
appInterface = interfaceDbApp.getInterface(context.user, mandateId=context.mandateId)
requestLang = getattr(context.user, "language", None)
if context.mandateId:
result = appInterface.getUsersByMandate(str(context.mandateId), None)
users = result if isinstance(result, list) else (result.items if hasattr(result, 'items') else [])
items = [u.model_dump() if hasattr(u, 'model_dump') else u for u in users]
if idsMode:
return handleIdsInMemory(items, paginationJson)
return handleFilterValuesInMemory(items, column, paginationJson, requestLang)
def _handleFilterValuesRequest(
items: List[Dict[str, Any]],
column: str,
paginationJson: Optional[str] = None,
requestLang: Optional[str] = None,
) -> List[str]:
"""
Generic handler for /filter-values endpoints.
Applies all active filters EXCEPT the one for the requested column (cross-filtering),
then extracts distinct values for that column.
"""
crossFilterParams: Optional[PaginationParams] = None
if paginationJson:
try:
import json
paginationDict = json.loads(paginationJson)
if paginationDict:
paginationDict = normalize_pagination_dict(paginationDict)
filters = paginationDict.get("filters", {})
filters.pop(column, None)
paginationDict["filters"] = filters
paginationDict.pop("sort", None)
crossFilterParams = PaginationParams(**paginationDict)
except (json.JSONDecodeError, ValueError):
pass
if context.hasSysAdminRole:
rootInterface = getRootInterface()
if idsMode:
return handleIdsMode(rootInterface.db, UserInDB, paginationJson)
crossPagination = parseCrossFilterPagination(column, paginationJson)
try:
from fastapi.responses import JSONResponse
values = rootInterface.db.getDistinctColumnValues(UserInDB, column, crossPagination)
return JSONResponse(content=sorted(values, key=lambda v: v.lower()))
except Exception:
users = appInterface.getAllUsers()
items = [u.model_dump() if hasattr(u, 'model_dump') else u for u in users]
return handleFilterValuesInMemory(items, column, paginationJson, requestLang)
crossFiltered = _applyFiltersAndSort(items, crossFilterParams)
return _extractDistinctValues(crossFiltered, column, requestLang)
def _applyFiltersAndSort(items: List[Dict[str, Any]], paginationParams: Optional[PaginationParams]) -> List[Dict[str, Any]]:
"""
Apply filters and sorting to a list of items.
This is used when we can't do server-side filtering in the database (e.g., SysAdmin view).
Args:
items: List of dictionaries to filter/sort
paginationParams: Pagination parameters with filters and sort
Returns:
Filtered and sorted list
"""
if not paginationParams:
return items
result = items.copy()
# Apply filters
if paginationParams.filters:
filters = paginationParams.filters
# Handle general search
searchTerm = filters.get('search', '').lower() if filters.get('search') else None
if searchTerm:
def matchesSearch(item: Dict[str, Any]) -> bool:
for value in item.values():
if value is not None and searchTerm in str(value).lower():
return True
return False
result = [item for item in result if matchesSearch(item)]
# Handle field-specific filters
for field, filterValue in filters.items():
if field == 'search':
continue # Already handled
if isinstance(filterValue, dict) and 'operator' in filterValue:
operator = filterValue.get('operator', 'equals')
value = filterValue.get('value')
else:
operator = 'equals'
value = filterValue
if value is None or value == '':
rootInterface = getRootInterface()
userMandates = rootInterface.getUserMandates(str(context.user.id))
adminMandateIds = []
for um in userMandates:
umId = getattr(um, 'id', None)
mandateId = getattr(um, 'mandateId', None)
if not umId or not mandateId:
continue
def matchesFilter(item: Dict[str, Any], f: str, op: str, v: Any) -> bool:
itemValue = item.get(f)
if itemValue is None:
return False
itemStr = str(itemValue).lower()
valueStr = str(v).lower()
if op in ('equals', 'eq'):
return itemStr == valueStr
elif op == 'contains':
return valueStr in itemStr
elif op == 'startsWith':
return itemStr.startswith(valueStr)
elif op == 'endsWith':
return itemStr.endswith(valueStr)
elif op in ('gt', 'gte', 'lt', 'lte'):
try:
itemNum = float(itemValue)
valueNum = float(v)
if op == 'gt':
return itemNum > valueNum
elif op == 'gte':
return itemNum >= valueNum
elif op == 'lt':
return itemNum < valueNum
elif op == 'lte':
return itemNum <= valueNum
except (ValueError, TypeError):
return False
elif op == 'between':
if isinstance(v, dict):
fromVal = v.get('from', '')
toVal = v.get('to', '')
if not fromVal and not toVal:
return True
# Date range: from/to are YYYY-MM-DD strings, itemValue may be Unix timestamp
try:
from datetime import datetime, timezone
fromTs = None
toTs = None
if fromVal:
fromTs = datetime.strptime(str(fromVal), '%Y-%m-%d').replace(tzinfo=timezone.utc).timestamp()
if toVal:
toTs = datetime.strptime(str(toVal), '%Y-%m-%d').replace(hour=23, minute=59, second=59, tzinfo=timezone.utc).timestamp()
itemNum = float(itemValue) if not isinstance(itemValue, (int, float)) else itemValue
# Normalize: if item looks like a millisecond timestamp, convert to seconds
if itemNum > 10000000000:
itemNum = itemNum / 1000
if fromTs is not None and toTs is not None:
return fromTs <= itemNum <= toTs
elif fromTs is not None:
return itemNum >= fromTs
elif toTs is not None:
return itemNum <= toTs
except (ValueError, TypeError):
# Fallback: string comparison (for non-numeric date fields)
fromStr = str(fromVal).lower() if fromVal else ''
toStr = str(toVal).lower() if toVal else ''
if fromStr and toStr:
return fromStr <= itemStr <= toStr
elif fromStr:
return itemStr >= fromStr
elif toStr:
return itemStr <= toStr
return True
elif op == 'in':
if isinstance(v, list):
return itemStr in [str(x).lower() for x in v]
return False
elif op == 'notIn':
if isinstance(v, list):
return itemStr not in [str(x).lower() for x in v]
return True
return True
result = [item for item in result if matchesFilter(item, field, operator, value)]
# Apply sorting — None values always last
if paginationParams.sort:
for sortField in reversed(paginationParams.sort):
fieldName = sortField.field
ascending = sortField.direction == 'asc'
noneItems = [item for item in result if item.get(fieldName) is None]
nonNoneItems = [item for item in result if item.get(fieldName) is not None]
def getSortKey(item: Dict[str, Any], _fn=fieldName):
value = item.get(_fn)
if isinstance(value, bool):
return (0, int(value), '')
if isinstance(value, (int, float)):
return (0, value, '')
return (1, 0, str(value).lower())
nonNoneItems = sorted(nonNoneItems, key=getSortKey, reverse=not ascending)
result = nonNoneItems + noneItems
return result
roleIds = rootInterface.getRoleIdsForUserMandate(str(umId))
for roleId in roleIds:
role = rootInterface.getRole(roleId)
if role and role.roleLabel == "admin" and not role.featureInstanceId:
adminMandateIds.append(str(mandateId))
break
if not adminMandateIds:
return []
from modules.datamodels.datamodelMembership import UserMandate as UserMandateModel
allUM = rootInterface.db.getRecordset(UserMandateModel, recordFilter={"mandateId": adminMandateIds})
uniqueUserIds = list({
(um.get("userId") if isinstance(um, dict) else getattr(um, "userId", None))
for um in (allUM or [])
if (um.get("userId") if isinstance(um, dict) else getattr(um, "userId", None))
})
batchUsers = rootInterface.getUsersByIds(uniqueUserIds) if uniqueUserIds else {}
items = [u.model_dump() if hasattr(u, 'model_dump') else vars(u) for u in batchUsers.values()]
if idsMode:
return handleIdsInMemory(items, paginationJson)
return handleFilterValuesInMemory(items, column, paginationJson, requestLang)
except HTTPException:
raise
except Exception as e:
logger.error(f"Error in _getUserFilterOrIds: {str(e)}")
raise HTTPException(status_code=500, detail=str(e))
router = APIRouter(
@ -326,6 +192,8 @@ def get_user_options(
def get_users(
request: Request,
pagination: Optional[str] = Query(None, description="JSON-encoded PaginationParams object"),
mode: Optional[str] = Query(None, description="'filterValues' for distinct column values, 'ids' for all filtered IDs"),
column: Optional[str] = Query(None, description="Column key (required when mode=filterValues)"),
context: RequestContext = Depends(getRequestContext)
) -> PaginatedResponse[User]:
"""
@ -340,8 +208,15 @@ def get_users(
- GET /api/users/ (no pagination - returns all users in mandate)
- GET /api/users/?pagination={"page":1,"pageSize":10,"sort":[]}
"""
if mode == "filterValues":
if not column:
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
return _getUserFilterOrIds(context, pagination, column=column)
if mode == "ids":
return _getUserFilterOrIds(context, pagination, idsMode=True)
try:
# Parse pagination parameter
paginationParams = None
if pagination:
try:
@ -357,8 +232,6 @@ def get_users(
appInterface = interfaceDbApp.getInterface(context.user, mandateId=context.mandateId)
# MULTI-TENANT: Use mandateId from context (header)
# SysAdmin without mandateId can see all users
if context.mandateId:
# Get users for specific mandate using getUsersByMandate
result = appInterface.getUsersByMandate(str(context.mandateId), paginationParams)
@ -443,8 +316,8 @@ def get_users(
for u in batchUsers.values()
]
# Apply server-side filtering and sorting
filteredUsers = _applyFiltersAndSort(allUsers, paginationParams)
from modules.routes.routeHelpers import _applyFiltersAndSort as _applyFiltersAndSortHelper
filteredUsers = _applyFiltersAndSortHelper(allUsers, paginationParams)
users = [User(**u) for u in filteredUsers]
if paginationParams:
@ -480,86 +353,6 @@ def get_users(
detail=f"Failed to get users: {str(e)}"
)
@router.get("/filter-values")
@limiter.limit("60/minute")
def get_user_filter_values(
request: Request,
column: str = Query(..., description="Column key"),
pagination: Optional[str] = Query(None, description="JSON-encoded current filters"),
context: RequestContext = Depends(getRequestContext)
) -> list:
"""Return distinct filter values for a column in users."""
try:
appInterface = interfaceDbApp.getInterface(context.user, mandateId=context.mandateId)
# Build cross-filter pagination (all filters except the requested column)
crossFilterPagination = None
if pagination:
try:
paginationDict = json.loads(pagination)
if paginationDict:
paginationDict = normalize_pagination_dict(paginationDict)
filters = paginationDict.get("filters", {})
filters.pop(column, None)
paginationDict["filters"] = filters
paginationDict.pop("sort", None)
crossFilterPagination = PaginationParams(**paginationDict)
except (json.JSONDecodeError, ValueError):
pass
if context.mandateId:
# Mandate-scoped: in-memory (users require UserMandate join)
result = appInterface.getUsersByMandate(str(context.mandateId), None)
users = result if isinstance(result, list) else (result.items if hasattr(result, 'items') else [])
items = [u.model_dump() if hasattr(u, 'model_dump') else u for u in users]
return _handleFilterValuesRequest(items, column, pagination, getattr(context.user, "language", None))
elif context.hasSysAdminRole:
# SysAdmin: use SQL DISTINCT for DB columns
try:
rootInterface = getRootInterface()
values = rootInterface.db.getDistinctColumnValues(
UserInDB, column, crossFilterPagination
)
return sorted(values, key=lambda v: v.lower())
except Exception:
users = appInterface.getAllUsers()
items = [u.model_dump() if hasattr(u, 'model_dump') else u for u in users]
return _handleFilterValuesRequest(items, column, pagination, getattr(context.user, "language", None))
else:
# Non-admin multi-mandate: aggregate across admin mandates (in-memory)
rootInterface = getRootInterface()
userMandates = rootInterface.getUserMandates(str(context.user.id))
adminMandateIds = []
for um in userMandates:
umId = getattr(um, 'id', None)
mandateId = getattr(um, 'mandateId', None)
if not umId or not mandateId:
continue
roleIds = rootInterface.getRoleIdsForUserMandate(str(umId))
for roleId in roleIds:
role = rootInterface.getRole(roleId)
if role and role.roleLabel == "admin" and not role.featureInstanceId:
adminMandateIds.append(str(mandateId))
break
if not adminMandateIds:
return []
from modules.datamodels.datamodelMembership import UserMandate as UserMandateModel
allUM = rootInterface.db.getRecordset(UserMandateModel, recordFilter={"mandateId": adminMandateIds})
uniqueUserIds = list({
(um.get("userId") if isinstance(um, dict) else getattr(um, "userId", None))
for um in (allUM or [])
if (um.get("userId") if isinstance(um, dict) else getattr(um, "userId", None))
})
batchUsers = rootInterface.getUsersByIds(uniqueUserIds) if uniqueUserIds else {}
items = [u.model_dump() if hasattr(u, 'model_dump') else vars(u) for u in batchUsers.values()]
return _handleFilterValuesRequest(items, column, pagination, getattr(context.user, "language", None))
except HTTPException:
raise
except Exception as e:
logger.error(f"Error getting filter values for users: {str(e)}")
raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(e))
@router.get("/{userId}", response_model=User)
@limiter.limit("30/minute")
def get_user(

View file

@ -0,0 +1,534 @@
# Copyright (c) 2025 Patrick Motsch
# All rights reserved.
"""
Shared helpers for route handlers.
Provides unified logic for:
- mode=filterValues: distinct column values for filter dropdowns (cross-filtered)
- mode=ids: all IDs matching current filters (for bulk selection)
- In-memory equivalents for enriched/non-SQL routes
"""
import copy
import json
import logging
from typing import Any, Dict, List, Optional, Callable
from fastapi.responses import JSONResponse
from modules.datamodels.datamodelPagination import (
PaginationParams,
normalize_pagination_dict,
)
from modules.shared.i18nRegistry import resolveText
logger = logging.getLogger(__name__)
# ---------------------------------------------------------------------------
# Central FK label resolvers (cross-DB)
# ---------------------------------------------------------------------------
def _resolveMandateLabels(ids: List[str]) -> Dict[str, str]:
    """Resolve Mandate IDs to display labels: `label`, else `name`, else the ID itself."""
    from modules.interfaces.interfaceDbApp import getRootInterface

    mandateMap = getRootInterface().getMandatesByIds(ids)
    labels: Dict[str, str] = {}
    for mandateId, mandate in mandateMap.items():
        labels[mandateId] = (
            getattr(mandate, "label", None)
            or getattr(mandate, "name", mandateId)
            or mandateId
        )
    return labels
def _resolveInstanceLabels(ids: List[str]) -> Dict[str, str]:
    """Resolve FeatureInstance IDs to their labels, falling back to the raw ID."""
    from modules.interfaces.interfaceDbApp import getRootInterface
    from modules.interfaces.interfaceFeatures import getFeatureInterface

    rootIface = getRootInterface()
    featureIface = getFeatureInterface(rootIface.db)
    labels: Dict[str, str] = {}
    for instanceId in ids:
        instance = featureIface.getFeatureInstance(instanceId)
        labels[instanceId] = instance.label if instance and instance.label else instanceId
    return labels
def _resolveUserLabels(ids: List[str]) -> Dict[str, str]:
    """Resolve user IDs to display labels: username, else email, else the ID itself.

    Fetches all users in one recordset call against the root DB; IDs are
    de-duplicated before querying.
    """
    from modules.interfaces.interfaceDbApp import getRootInterface
    # Plain local import instead of the original __import__(..., fromlist=["User"])
    # hack — same lazy-import effect, readable and tooling-friendly.
    from modules.datamodels.datamodelUam import User

    rootIface = getRootInterface()
    users = rootIface.db.getRecordset(
        User,
        recordFilter={"id": list(set(ids))},
    )
    result: Dict[str, str] = {}
    for u in (users or []):
        uid = u.get("id", "")
        result[uid] = u.get("username") or u.get("email") or uid
    return result
_BUILTIN_FK_RESOLVERS: Dict[str, Callable[[List[str]], Dict[str, str]]] = {
"Mandate": _resolveMandateLabels,
"FeatureInstance": _resolveInstanceLabels,
"User": _resolveUserLabels,
}
def _buildLabelResolversFromModel(modelClass: type) -> Dict[str, Callable[[List[str]], Dict[str, str]]]:
    """
    Derive FK labelResolvers from a Pydantic model's fk_model annotations.

    Only fields whose fk_model has a builtin resolver (Mandate,
    FeatureInstance, User) are included in the returned mapping.
    """
    from modules.connectors.connectorDbPostgre import _get_fk_sort_meta

    return {
        fieldName: _BUILTIN_FK_RESOLVERS[meta.get("model", "")]
        for fieldName, meta in _get_fk_sort_meta(modelClass).items()
        if meta.get("model", "") in _BUILTIN_FK_RESOLVERS
    }
# ---------------------------------------------------------------------------
# Cross-filter pagination parsing
# ---------------------------------------------------------------------------
def parseCrossFilterPagination(
    column: str,
    paginationJson: Optional[str],
) -> Optional["PaginationParams"]:
    """
    Parse a pagination JSON string for a filter-values request.

    Cross-filtering: the requested column's own filter is removed so the
    dropdown still shows every value reachable under the remaining filters.
    Sort is dropped (irrelevant for distinct values). Returns None for
    missing, empty, or invalid input.
    """
    if not paginationJson:
        return None
    try:
        parsed = json.loads(paginationJson)
        if not parsed:
            return None
        parsed = normalize_pagination_dict(parsed)
        activeFilters = parsed.get("filters", {})
        activeFilters.pop(column, None)
        parsed["filters"] = activeFilters
        parsed.pop("sort", None)
        return PaginationParams(**parsed)
    except (json.JSONDecodeError, ValueError, TypeError):
        return None
def parsePaginationForIds(
    paginationJson: Optional[str],
) -> Optional[PaginationParams]:
    """
    Parse pagination JSON for mode=ids: keep filters, drop sort.

    page/pageSize are left as parsed but are ignored downstream —
    handleIdsMode only uses the WHERE clause (no LIMIT/OFFSET).
    Returns None for missing, empty, or invalid input.
    """
    if not paginationJson:
        return None
    try:
        paginationDict = json.loads(paginationJson)
        if not paginationDict:
            return None
        paginationDict = normalize_pagination_dict(paginationDict)
        # Sort has no effect on which IDs match, so it is removed.
        paginationDict.pop("sort", None)
        return PaginationParams(**paginationDict)
    except (json.JSONDecodeError, ValueError, TypeError):
        return None
# ---------------------------------------------------------------------------
# SQL-based helpers (delegate to DB connector)
# ---------------------------------------------------------------------------
def handleFilterValuesMode(
    db,
    modelClass: type,
    column: str,
    paginationJson: Optional[str] = None,
    recordFilter: Optional[Dict[str, Any]] = None,
    enrichFn: Optional[Callable[[str, Optional[PaginationParams], Optional[Dict[str, Any]]], List[str]]] = None,
) -> JSONResponse:
    """
    SQL-based distinct column values with cross-filtering.

    If enrichFn is provided and returns a non-None result for the column
    (computed/joined columns), that result is used instead of SQL DISTINCT.
    Fails soft: any error yields an empty list so the filter dropdown
    degrades gracefully.

    Returns a JSONResponse (fixed annotation: the original declared
    List[str] although every branch returns JSONResponse) to bypass
    FastAPI response_model validation.
    """
    crossPagination = parseCrossFilterPagination(column, paginationJson)
    if enrichFn:
        try:
            result = enrichFn(column, crossPagination, recordFilter)
            if result is not None:
                return JSONResponse(content=result)
        except Exception as e:
            # Enrichment is best-effort; fall through to plain SQL DISTINCT.
            logger.warning(f"handleFilterValuesMode enrichFn failed for {column}: {e}")
    try:
        values = db.getDistinctColumnValues(
            modelClass, column,
            pagination=crossPagination,
            recordFilter=recordFilter,
        ) or []
        return JSONResponse(content=values)
    except Exception as e:
        logger.error(f"handleFilterValuesMode SQL failed for {modelClass.__name__}.{column}: {e}")
        return JSONResponse(content=[])
def handleIdsMode(
    db,
    modelClass: type,
    paginationJson: Optional[str] = None,
    recordFilter: Optional[Dict[str, Any]] = None,
    idField: str = "id",
) -> JSONResponse:
    """
    Return all IDs matching the current filters (no LIMIT/OFFSET).

    Reuses the connector's pagination WHERE-clause builder so the ID set
    matches exactly what getRecordsetPaginated would return across all
    pages. Fails soft: any error yields an empty list.

    Returns a JSONResponse (fixed annotation: the original declared
    List[str] although every branch returns JSONResponse).
    """
    pagination = parsePaginationForIds(paginationJson)
    table = modelClass.__name__
    try:
        if not db._ensureTableExists(modelClass):
            return JSONResponse(content=[])
        # Same WHERE clause as getRecordsetPaginated; sort/limit are irrelevant here.
        where_clause, _, _, values, _ = db._buildPaginationClauses(
            modelClass, pagination, recordFilter,
        )
        sql = f'SELECT "{idField}"::TEXT AS val FROM "{table}"{where_clause} ORDER BY "{idField}"'
        with db.connection.cursor() as cursor:
            cursor.execute(sql, values)
            return JSONResponse(content=[row["val"] for row in cursor.fetchall()])
    except Exception as e:
        logger.error(f"handleIdsMode failed for {table}: {e}")
        return JSONResponse(content=[])
# ---------------------------------------------------------------------------
# In-memory helpers (for enriched / non-SQL routes)
# ---------------------------------------------------------------------------
def _applyFiltersAndSort(
items: List[Dict[str, Any]],
paginationParams: Optional[PaginationParams],
) -> List[Dict[str, Any]]:
"""
Apply filters and sorting to a list of dicts in-memory.
Does NOT paginate (no page/pageSize slicing).
"""
if not paginationParams:
return items
result = list(items)
if paginationParams.filters:
filters = paginationParams.filters
searchTerm = filters.get("search", "").lower() if filters.get("search") else None
if searchTerm:
result = [
item for item in result
if any(
searchTerm in str(v).lower()
for v in item.values()
if v is not None
)
]
for field, filterValue in filters.items():
if field == "search":
continue
if isinstance(filterValue, dict) and "operator" in filterValue:
operator = filterValue.get("operator", "equals")
value = filterValue.get("value")
else:
operator = "equals"
value = filterValue
if value is None or value == "":
continue
result = [
item for item in result
if _matchesFilter(item, field, operator, value)
]
if paginationParams.sort:
for sortField in reversed(paginationParams.sort):
fieldName = sortField.field
ascending = sortField.direction == "asc"
noneItems = [item for item in result if item.get(fieldName) is None]
nonNoneItems = [item for item in result if item.get(fieldName) is not None]
def _getSortKey(item: Dict[str, Any], _fn=fieldName):
value = item.get(_fn)
if isinstance(value, bool):
return (0, int(value), "")
if isinstance(value, (int, float)):
return (0, value, "")
return (1, 0, str(value).lower())
nonNoneItems = sorted(nonNoneItems, key=_getSortKey, reverse=not ascending)
result = nonNoneItems + noneItems
return result
def _matchesFilter(item: Dict[str, Any], field: str, operator: str, value: Any) -> bool:
"""Single-field filter match for in-memory filtering."""
itemValue = item.get(field)
if itemValue is None:
return False
itemStr = str(itemValue).lower()
valueStr = str(value).lower()
if operator in ("equals", "eq"):
return itemStr == valueStr
if operator == "contains":
return valueStr in itemStr
if operator == "startsWith":
return itemStr.startswith(valueStr)
if operator == "endsWith":
return itemStr.endswith(valueStr)
if operator in ("gt", "gte", "lt", "lte"):
try:
itemNum = float(itemValue)
valueNum = float(value)
if operator == "gt":
return itemNum > valueNum
if operator == "gte":
return itemNum >= valueNum
if operator == "lt":
return itemNum < valueNum
return itemNum <= valueNum
except (ValueError, TypeError):
return False
if operator == "between":
return _matchesBetween(itemValue, itemStr, value)
if operator == "in":
if isinstance(value, list):
return itemStr in [str(x).lower() for x in value]
return False
if operator == "notIn":
if isinstance(value, list):
return itemStr not in [str(x).lower() for x in value]
return True
return True
def _matchesBetween(itemValue: Any, itemStr: str, value: Any) -> bool:
"""Handle 'between' operator for date ranges and numeric ranges."""
if not isinstance(value, dict):
return True
fromVal = value.get("from", "")
toVal = value.get("to", "")
if not fromVal and not toVal:
return True
try:
from datetime import datetime, timezone
fromTs = None
toTs = None
if fromVal:
fromTs = datetime.strptime(str(fromVal), "%Y-%m-%d").replace(tzinfo=timezone.utc).timestamp()
if toVal:
toTs = datetime.strptime(str(toVal), "%Y-%m-%d").replace(
hour=23, minute=59, second=59, tzinfo=timezone.utc
).timestamp()
itemNum = float(itemValue) if not isinstance(itemValue, (int, float)) else itemValue
if itemNum > 10000000000:
itemNum = itemNum / 1000
if fromTs is not None and toTs is not None:
return fromTs <= itemNum <= toTs
if fromTs is not None:
return itemNum >= fromTs
if toTs is not None:
return itemNum <= toTs
except (ValueError, TypeError):
fromStr = str(fromVal).lower() if fromVal else ""
toStr = str(toVal).lower() if toVal else ""
if fromStr and toStr:
return fromStr <= itemStr <= toStr
if fromStr:
return itemStr >= fromStr
if toStr:
return itemStr <= toStr
return True
def _extractDistinctValues(
items: List[Dict[str, Any]],
columnKey: str,
requestLang: Optional[str] = None,
) -> List[str]:
"""Extract sorted distinct display values for a column from enriched items."""
values = set()
for item in items:
val = item.get(columnKey)
if val is None or val == "":
continue
if isinstance(val, bool):
values.add("true" if val else "false")
elif isinstance(val, (int, float)):
values.add(str(val))
elif isinstance(val, dict):
text = resolveText(val, requestLang)
if text:
values.add(text)
else:
values.add(str(val))
return sorted(values, key=lambda v: v.lower())
def handleFilterValuesInMemory(
    items: List[Dict[str, Any]],
    column: str,
    paginationJson: Optional[str] = None,
    requestLang: Optional[str] = None,
) -> JSONResponse:
    """
    In-memory mode=filterValues for routes that enrich rows in Python.

    Applies every filter except the requested column (cross-filtering),
    then returns the distinct display values. A JSONResponse is returned
    to bypass FastAPI response_model validation.
    """
    crossParams = parseCrossFilterPagination(column, paginationJson)
    remaining = _applyFiltersAndSort(items, crossParams)
    distinct = _extractDistinctValues(remaining, column, requestLang)
    return JSONResponse(content=distinct)
def handleIdsInMemory(
    items: List[Dict[str, Any]],
    paginationJson: Optional[str] = None,
    idField: str = "id",
) -> JSONResponse:
    """
    In-memory mode=ids: apply the current filters and return every matching
    row's ID as a string (no page slicing). Rows without an ID are skipped.
    A JSONResponse is returned to bypass FastAPI response_model validation.
    """
    params = parsePaginationForIds(paginationJson)
    matching = _applyFiltersAndSort(items, params)
    ids = [str(row[idField]) for row in matching if row.get(idField) is not None]
    return JSONResponse(content=ids)
def getRecordsetPaginatedWithFkSort(
    db,
    modelClass: type,
    pagination,
    recordFilter: Optional[Dict[str, Any]] = None,
    labelResolvers: Optional[Dict[str, Callable[[List[str]], Dict[str, str]]]] = None,
    fieldFilter: Optional[List[str]] = None,
    idField: str = "id",
) -> Dict[str, Any]:
    """
    Wrapper around db.getRecordsetPaginated that handles FK-label sorting.

    If the current sort field is a FK with a registered labelResolver, the
    function fetches all filtered IDs + FK values, resolves labels cross-DB,
    sorts in-memory by label, and returns only the requested page.
    If no FK sort is active, delegates directly to db.getRecordsetPaginated.

    Returns {"items": [...], "totalItems": int, "totalPages": int}.
    On any error, falls back to the plain SQL-paginated result.
    """
    import math
    # Fast path: no pagination/sort at all -> plain SQL pagination.
    if not pagination or not pagination.sort:
        return db.getRecordsetPaginated(modelClass, pagination, recordFilter, fieldFilter)
    if labelResolvers is None:
        labelResolvers = _buildLabelResolversFromModel(modelClass)
    if not labelResolvers:
        return db.getRecordsetPaginated(modelClass, pagination, recordFilter, fieldFilter)
    # Find the first sort field that is a resolvable FK; sort entries may be
    # dicts or objects depending on where pagination was parsed.
    fkSortField = None
    fkSortDir = "asc"
    for sf in pagination.sort:
        sfField = sf.get("field") if isinstance(sf, dict) else getattr(sf, "field", None)
        sfDir = sf.get("direction", "asc") if isinstance(sf, dict) else getattr(sf, "direction", "asc")
        if sfField and sfField in labelResolvers:
            fkSortField = sfField
            fkSortDir = str(sfDir).lower()
            break
    if not fkSortField:
        return db.getRecordsetPaginated(modelClass, pagination, recordFilter, fieldFilter)
    try:
        # Phase 1: resolve labels for every distinct FK value under the filters.
        distinctIds = db.getDistinctColumnValues(
            modelClass, fkSortField, recordFilter=recordFilter,
        ) or []
        labelMap = {}
        if distinctIds:
            try:
                labelMap = labelResolvers[fkSortField](distinctIds)
            except Exception as e:
                # Resolver failure degrades to sorting by the raw FK value.
                logger.warning(f"getRecordsetPaginatedWithFkSort: resolver for {fkSortField} failed: {e}")
        # Phase 2: fetch ALL filtered rows, but only (id, fk) columns, by
        # reusing the paginated query with sort removed and a huge pageSize.
        filterOnlyPagination = copy.deepcopy(pagination)
        filterOnlyPagination.sort = []
        filterOnlyPagination.page = 1
        filterOnlyPagination.pageSize = 999999
        lightRows = db.getRecordsetPaginated(
            modelClass, filterOnlyPagination, recordFilter,
            fieldFilter=[idField, fkSortField],
        )
        allRows = lightRows.get("items", [])
        totalItems = len(allRows)
        if totalItems == 0:
            return {"items": [], "totalItems": 0, "totalPages": 0}
        # Sort the light rows by resolved label (case-insensitive),
        # falling back to the raw FK value when no label was resolved.
        def _sortKey(row):
            fkVal = row.get(fkSortField, "") or ""
            label = labelMap.get(str(fkVal), str(fkVal)).lower()
            return label
        reverse = fkSortDir == "desc"
        allRows.sort(key=_sortKey, reverse=reverse)
        # Phase 3: slice the requested page and re-fetch full rows for it.
        pageSize = pagination.pageSize
        offset = (pagination.page - 1) * pageSize
        pageSlice = allRows[offset:offset + pageSize]
        pageIds = [row[idField] for row in pageSlice if row.get(idField)]
        if not pageIds:
            return {"items": [], "totalItems": totalItems, "totalPages": math.ceil(totalItems / pageSize)}
        pageItems = db.getRecordset(modelClass, recordFilter={idField: pageIds}, fieldFilter=fieldFilter)
        # getRecordset does not preserve the sorted ID order; restore it.
        idOrder = {pid: idx for idx, pid in enumerate(pageIds)}
        pageItems.sort(key=lambda r: idOrder.get(r.get(idField), 999999))
        totalPages = math.ceil(totalItems / pageSize) if totalItems > 0 else 0
        return {"items": pageItems, "totalItems": totalItems, "totalPages": totalPages}
    except Exception as e:
        logger.error(f"getRecordsetPaginatedWithFkSort failed for {modelClass.__name__}: {e}")
        return db.getRecordsetPaginated(modelClass, pagination, recordFilter, fieldFilter)
def paginateInMemory(
    items: List[Dict[str, Any]],
    paginationParams: Optional["PaginationParams"],
) -> tuple:
    """
    Slice an already filtered+sorted list by page/pageSize.

    Returns (pageItems, totalItems); with no pagination the original list
    is returned unsliced.
    """
    total = len(items)
    if not paginationParams:
        return items, total
    start = (paginationParams.page - 1) * paginationParams.pageSize
    end = start + paginationParams.pageSize
    return items[start:end], total

View file

@ -21,7 +21,7 @@ from pydantic import BaseModel, Field, model_validator
from modules.auth import limiter, getRequestContext, RequestContext, getCurrentUser
from modules.datamodels.datamodelUam import User
from modules.datamodels.datamodelPagination import PaginationParams, PaginationMetadata, normalize_pagination_dict
from modules.routes.routeDataUsers import _applyFiltersAndSort
from modules.routes.routeHelpers import _applyFiltersAndSort, handleFilterValuesInMemory, handleIdsInMemory
from modules.datamodels.datamodelInvitation import Invitation
from modules.interfaces.interfaceDbApp import getRootInterface
from modules.shared.timeUtils import getUtcTimestamp
@ -408,6 +408,8 @@ def list_invitations(
includeUsed: bool = Query(False, description="Include already used invitations"),
includeExpired: bool = Query(False, description="Include expired invitations"),
pagination: Optional[str] = Query(None, description="JSON-encoded PaginationParams object"),
mode: Optional[str] = Query(None, description="'filterValues' for distinct column values, 'ids' for all filtered IDs"),
column: Optional[str] = Query(None, description="Column key (required when mode=filterValues)"),
context: RequestContext = Depends(getRequestContext)
):
"""
@ -439,41 +441,49 @@ def list_invitations(
detail=routeApiMsg("Mandate-Admin role required to list invitations")
)
try:
def _buildInvitationItems():
rootInterface = getRootInterface()
# Get all invitations for this mandate (Pydantic models)
allInvitations = rootInterface.getInvitationsByMandate(str(context.mandateId))
currentTime = getUtcTimestamp()
result = []
items = []
for inv in allInvitations:
# Skip revoked invitations
if inv.revokedAt:
continue
# Filter by usage
currentUses = inv.currentUses or 0
maxUses = inv.maxUses or 1
if not includeUsed and currentUses >= maxUses:
continue
# Filter by expiration
expiresAt = inv.expiresAt or 0
if not includeExpired and expiresAt < currentTime:
continue
# Build invite URL using frontend URL provided by the caller
baseUrl = frontendUrl.rstrip("/")
inviteUrl = f"{baseUrl}/invite/{inv.token}"
result.append({
baseUrl = frontendUrl.rstrip("/") if frontendUrl else ""
inviteUrl = f"{baseUrl}/invite/{inv.token}" if baseUrl else ""
items.append({
**inv.model_dump(),
"inviteUrl": inviteUrl,
"isExpired": expiresAt < currentTime,
"isUsedUp": currentUses >= maxUses
})
return items
if mode == "filterValues":
if not column:
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
try:
return handleFilterValuesInMemory(_buildInvitationItems(), column, pagination)
except Exception as e:
logger.error(f"Error getting filter values for invitations: {e}")
raise HTTPException(status_code=500, detail=str(e))
if mode == "ids":
try:
return handleIdsInMemory(_buildInvitationItems(), pagination)
except Exception as e:
logger.error(f"Error getting IDs for invitations: {e}")
raise HTTPException(status_code=500, detail=str(e))
try:
result = _buildInvitationItems()
paginationParams = None
if pagination:
@ -511,54 +521,6 @@ def list_invitations(
)
@router.get("/filter-values")
@limiter.limit("60/minute")
def get_invitation_filter_values(
request: Request,
column: str = Query(..., description="Column key"),
pagination: Optional[str] = Query(None, description="JSON-encoded current filters"),
frontendUrl: str = Query("", description="Frontend URL for building invite links"),
includeUsed: bool = Query(False, description="Include already used invitations"),
includeExpired: bool = Query(False, description="Include expired invitations"),
context: RequestContext = Depends(getRequestContext)
) -> list:
"""Return distinct filter values for a column in invitations."""
if not context.mandateId:
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=routeApiMsg("X-Mandate-Id header is required"))
if not _hasMandateAdminRole(context):
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=routeApiMsg("Mandate-Admin role required"))
try:
from modules.routes.routeDataUsers import _handleFilterValuesRequest
rootInterface = getRootInterface()
allInvitations = rootInterface.getInvitationsByMandate(str(context.mandateId))
currentTime = getUtcTimestamp()
result = []
for inv in allInvitations:
if inv.revokedAt:
continue
currentUses = inv.currentUses or 0
maxUses = inv.maxUses or 1
if not includeUsed and currentUses >= maxUses:
continue
expiresAt = inv.expiresAt or 0
if not includeExpired and expiresAt < currentTime:
continue
baseUrl = frontendUrl.rstrip("/") if frontendUrl else ""
inviteUrl = f"{baseUrl}/invite/{inv.token}" if baseUrl else ""
result.append({
**inv.model_dump(),
"inviteUrl": inviteUrl,
"isExpired": expiresAt < currentTime,
"isUsedUp": currentUses >= maxUses
})
return _handleFilterValuesRequest(result, column, pagination)
except HTTPException:
raise
except Exception as e:
logger.error(f"Error getting filter values for invitations: {e}")
raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(e))
@router.delete("/{invitationId}", response_model=Dict[str, str])
@limiter.limit("30/minute")
def revoke_invitation(

View file

@ -22,7 +22,7 @@ from pydantic import BaseModel, Field
from modules.auth import limiter, getRequestContext, RequestContext
from modules.datamodels.datamodelPagination import PaginationParams, PaginatedResponse, PaginationMetadata, normalize_pagination_dict
from modules.routes.routeDataUsers import _applyFiltersAndSort, _extractDistinctValues
from modules.routes.routeHelpers import _applyFiltersAndSort, handleFilterValuesInMemory, handleIdsInMemory
from modules.shared.i18nRegistry import apiRouteContext, resolveText
routeApiMsg = apiRouteContext("routeSubscription")
@ -397,7 +397,7 @@ def verifyCheckout(
# =============================================================================
def _buildEnrichedSubscriptions() -> List[Dict[str, Any]]:
"""Build the full enriched subscription list (shared by list + filter-values endpoints)."""
"""Build the full enriched subscription list (shared by list + mode=filterValues)."""
from modules.interfaces.interfaceDbSubscription import _getRootInterface as getSubRootInterface
from modules.datamodels.datamodelSubscription import BUILTIN_PLANS, OPERATIVE_STATUSES
@ -480,12 +480,22 @@ def _buildEnrichedSubscriptions() -> List[Dict[str, Any]]:
def getAllSubscriptions(
request: Request,
pagination: Optional[str] = Query(None, description="JSON-encoded PaginationParams object"),
mode: Optional[str] = Query(None, description="'filterValues' for distinct column values, 'ids' for all filtered IDs"),
column: Optional[str] = Query(None, description="Column key (required when mode=filterValues)"),
context: RequestContext = Depends(getRequestContext),
):
"""SysAdmin: list ALL subscriptions across all mandates with enriched metadata."""
if not context.hasSysAdminRole:
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=routeApiMsg("Sysadmin role required"))
if mode == "filterValues":
if not column:
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
return handleFilterValuesInMemory(_buildEnrichedSubscriptions(), column, pagination)
if mode == "ids":
return handleIdsInMemory(_buildEnrichedSubscriptions(), pagination)
paginationParams: Optional[PaginationParams] = None
if pagination:
try:
@ -520,38 +530,6 @@ def getAllSubscriptions(
return {"items": enriched, "pagination": None}
@router.get("/admin/all/filter-values")
@limiter.limit("60/minute")
def getFilterValues(
request: Request,
column: str = Query(..., description="Column key to extract distinct values for"),
pagination: Optional[str] = Query(None, description="JSON-encoded current filters (applied except for the requested column)"),
context: RequestContext = Depends(getRequestContext),
):
"""Return distinct values for a column, respecting all active filters except the requested one."""
if not context.hasSysAdminRole:
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=routeApiMsg("Sysadmin role required"))
crossFilterParams: Optional[PaginationParams] = None
if pagination:
try:
paginationDict = json.loads(pagination)
if paginationDict:
paginationDict = normalize_pagination_dict(paginationDict)
filters = paginationDict.get("filters", {})
filters.pop(column, None)
paginationDict["filters"] = filters
paginationDict.pop("sort", None)
crossFilterParams = PaginationParams(**paginationDict)
except (json.JSONDecodeError, ValueError) as e:
raise HTTPException(status_code=400, detail=f"Invalid pagination parameter: {str(e)}")
enriched = _buildEnrichedSubscriptions()
crossFiltered = _applyFiltersAndSort(enriched, crossFilterParams)
return _extractDistinctValues(crossFiltered, column)
# ============================================================
# Data Volume Usage per Mandate
# ============================================================

View file

@ -20,7 +20,7 @@ from slowapi.util import get_remote_address
from modules.auth.authentication import getRequestContext, RequestContext
from modules.system.mainSystem import NAVIGATION_SECTIONS, _objectKeyToUiComponent
from modules.shared.i18nRegistry import resolveText
from modules.shared.i18nRegistry import resolveText, t
from modules.interfaces.interfaceDbApp import getRootInterface
from modules.interfaces.interfaceFeatures import getFeatureInterface
from modules.datamodels.datamodelRbac import AccessRule, AccessRuleContext
@ -643,7 +643,7 @@ def _buildIntegrationsOverviewPayload(userId: str, user=None) -> Dict[str, Any]:
# --- Platform infra tools (only routes that exist in this deployment) ---
out["infraTools"] = [
{"id": "voice", "label": "Voice / STT"},
{"id": "voice", "label": t("Voice / STT")},
]
accessible_instance_ids: Set[str] = set()

View file

@ -24,7 +24,7 @@ from modules.connectors.connectorDbPostgre import DatabaseConnector
from modules.shared.configuration import APP_CONFIG
from modules.datamodels.datamodelPagination import PaginationParams, normalize_pagination_dict
from modules.features.graphicalEditor.datamodelFeatureGraphicalEditor import (
AutoRun, AutoStepLog, AutoWorkflow, AutoTask,
AutoRun, AutoStepLog, AutoWorkflow, AutoTask, AutoVersion,
)
from modules.shared.i18nRegistry import apiRouteContext
@ -143,6 +143,40 @@ def _scopedWorkflowFilter(context: RequestContext) -> Optional[dict]:
return {"mandateId": "__impossible__"}
def _userMayDeleteWorkflow(context: RequestContext, wfMandateId: Optional[str]) -> bool:
    """Same rules as canDelete on rows in get_system_workflows: sysadmins may
    always delete; otherwise the user must hold admin on the workflow's mandate."""
    if context.hasSysAdminRole:
        return True
    userId = str(context.user.id) if context.user else None
    if not userId or not wfMandateId:
        return False
    adminMandateIds = _getAdminMandateIds(userId, _getUserMandateIds(userId))
    return wfMandateId in adminMandateIds
def _cascadeDeleteAutoWorkflow(db: DatabaseConnector, workflowId: str) -> None:
    """Delete AutoWorkflow and dependent rows (same order as interfaceDbApp._cascadeDeleteGraphicalEditorData).

    Deletion order is children-before-parent: versions, then step logs per
    run followed by the run itself, then tasks, and finally the workflow
    row — so a partial failure never leaves orphaned children.
    """
    wf_id = workflowId
    # Versions attached to the workflow.
    for v in db.getRecordset(AutoVersion, recordFilter={"workflowId": wf_id}) or []:
        vid = v.get("id")
        if vid:
            db.recordDelete(AutoVersion, vid)
    # Runs, each with their step logs removed first.
    for run in db.getRecordset(AutoRun, recordFilter={"workflowId": wf_id}) or []:
        run_id = run.get("id")
        if not run_id:
            continue
        for sl in db.getRecordset(AutoStepLog, recordFilter={"runId": run_id}) or []:
            slid = sl.get("id")
            if slid:
                db.recordDelete(AutoStepLog, slid)
        db.recordDelete(AutoRun, run_id)
    # Scheduled tasks referencing the workflow.
    for task in db.getRecordset(AutoTask, recordFilter={"workflowId": wf_id}) or []:
        tid = task.get("id")
        if tid:
            db.recordDelete(AutoTask, tid)
    # Finally the workflow row itself.
    db.recordDelete(AutoWorkflow, wf_id)
@router.get("")
@limiter.limit("60/minute")
@ -153,13 +187,30 @@ def get_workflow_runs(
status: Optional[str] = Query(None, description="Filter by status"),
mandateId: Optional[str] = Query(None, description="Filter by mandate"),
pagination: Optional[str] = Query(None, description="JSON-encoded PaginationParams"),
mode: Optional[str] = Query(None, description="'filterValues' for distinct column values, 'ids' for all filtered IDs"),
column: Optional[str] = Query(None, description="Column key (required when mode=filterValues)"),
context: RequestContext = Depends(getRequestContext),
) -> dict:
"""List workflow runs with RBAC scoping (SQL-paginated)."""
db = _getDb()
if not db._ensureTableExists(AutoRun):
if mode in ("filterValues", "ids"):
from fastapi.responses import JSONResponse
return JSONResponse(content=[])
return {"runs": [], "total": 0, "limit": limit, "offset": offset}
if mode == "filterValues":
if not column:
from fastapi import HTTPException as _H
raise _H(status_code=400, detail="column parameter required for mode=filterValues")
return _enrichedFilterValues(db, context, AutoRun, _scopedRunFilter, column)
if mode == "ids":
from modules.routes.routeHelpers import handleIdsMode
baseFilter = _scopedRunFilter(context)
recordFilter = dict(baseFilter) if baseFilter else {}
return handleIdsMode(db, AutoRun, pagination, recordFilter)
baseFilter = _scopedRunFilter(context)
recordFilter = dict(baseFilter) if baseFilter else {}
@ -186,8 +237,9 @@ def get_workflow_runs(
sort=[{"field": "sysCreatedAt", "direction": "desc"}],
)
result = db.getRecordsetPaginated(
AutoRun,
from modules.routes.routeHelpers import getRecordsetPaginatedWithFkSort
result = getRecordsetPaginatedWithFkSort(
db, AutoRun,
pagination=paginationParams,
recordFilter=recordFilter if recordFilter else None,
)
@ -340,13 +392,31 @@ def get_system_workflows(
active: Optional[bool] = Query(None, description="Filter by active status"),
mandateId: Optional[str] = Query(None, description="Filter by mandate"),
pagination: Optional[str] = Query(None, description="JSON-encoded PaginationParams"),
mode: Optional[str] = Query(None, description="'filterValues' for distinct column values, 'ids' for all filtered IDs"),
column: Optional[str] = Query(None, description="Column key (required when mode=filterValues)"),
context: RequestContext = Depends(getRequestContext),
) -> dict:
"""List all workflows the user has access to (RBAC-scoped, cross-instance)."""
db = _getDb()
if not db._ensureTableExists(AutoWorkflow):
if mode in ("filterValues", "ids"):
from fastapi.responses import JSONResponse
return JSONResponse(content=[])
return {"items": [], "pagination": {"currentPage": 1, "pageSize": 25, "totalItems": 0, "totalPages": 0}}
if mode == "filterValues":
if not column:
from fastapi import HTTPException as _H
raise _H(status_code=400, detail="column parameter required for mode=filterValues")
return _enrichedFilterValues(db, context, AutoWorkflow, _scopedWorkflowFilter, column)
if mode == "ids":
from modules.routes.routeHelpers import handleIdsMode
baseFilter = _scopedWorkflowFilter(context)
recordFilter = dict(baseFilter) if baseFilter else {}
recordFilter["isTemplate"] = False
return handleIdsMode(db, AutoWorkflow, pagination, recordFilter)
baseFilter = _scopedWorkflowFilter(context)
recordFilter = dict(baseFilter) if baseFilter else {}
recordFilter["isTemplate"] = False
@ -373,8 +443,9 @@ def get_system_workflows(
sort=[{"field": "sysCreatedAt", "direction": "desc"}],
)
result = db.getRecordsetPaginated(
AutoWorkflow,
from modules.routes.routeHelpers import getRecordsetPaginatedWithFkSort
result = getRecordsetPaginatedWithFkSort(
db, AutoWorkflow,
pagination=paginationParams,
recordFilter=recordFilter if recordFilter else None,
)
@ -387,6 +458,7 @@ def get_system_workflows(
mandateLabelMap: dict = {}
instanceLabelMap: dict = {}
featureCodeMap: dict = {}
try:
rootIface = getRootInterface()
if mandateIds:
@ -400,6 +472,7 @@ def get_system_workflows(
fi = featureIface.getFeatureInstance(iid)
if fi:
instanceLabelMap[iid] = fi.label or iid
featureCodeMap[iid] = fi.featureCode
except Exception as e:
logger.warning(f"Failed to enrich workflow labels: {e}")
@ -436,6 +509,7 @@ def get_system_workflows(
wfId = row.get("id")
row["mandateLabel"] = mandateLabelMap.get(wMandateId, wMandateId or "")
row["instanceLabel"] = instanceLabelMap.get(row.get("featureInstanceId"), row.get("featureInstanceId") or "")
row["featureCode"] = featureCodeMap.get(row.get("featureInstanceId"), "")
row["isRunning"] = wfId in activeRunMap
row["activeRunId"] = activeRunMap.get(wfId)
row["runCount"] = runCountMap.get(wfId, 0)
@ -469,34 +543,67 @@ def get_system_workflows(
}
@router.delete("/workflows/{workflowId}")
@limiter.limit("30/minute")
def delete_system_workflow(
    request: Request,
    workflowId: str = Path(..., description="AutoWorkflow ID"),
    context: RequestContext = Depends(getRequestContext),
) -> dict:
    """
    Delete a workflow by ID without requiring featureInstanceId (orphan / broken FK rows).

    RBAC matches get_system_workflows: SysAdmin or Mandate-Admin for the workflow's mandate.
    Cascades versions, runs, step logs, tasks same as mandate cascade delete.

    Raises:
        HTTPException 404: AutoWorkflow table missing, or no row with this ID.
        HTTPException 400: the row is a template workflow (must not be deleted here).
        HTTPException 403: caller may not delete workflows of this mandate.
        HTTPException 500: the cascade delete itself failed.
    """
    db = _getDb()
    # A missing table is reported the same as an unknown ID: 404, not a server error.
    if not db._ensureTableExists(AutoWorkflow):
        raise HTTPException(status_code=404, detail=routeApiMsg("Workflow not found"))
    rows = db.getRecordset(AutoWorkflow, recordFilter={"id": workflowId})
    if not rows:
        raise HTTPException(status_code=404, detail=routeApiMsg("Workflow not found"))
    wf = dict(rows[0]) if rows else {}
    # Template workflows are managed elsewhere; refuse deletion through this route.
    if wf.get("isTemplate"):
        raise HTTPException(status_code=400, detail=routeApiMsg("Cannot delete a template workflow here"))
    wf_mandate_id = wf.get("mandateId")
    if not _userMayDeleteWorkflow(context, wf_mandate_id):
        raise HTTPException(status_code=403, detail=routeApiMsg("Not allowed to delete this workflow"))
    try:
        _cascadeDeleteAutoWorkflow(db, workflowId)
        # Best-effort change notification — a failing callback must not fail
        # the (already completed) delete, hence the swallowed exception.
        try:
            from modules.shared.callbackRegistry import callbackRegistry
            callbackRegistry.trigger("graphicalEditor.workflow.changed")
        except Exception:
            pass
    except Exception as e:
        logger.error(f"delete_system_workflow cascade failed: {e}")
        raise HTTPException(status_code=500, detail=routeApiMsg(str(e)))
    return {"success": True, "id": workflowId}
# ---------------------------------------------------------------------------
# Filter-values endpoints (for FormGeneratorTable column filters)
# ---------------------------------------------------------------------------
def _enrichedFilterValues(
db, context: RequestContext, modelClass, scopeFilter, column: str,
) -> List[str]:
"""Return distinct filter values for enriched columns (mandateLabel, instanceLabel)
or delegate to DB-level DISTINCT for raw columns."""
):
"""Return distinct filter values (IDs) for FK columns or delegate to DB-level DISTINCT.
FK columns return raw IDs the frontend resolves them to labels via fkCache.
Returns JSONResponse to bypass FastAPI response_model validation."""
from fastapi.responses import JSONResponse
if column in ("mandateLabel", "mandateId"):
baseFilter = scopeFilter(context)
recordFilter = dict(baseFilter) if baseFilter else {}
if modelClass == AutoWorkflow:
recordFilter["isTemplate"] = False
items = db.getRecordset(modelClass, recordFilter=recordFilter or None, fieldFilter=["mandateId"]) or []
mandateIds = list({r.get("mandateId") for r in items if r.get("mandateId")})
if not mandateIds:
return []
try:
rootIface = getRootInterface()
mMap = rootIface.getMandatesByIds(mandateIds)
labels = sorted({
getattr(m, "label", None) or getattr(m, "name", mid) or mid
for mid, m in mMap.items()
}, key=lambda v: v.lower())
return labels
except Exception:
return sorted(mandateIds)
mandateIds = sorted({r.get("mandateId") for r in items if r.get("mandateId")})
return JSONResponse(content=mandateIds)
if column in ("instanceLabel", "featureInstanceId"):
baseFilter = scopeFilter(context)
@ -504,28 +611,15 @@ def _enrichedFilterValues(
if modelClass == AutoWorkflow:
recordFilter["isTemplate"] = False
items = db.getRecordset(modelClass, recordFilter=recordFilter or None, fieldFilter=["featureInstanceId"]) or []
instanceIds = list({r.get("featureInstanceId") for r in items if r.get("featureInstanceId")})
instanceIds = sorted({r.get("featureInstanceId") for r in items if r.get("featureInstanceId")})
else:
items = db.getRecordset(modelClass, recordFilter=recordFilter or None, fieldFilter=["workflowId"]) or []
wfIds = list({r.get("workflowId") for r in items if r.get("workflowId")})
instanceIds = []
if wfIds and db._ensureTableExists(AutoWorkflow):
wfs = db.getRecordset(AutoWorkflow, recordFilter={"id": wfIds}, fieldFilter=["featureInstanceId"]) or []
instanceIds = list({w.get("featureInstanceId") for w in wfs if w.get("featureInstanceId")})
if not instanceIds:
return []
try:
from modules.interfaces.interfaceFeatures import getFeatureInterface
rootIface = getRootInterface()
featureIface = getFeatureInterface(rootIface.db)
labels = []
for iid in instanceIds:
fi = featureIface.getFeatureInstance(iid)
if fi:
labels.append(fi.label or iid)
return sorted(set(labels), key=lambda v: v.lower())
except Exception:
return sorted(instanceIds)
instanceIds = sorted({w.get("featureInstanceId") for w in wfs if w.get("featureInstanceId")})
return JSONResponse(content=instanceIds)
if column == "workflowLabel":
baseFilter = scopeFilter(context)
@ -543,43 +637,17 @@ def _enrichedFilterValues(
for wf in wfs:
if wf.get("label"):
labels.add(wf["label"])
return sorted(labels, key=lambda v: v.lower())
return JSONResponse(content=sorted(labels, key=lambda v: v.lower()))
baseFilter = scopeFilter(context)
recordFilter = dict(baseFilter) if baseFilter else {}
if modelClass == AutoWorkflow:
recordFilter["isTemplate"] = False
return db.getDistinctColumnValues(modelClass, column, recordFilter=recordFilter or None) or []
return JSONResponse(content=db.getDistinctColumnValues(modelClass, column, recordFilter=recordFilter or None) or [])
@router.get("/filter-values")
@limiter.limit("60/minute")
def get_run_filter_values(
    request: Request,
    column: str = Query(..., description="Column key"),
    pagination: Optional[str] = Query(None, description="JSON-encoded current filters"),
    context: RequestContext = Depends(getRequestContext),
) -> list:
    """Distinct values of one workflow-run column, for table column filters."""
    connector = _getDb()
    # No AutoRun table yet means there is nothing to filter on.
    if connector._ensureTableExists(AutoRun):
        return _enrichedFilterValues(connector, context, AutoRun, _scopedRunFilter, column)
    return []
@router.get("/workflows/filter-values")
@limiter.limit("60/minute")
def get_workflow_filter_values(
    request: Request,
    column: str = Query(..., description="Column key"),
    pagination: Optional[str] = Query(None, description="JSON-encoded current filters"),
    context: RequestContext = Depends(getRequestContext),
) -> list:
    """Distinct values of one workflow column, for table column filters."""
    connector = _getDb()
    # No AutoWorkflow table yet means there is nothing to filter on.
    if connector._ensureTableExists(AutoWorkflow):
        return _enrichedFilterValues(connector, context, AutoWorkflow, _scopedWorkflowFilter, column)
    return []
# ---------------------------------------------------------------------------

View file

@ -335,6 +335,9 @@ def _buildSchemaContext(
"",
"RULES:",
"- Do NOT invent table or field names. Do NOT prefix fields with UUIDs or dots.",
"- Answer concisely. Cite row counts and key values.",
"- CRITICAL: Return data as compact JSON, NOT as markdown tables or prose.",
"- Do NOT reformat, rewrite, or narrate the tool results. Return the raw data directly.",
"- If the question asks for rows, return them as a JSON array. Do NOT generate a markdown table.",
"- Keep your answer SHORT. The caller is a machine, not a human.",
]
return "\n".join(parts)

View file

@ -162,7 +162,18 @@ class ServiceHub:
functionsOnly = attrName != "ai"
serviceInstance = serviceClass(self)
def _makeServiceResolver(hub):
def _resolver(depKey: str):
return getattr(hub, depKey)
return _resolver
import inspect
sig = inspect.signature(serviceClass.__init__)
paramCount = len([p for p in sig.parameters if p != 'self'])
if paramCount >= 2:
serviceInstance = serviceClass(self, _makeServiceResolver(self))
else:
serviceInstance = serviceClass(self)
setattr(self, attrName, PublicService(serviceInstance, functionsOnly=functionsOnly))
logger.debug(f"Loaded service: {attrName} from {modulePath}")
except Exception as e:

View file

@ -35,6 +35,7 @@ class AttributeDefinition(BaseModel):
placeholder: Optional[str] = None
fkSource: Optional[str] = None
fkDisplayField: Optional[str] = None
fkModel: Optional[str] = None # DB table / Pydantic model name for server-side FK sort (JOIN)
def _getModelLabelEntry(modelName: str) -> Dict[str, Any]:
@ -136,6 +137,7 @@ def getModelAttributeDefinitions(modelClass: Type[BaseModel] = None, userLanguag
frontend_visible = True # Default visible
frontend_fk_source = None # FK dropdown source (e.g., "/api/users/")
frontend_fk_display_field = None # Which field of the FK target to display (e.g., "username", "name")
fk_model = None # Same as fk_model in json_schema_extra — backend JOIN target table name
if field_info:
# Try direct attributes first (though these won't exist for custom kwargs)
@ -192,6 +194,8 @@ def getModelAttributeDefinitions(modelClass: Type[BaseModel] = None, userLanguag
# Extract frontend_fk_display_field - which field of FK target to display
if "frontend_fk_display_field" in json_extra:
frontend_fk_display_field = json_extra.get("frontend_fk_display_field")
if "fk_model" in json_extra:
fk_model = json_extra.get("fk_model")
# Use frontend type if available, otherwise detect from Python type
if frontend_type:
@ -267,7 +271,9 @@ def getModelAttributeDefinitions(modelClass: Type[BaseModel] = None, userLanguag
# Also add display field if specified (which field of FK target to show)
if frontend_fk_display_field:
attr_def["fkDisplayField"] = frontend_fk_display_field
if fk_model:
attr_def["fkModel"] = fk_model
attributes.append(attr_def)
return {"model": model_label, "attributes": attributes}

View file

@ -169,9 +169,23 @@ def _getExecutor(
_stepMeta: Dict[str, Dict[str, str]] = {}
def _stripBinaryValues(obj: Any, depth: int = 0) -> Any:
"""Recursively replace bytes values with None to keep data JSON-safe for DB storage."""
if depth > 12:
return obj
if isinstance(obj, bytes):
return None
if isinstance(obj, dict):
return {k: _stripBinaryValues(v, depth + 1) for k, v in obj.items()}
if isinstance(obj, (list, tuple)):
return [_stripBinaryValues(v, depth + 1) for v in obj]
return obj
def _serializableOutputs(nodeOutputs: Dict[str, Any]) -> Dict[str, Any]:
    """Return a JSON-safe copy of nodeOutputs: strip _context and binary data.

    Fix: the previous text contained diff-merge residue — the old one-line
    implementation (docstring + return) was left in place above the new one,
    making the _stripBinaryValues pass unreachable. Only the new behavior
    is kept.

    Args:
        nodeOutputs: Per-node output mapping; may contain a circular
            "_context" back-reference and raw binary payloads.

    Returns:
        A copy without the "_context" key and with binary values nulled.
    """
    # Drop the circular _context back-reference before serialization.
    cleaned = {k: v for k, v in nodeOutputs.items() if k != "_context"}
    return _stripBinaryValues(cleaned)
def _emitStepEvent(runId: str, stepData: Dict[str, Any]) -> None:
@ -204,7 +218,7 @@ def _createStepLog(iface, runId: str, nodeId: str, nodeType: str, status: str =
"nodeId": nodeId,
"nodeType": nodeType,
"status": status,
"inputSnapshot": inputSnapshot or {},
"inputSnapshot": _stripBinaryValues(inputSnapshot) if inputSnapshot else {},
"startedAt": startedAt,
})
_stepMeta[stepId] = {"runId": runId, "nodeId": nodeId, "nodeType": nodeType}
@ -231,7 +245,7 @@ def _updateStepLog(iface, stepId: str, status: str, output: Dict = None, error:
"completedAt": completedAt,
}
if output is not None:
updates["output"] = output
updates["output"] = _stripBinaryValues(output)
if error is not None:
updates["error"] = error
if durationMs is not None:

View file

@ -7,7 +7,7 @@
import json
import logging
import re
from typing import Dict, Any, List, Optional
from typing import Dict, Any, Optional
from modules.features.graphicalEditor.portTypes import (
INPUT_EXTRACTORS,
@ -262,9 +262,10 @@ class ActionNodeExecutor:
}
raise PauseForEmailWaitError(runId=runId, nodeId=nodeId, waitConfig=waitConfig)
# 7. AI nodes: simpleMode by default
if nodeType == "ai.prompt" and "simpleMode" not in resolvedParams:
resolvedParams["simpleMode"] = True
# 7. AI nodes: normalize legacy "prompt" -> "aiPrompt"
if nodeType == "ai.prompt":
if "aiPrompt" not in resolvedParams and "prompt" in resolvedParams:
resolvedParams["aiPrompt"] = resolvedParams.pop("prompt")
# 8. Build context for email.draftEmail from subject + body
if nodeType == "email.draftEmail":
@ -280,34 +281,8 @@ class ActionNodeExecutor:
resolvedParams.pop("subject", None)
resolvedParams.pop("body", None)
# 9. file.create: build context from upstream
if nodeType == "file.create" and "context" not in resolvedParams:
if 0 in inputSources:
srcId, _ = inputSources[0]
upstream = context.get("nodeOutputs", {}).get(srcId)
if upstream and isinstance(upstream, dict):
data = _unwrapTransit(upstream)
ctx = ""
if isinstance(data, dict):
ctx = data.get("context") or data.get("response") or data.get("text") or ""
if ctx:
resolvedParams["context"] = ctx
# 10. Pass upstream documents as documentList if available
# Use truthiness check: empty values ([], "", None) from static graph params
# must not block automatic upstream population via wire connections.
if not resolvedParams.get("documentList") and 0 in inputSources:
srcId, _ = inputSources[0]
upstream = context.get("nodeOutputs", {}).get(srcId)
if upstream and isinstance(upstream, dict):
data = _unwrapTransit(upstream)
if isinstance(data, dict):
docs = data.get("documents") or data.get("documentList")
if docs:
resolvedParams["documentList"] = docs
# 11. Execute action
logger.info("ActionNodeExecutor node %s calling %s.%s", nodeId, methodName, actionName)
# 9. Execute action
logger.info("ActionNodeExecutor node %s calling %s.%s with %d params", nodeId, methodName, actionName, len(resolvedParams))
try:
executor = ActionExecutor(self.services)
result = await executor.executeAction(methodName, actionName, resolvedParams)
@ -315,24 +290,61 @@ class ActionNodeExecutor:
logger.exception("ActionNodeExecutor node %s FAILED: %s", nodeId, e)
return _normalizeError(e, outputSchema)
# 12. Build normalized output
docsList = [d.model_dump() if hasattr(d, "model_dump") else d for d in (result.documents or [])]
# 10. Persist generated documents as files and build JSON-safe output
docsList = []
for d in (result.documents or []):
dumped = d.model_dump() if hasattr(d, "model_dump") else dict(d) if isinstance(d, dict) else d
rawData = getattr(d, "documentData", None) if hasattr(d, "documentData") else (dumped.get("documentData") if isinstance(dumped, dict) else None)
if isinstance(dumped, dict) and isinstance(rawData, bytes) and len(rawData) > 0:
try:
from modules.interfaces.interfaceDbManagement import getInterface as _getMgmtInterface
from modules.security.rootAccess import getRootUser
_userId = context.get("userId")
_mandateId = context.get("mandateId")
_instanceId = context.get("instanceId")
_mgmt = _getMgmtInterface(getRootUser(), mandateId=_mandateId, featureInstanceId=_instanceId)
_docName = dumped.get("documentName") or f"workflow-result-{nodeId}.bin"
_mimeType = dumped.get("mimeType") or "application/octet-stream"
_fileItem = _mgmt.createFile(_docName, _mimeType, rawData)
_mgmt.createFileData(_fileItem.id, rawData)
dumped["fileId"] = _fileItem.id
dumped["id"] = _fileItem.id
dumped["fileName"] = _fileItem.fileName
logger.info("Persisted workflow document %s as file %s", _docName, _fileItem.id)
except Exception as _fe:
logger.warning("Could not persist workflow document: %s", _fe)
dumped["documentData"] = None
dumped["_hasBinaryData"] = True
docsList.append(dumped)
extractedContext = ""
if result.documents:
doc = result.documents[0]
raw = getattr(doc, "documentData", None) if hasattr(doc, "documentData") else (doc.get("documentData") if isinstance(doc, dict) else None)
if raw:
extractedContext = raw.decode("utf-8", errors="replace").strip() if isinstance(raw, bytes) else str(raw).strip()
if isinstance(raw, bytes):
try:
extractedContext = raw.decode("utf-8").strip()
except (UnicodeDecodeError, ValueError):
extractedContext = ""
elif raw:
extractedContext = str(raw).strip()
promptText = str(resolvedParams.get("aiPrompt") or resolvedParams.get("prompt") or "").strip()
resultData = getattr(result, "data", None)
if resultData and isinstance(resultData, dict):
dataField = resultData
elif hasattr(result, "model_dump"):
dataField = result.model_dump()
else:
dataField = {"success": result.success, "error": result.error}
out = {
"success": result.success,
"error": result.error,
"documents": docsList,
"documentList": docsList,
"data": result.model_dump() if hasattr(result, "model_dump") else {"success": result.success, "error": result.error},
"data": dataField,
}
if nodeType.startswith("ai."):

View file

@ -188,19 +188,17 @@ async def process(self, parameters: Dict[str, Any]) -> ActionResult:
# Simple mode: fast path without document generation pipeline
if simpleMode:
# Update progress - calling AI (simple mode)
self.services.chat.progressLogUpdate(operationId, 0.6, "Calling AI (simple mode)")
# Extract context from documents if provided
context_text = ""
context_parts = []
paramContext = parameters.get("context")
if paramContext and isinstance(paramContext, str) and paramContext.strip():
context_parts.append(paramContext.strip())
if documentList and len(documentList.references) > 0:
try:
# Get documents from workflow
documents = self.services.chat.getChatDocumentsFromDocumentList(documentList)
context_parts = []
for doc in documents:
if hasattr(doc, 'fileId') and doc.fileId:
# Get file data
fileData = self.services.interfaceDbComponent.getFileData(doc.fileId)
if fileData:
if isinstance(fileData, bytes):
@ -208,12 +206,10 @@ async def process(self, parameters: Dict[str, Any]) -> ActionResult:
else:
doc_text = str(fileData)
context_parts.append(doc_text)
if context_parts:
context_text = "\n\n".join(context_parts)
except Exception as e:
logger.warning(f"Error extracting context from documents in simple mode: {e}")
context_text = "\n\n".join(context_parts) if context_parts else ""
# Use direct AI call without document generation pipeline
request = AiCallRequest(
prompt=aiPrompt,
context=context_text if context_text else None,
@ -260,7 +256,10 @@ async def process(self, parameters: Dict[str, Any]) -> ActionResult:
# For code generation, use ai.generateCode action or explicitly pass generationIntent="code"
generationIntent = parameters.get("generationIntent", "document")
# Update progress - calling AI
paramContext = parameters.get("context")
if paramContext and isinstance(paramContext, str) and paramContext.strip():
aiPrompt = f"{aiPrompt}\n\n--- DATA CONTEXT ---\n{paramContext.strip()}"
self.services.chat.progressLogUpdate(operationId, 0.6, "Calling AI")
# Use unified callAiContent method with BOTH documentList and contentParts

View file

@ -74,7 +74,7 @@ async def create(self, parameters: Dict[str, Any]) -> ActionResult:
Create a file from context (text/markdown from upstream AI node).
Uses GenerationService.renderReport to produce docx, pdf, txt, md, html, xlsx, etc.
"""
context = parameters.get("context", "") or ""
context = parameters.get("context", "") or parameters.get("text", "") or ""
if not isinstance(context, str):
context = str(context) if context else ""
context = context.strip()

View file

@ -10,7 +10,7 @@ import logging
import time
from typing import Dict, Any
from modules.datamodels.datamodelChat import ActionResult, ActionDocument
from modules.datamodels.datamodelChat import ActionResult
logger = logging.getLogger(__name__)
@ -75,13 +75,11 @@ async def refreshAccountingData(self, parameters: Dict[str, Any]) -> ActionResul
counts["lastSyncAt"] = lastSyncAt
counts["lastSyncStatus"] = lastSyncStatus
counts["message"] = f"Data is fresh (synced {int(time.time() - lastSyncAt)}s ago). Use forceRefresh=true to re-import."
return ActionResult.isSuccess(documents=[
ActionDocument(
documentName="refresh_result",
documentData=json.dumps(counts, ensure_ascii=False),
mimeType="application/json",
)
])
dataExport = _exportAccountingData(trusteeInterface, featureInstanceId, dateFrom, dateTo)
return ActionResult.isSuccess(data={
"summary": counts,
"accountingData": dataExport,
})
from modules.features.trustee.accounting.accountingDataSync import AccountingDataSync
@ -103,18 +101,108 @@ async def refreshAccountingData(self, parameters: Dict[str, Any]) -> ActionResul
except Exception as cacheErr:
logger.warning("Could not clear feature query cache: %s", cacheErr)
return ActionResult.isSuccess(documents=[
ActionDocument(
documentName="refresh_result",
documentData=json.dumps(summary, ensure_ascii=False),
mimeType="application/json",
)
])
dataExport = _exportAccountingData(trusteeInterface, featureInstanceId, dateFrom, dateTo)
return ActionResult.isSuccess(data={
"summary": summary,
"accountingData": dataExport,
})
except Exception as e:
logger.exception("refreshAccountingData failed")
return ActionResult.isFailure(error=str(e))
def _exportAccountingData(trusteeInterface, featureInstanceId: str, dateFrom: str = None, dateTo: str = None) -> str:
    """Export accounting data (accounts, balances, journal entries+lines) as compact JSON for downstream AI nodes.

    Args:
        trusteeInterface: Interface whose .db connector is queried for TrusteeData* records.
        featureInstanceId: Scopes every query via the featureInstanceId filter.
        dateFrom: Optional lower bound for journal-entry bookingDate (inclusive).
        dateTo: Optional upper bound for journal-entry bookingDate (inclusive).

    Returns:
        Compact JSON string with keys accounts/balances/journalLines/meta,
        or "" if the export fails (best-effort; errors are logged only).
    """
    # Local import avoids a module-level dependency on the trustee feature.
    from modules.features.trustee.datamodelFeatureTrustee import (
        TrusteeDataAccount,
        TrusteeDataJournalEntry,
        TrusteeDataJournalLine,
        TrusteeDataAccountBalance,
    )
    try:
        baseFilter = {"featureInstanceId": featureInstanceId}

        # Accounts keyed by account number, so journal lines below can be
        # enriched with the account label in O(1).
        accounts = trusteeInterface.db.getRecordset(TrusteeDataAccount, recordFilter=baseFilter) or []
        accountMap = {}
        for a in accounts:
            nr = a.get("accountNumber", "")
            accountMap[nr] = {
                "nr": nr,
                "label": a.get("label", ""),
                "type": a.get("accountType", ""),
                "group": a.get("accountGroup", ""),
            }

        # Period balances — NOT restricted by dateFrom/dateTo.
        balances = trusteeInterface.db.getRecordset(TrusteeDataAccountBalance, recordFilter=baseFilter) or []
        balanceList = []
        for b in balances:
            balanceList.append({
                "account": b.get("accountNumber", ""),
                "year": b.get("periodYear", 0),
                "month": b.get("periodMonth", 0),
                "opening": b.get("openingBalance", 0),
                "debit": b.get("debitTotal", 0),
                "credit": b.get("creditTotal", 0),
                "closing": b.get("closingBalance", 0),
            })

        # Journal entries, filtered to the requested date window.
        # NOTE(review): bookingDate is compared as a string — this assumes
        # ISO-8601 formatted dates; confirm against the sync layer.
        entries = trusteeInterface.db.getRecordset(TrusteeDataJournalEntry, recordFilter=baseFilter) or []
        entryMap = {}
        for e in entries:
            eid = e.get("id", "")
            bDate = e.get("bookingDate", "")
            if dateFrom and bDate and bDate < dateFrom:
                continue
            if dateTo and bDate and bDate > dateTo:
                continue
            entryMap[eid] = {
                "date": bDate,
                "ref": e.get("reference", ""),
                "desc": e.get("description", ""),
                "amount": e.get("totalAmount", 0),
            }

        # Journal lines: only lines of entries that survived the date filter
        # are exported; entry-level date/ref/desc are denormalized onto each line.
        lines = trusteeInterface.db.getRecordset(TrusteeDataJournalLine, recordFilter=baseFilter) or []
        lineList = []
        for ln in lines:
            jeId = ln.get("journalEntryId", "")
            if jeId not in entryMap:
                continue
            entry = entryMap[jeId]
            lineList.append({
                "date": entry["date"],
                "ref": entry["ref"],
                "account": ln.get("accountNumber", ""),
                "accountLabel": accountMap.get(ln.get("accountNumber", ""), {}).get("label", ""),
                "debit": ln.get("debitAmount", 0),
                "credit": ln.get("creditAmount", 0),
                # Line description falls back to the entry description.
                "desc": ln.get("description", "") or entry["desc"],
                "taxCode": ln.get("taxCode", ""),
                "costCenter": ln.get("costCenter", ""),
            })

        export = {
            "accounts": list(accountMap.values()),
            "balances": balanceList,
            "journalLines": lineList,
            "meta": {
                "accountCount": len(accountMap),
                "entryCount": len(entryMap),
                "lineCount": len(lineList),
                "balanceCount": len(balanceList),
                "dateFrom": dateFrom,
                "dateTo": dateTo,
            },
        }
        # default=str stringifies non-JSON types (e.g. dates/Decimals) instead of raising.
        result = json.dumps(export, ensure_ascii=False, default=str)
        logger.info("Exported accounting data: %d accounts, %d entries, %d lines, %d balances (%d bytes)",
                    len(accountMap), len(entryMap), len(lineList), len(balanceList), len(result))
        return result
    except Exception as e:
        # Best-effort export: callers treat "" as "no data available".
        logger.warning("Could not export accounting data: %s", e)
        return ""
def _getCachedCounts(trusteeInterface, featureInstanceId: str) -> Dict[str, Any]:
"""Count existing records per TrusteeData* table without triggering an external sync."""
from modules.features.trustee.datamodelFeatureTrustee import (

View file

@ -196,20 +196,6 @@ except Exception as e:
errors.append(f"Phase 1 Registration: {e}")
print(f" [FAIL] Phase 1 Registration: {e}")
# ── Phase 1: Migration ────────────────────────────────────────────────────────
print("\n--- Phase 1: Migration ---")
try:
with open(os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
"modules", "migration", "migrateRootUsers.py"), "r") as f:
source = f.read()
_check("Migration script exists", True)
_check("Migration has _isMigrationCompleted", "_isMigrationCompleted" in source)
_check("Migration has migrateRootUsers", "migrateRootUsers" in source)
except Exception as e:
errors.append(f"Phase 1 Migration: {e}")
print(f" [FAIL] Phase 1 Migration: {e}")
# ── Fix 1: OnboardingWizard Integration ────────────────────────────────────────
print("\n--- Fix 1: OnboardingWizard Integration ---")