cleanup routes — integration view

This commit is contained in:
ValueOn AG 2026-04-12 18:32:21 +02:00
parent e43b0741ed
commit 5780cc0324
28 changed files with 728 additions and 6020 deletions

9
app.py
View file

@ -570,9 +570,6 @@ app.include_router(voiceGoogleRouter)
from modules.routes.routeVoiceUser import router as voiceUserRouter from modules.routes.routeVoiceUser import router as voiceUserRouter
app.include_router(voiceUserRouter) app.include_router(voiceUserRouter)
from modules.routes.routeSecurityAdmin import router as adminSecurityRouter
app.include_router(adminSecurityRouter)
from modules.routes.routeSharepoint import router as sharepointRouter from modules.routes.routeSharepoint import router as sharepointRouter
app.include_router(sharepointRouter) app.include_router(sharepointRouter)
@ -582,9 +579,6 @@ app.include_router(adminLogsRouter)
from modules.routes.routeAdminRbacRules import router as rbacAdminRulesRouter from modules.routes.routeAdminRbacRules import router as rbacAdminRulesRouter
app.include_router(rbacAdminRulesRouter) app.include_router(rbacAdminRulesRouter)
from modules.routes.routeMessaging import router as messagingRouter
app.include_router(messagingRouter)
from modules.routes.routeAdminFeatures import router as featuresAdminRouter from modules.routes.routeAdminFeatures import router as featuresAdminRouter
app.include_router(featuresAdminRouter) app.include_router(featuresAdminRouter)
@ -600,9 +594,6 @@ app.include_router(notificationsRouter)
from modules.routes.routeI18n import router as i18nRouter from modules.routes.routeI18n import router as i18nRouter
app.include_router(i18nRouter) app.include_router(i18nRouter)
from modules.routes.routeAdminRbacExport import router as rbacAdminExportRouter
app.include_router(rbacAdminExportRouter)
from modules.routes.routeAdminUserAccessOverview import router as userAccessOverviewRouter from modules.routes.routeAdminUserAccessOverview import router as userAccessOverviewRouter
app.include_router(userAccessOverviewRouter) app.include_router(userAccessOverviewRouter)

View file

@ -243,12 +243,12 @@ class User(PowerOnModel):
) )
language: str = Field( language: str = Field(
default="de", default="de",
description="Preferred UI language code (must exist as UiLanguageSet; loaded from /api/i18n/user-language-options).", description="Preferred UI language code (must exist as UiLanguageSet).",
json_schema_extra={ json_schema_extra={
"frontend_type": "select", "frontend_type": "select",
"frontend_readonly": False, "frontend_readonly": False,
"frontend_required": True, "frontend_required": True,
"frontend_options": "/api/i18n/user-language-options", "frontend_options": "/api/i18n/codes",
"label": "Sprache", "label": "Sprache",
}, },
) )

View file

@ -110,38 +110,6 @@ def _validateInstanceAccess(instanceId: str, context: RequestContext) -> str:
return str(instance.mandateId) if instance.mandateId else "" return str(instance.mandateId) if instance.mandateId else ""
@router.get("/{instanceId}/info")
@limiter.limit("60/minute")
def get_info(
    request: Request,
    instanceId: str = Path(..., description="Feature instance ID"),
    context: RequestContext = Depends(getRequestContext),
) -> dict:
    """Lightweight readiness/info endpoint for the graphicalEditor feature.

    Verifies the caller may access the instance, then returns a static
    status payload identifying the feature and instance.
    """
    _validateInstanceAccess(instanceId, context)
    payload = {
        "featureCode": "graphicalEditor",
        "instanceId": instanceId,
    }
    payload["status"] = "ok"
    payload["message"] = "GraphicalEditor feature ready."
    return payload
@router.post("/{instanceId}/schedule-sync")
@limiter.limit("10/minute")
def post_schedule_sync(
    request: Request,
    instanceId: str = Path(..., description="Feature instance ID"),
    context: RequestContext = Depends(getRequestContext),
) -> dict:
    """Manually trigger schedule sync (re-register cron jobs for all schedule workflows)."""
    _validateInstanceAccess(instanceId, context)
    # Imported lazily so the scheduler module is only loaded on demand.
    from modules.workflows.scheduler.mainScheduler import syncNow
    syncResult = syncNow()
    response = {"success": True}
    response.update(syncResult or {})
    return response
@router.get("/{instanceId}/node-types") @router.get("/{instanceId}/node-types")
@limiter.limit("60/minute") @limiter.limit("60/minute")
def get_node_types( def get_node_types(
@ -1003,128 +971,6 @@ def delete_workflow(
return {"success": True} return {"success": True}
@router.post("/{instanceId}/workflows/{workflowId}/webhooks/{entryPointId}")
@limiter.limit("60/minute")
async def post_workflow_webhook(
    request: Request,
    instanceId: str = Path(..., description="Feature instance ID"),
    workflowId: str = Path(..., description="Workflow ID"),
    entryPointId: str = Path(..., description="Entry point ID (kind must be webhook)"),
    body: dict = Body(default_factory=dict),
    context: RequestContext = Depends(getRequestContext),
) -> dict:
    """Invoke a workflow via a webhook entry point.

    Validates instance access, resolves the workflow and the referenced
    entry point (which must be an enabled entry of kind "webhook"),
    optionally verifies a shared secret from the X-Webhook-Secret header,
    then executes the workflow graph with a webhook-typed run envelope.

    Raises:
        HTTPException: 404 if the workflow or entry point is missing,
            400 if the entry point is not a webhook or is disabled,
            403 on secret mismatch.
    """
    mandateId = _validateInstanceAccess(instanceId, context)
    userId = str(context.user.id) if context.user else None
    iface = getGraphicalEditorInterface(context.user, mandateId, instanceId)
    wf = iface.getWorkflow(workflowId)
    if not wf or not wf.get("graph"):
        raise HTTPException(status_code=404, detail=routeApiMsg("Workflow not found"))
    inv = find_invocation(wf, entryPointId)
    if not inv:
        raise HTTPException(status_code=404, detail=routeApiMsg("Entry point not found"))
    if inv.get("kind") != "webhook":
        raise HTTPException(status_code=400, detail=routeApiMsg("Entry point is not a webhook"))
    if not inv.get("enabled", True):
        raise HTTPException(status_code=400, detail=routeApiMsg("Entry point is disabled"))
    # Secret check is only enforced when the entry point config defines one.
    # NOTE(review): plain string comparison; hmac.compare_digest would avoid
    # timing side channels — confirm whether that matters here.
    cfg = inv.get("config") or {}
    secret = cfg.get("webhookSecret")
    if secret:
        hdr = request.headers.get("X-Webhook-Secret")
        if hdr != str(secret):
            raise HTTPException(status_code=403, detail=routeApiMsg("Invalid webhook secret"))
    services = getGraphicalEditorServices(
        context.user,
        mandateId=mandateId,
        featureInstanceId=instanceId,
    )
    from modules.workflows.processing.shared.methodDiscovery import discoverMethods
    discoverMethods(services)
    title = inv.get("title") or {}
    label = resolveText(title)
    # Non-dict JSON bodies still travel in raw.httpBody, but the structured
    # payload falls back to an empty dict.
    pl = body if isinstance(body, dict) else {}
    base = default_run_envelope(
        "webhook",
        entry_point_id=inv.get("id"),
        entry_point_label=label or None,
        payload=pl,
        raw={"httpBody": body},
    )
    run_env = normalize_run_envelope(base, user_id=userId)
    result = await executeGraph(
        graph=wf["graph"],
        services=services,
        workflowId=workflowId,
        instanceId=instanceId,
        userId=userId,
        mandateId=mandateId,
        automation2_interface=iface,
        run_envelope=run_env,
    )
    return result
@router.post("/{instanceId}/workflows/{workflowId}/forms/{entryPointId}/submit")
@limiter.limit("60/minute")
async def post_workflow_form_submit(
    request: Request,
    instanceId: str = Path(..., description="Feature instance ID"),
    workflowId: str = Path(..., description="Workflow ID"),
    entryPointId: str = Path(..., description="Entry point ID (kind must be form)"),
    body: dict = Body(default_factory=dict),
    context: RequestContext = Depends(getRequestContext),
) -> dict:
    """Form-style submit: same as execute with trigger.type form and payload from body.

    Mirrors the webhook invocation flow but requires the entry point kind
    to be "form" and performs no secret check.

    Raises:
        HTTPException: 404 if the workflow or entry point is missing,
            400 if the entry point is not a form or is disabled.
    """
    mandateId = _validateInstanceAccess(instanceId, context)
    userId = str(context.user.id) if context.user else None
    iface = getGraphicalEditorInterface(context.user, mandateId, instanceId)
    wf = iface.getWorkflow(workflowId)
    if not wf or not wf.get("graph"):
        raise HTTPException(status_code=404, detail=routeApiMsg("Workflow not found"))
    inv = find_invocation(wf, entryPointId)
    if not inv:
        raise HTTPException(status_code=404, detail=routeApiMsg("Entry point not found"))
    if inv.get("kind") != "form":
        raise HTTPException(status_code=400, detail=routeApiMsg("Entry point is not a form"))
    if not inv.get("enabled", True):
        raise HTTPException(status_code=400, detail=routeApiMsg("Entry point is disabled"))
    services = getGraphicalEditorServices(
        context.user,
        mandateId=mandateId,
        featureInstanceId=instanceId,
    )
    from modules.workflows.processing.shared.methodDiscovery import discoverMethods
    discoverMethods(services)
    title = inv.get("title") or {}
    label = resolveText(title)
    # Structured payload falls back to an empty dict for non-dict bodies;
    # the original body is preserved in raw.formBody either way.
    pl = body if isinstance(body, dict) else {}
    base = default_run_envelope(
        "form",
        entry_point_id=inv.get("id"),
        entry_point_label=label or None,
        payload=pl,
        raw={"formBody": body},
    )
    run_env = normalize_run_envelope(base, user_id=userId)
    result = await executeGraph(
        graph=wf["graph"],
        services=services,
        workflowId=workflowId,
        instanceId=instanceId,
        userId=userId,
        mandateId=mandateId,
        automation2_interface=iface,
        run_envelope=run_env,
    )
    return result
# ------------------------------------------------------------------------- # -------------------------------------------------------------------------
# Runs and Resume # Runs and Resume
# ------------------------------------------------------------------------- # -------------------------------------------------------------------------
@ -1182,55 +1028,6 @@ def get_run_steps(
return {"steps": steps} return {"steps": steps}
@router.post("/{instanceId}/runs/{runId}/resume")
@limiter.limit("30/minute")
async def resume_run(
    request: Request,
    instanceId: str = Path(..., description="Feature instance ID"),
    runId: str = Path(..., description="Run ID"),
    body: dict = Body(..., description="{ taskId, result }"),
    context: RequestContext = Depends(getRequestContext),
) -> dict:
    """Resume a paused run after task completion.

    Marks the pending task as completed with the supplied result, merges
    that result into the run's node outputs, and re-executes the workflow
    graph starting after the task's node.

    Raises:
        HTTPException: 404 if run/task not found or task belongs to a
            different run, 400 if taskId/result are missing, the task was
            already completed, or the workflow graph no longer exists.
    """
    mandateId = _validateInstanceAccess(instanceId, context)
    iface = getGraphicalEditorInterface(context.user, mandateId, instanceId)
    run = iface.getRun(runId)
    if not run:
        raise HTTPException(status_code=404, detail=routeApiMsg("Run not found"))
    taskId = body.get("taskId")
    result = body.get("result")
    # NOTE: only None is rejected for result — falsy values such as ""
    # or 0 are accepted as valid task results.
    if not taskId or result is None:
        raise HTTPException(status_code=400, detail=routeApiMsg("taskId and result required"))
    task = iface.getTask(taskId)
    if not task or task.get("runId") != runId:
        raise HTTPException(status_code=404, detail=routeApiMsg("Task not found"))
    if task.get("status") != "pending":
        raise HTTPException(status_code=400, detail=routeApiMsg("Task already completed"))
    # NOTE(review): the task is marked completed before the graph resumes;
    # if executeGraph fails below, the task stays completed — confirm this
    # is the intended recovery semantics.
    iface.updateTask(taskId, status="completed", result=result)
    nodeId = task.get("nodeId")
    # Copy node outputs so the stored run record is not mutated in place.
    nodeOutputs = dict(run.get("nodeOutputs") or {})
    nodeOutputs[nodeId] = result
    workflowId = run.get("workflowId")
    wf = iface.getWorkflow(workflowId) if workflowId else None
    if not wf or not wf.get("graph"):
        raise HTTPException(status_code=400, detail=routeApiMsg("Workflow graph not found"))
    graph = wf["graph"]
    services = getGraphicalEditorServices(context.user, mandateId=mandateId, featureInstanceId=instanceId)
    resume_result = await executeGraph(
        graph=graph,
        services=services,
        workflowId=workflowId,
        instanceId=instanceId,
        userId=str(context.user.id) if context.user else None,
        mandateId=mandateId,
        automation2_interface=iface,
        initialNodeOutputs=nodeOutputs,
        startAfterNodeId=nodeId,
        runId=runId,
    )
    return resume_result
# ------------------------------------------------------------------------- # -------------------------------------------------------------------------
# Tasks # Tasks
# ------------------------------------------------------------------------- # -------------------------------------------------------------------------

View file

@ -1729,6 +1729,71 @@ def clear_ai_data_cache(
return {"cleared": removed, "featureInstanceId": instanceId} return {"cleared": removed, "featureInstanceId": instanceId}
# ===== Data Export =====
@router.get("/{instanceId}/accounting/export-data")
@limiter.limit("3/minute")
def export_accounting_data(
    request: Request,
    instanceId: str = Path(..., description="Feature Instance ID"),
    context: RequestContext = Depends(getRequestContext),
) -> Response:
    """Export all TrusteeData* tables for this instance as a JSON download (admin only)."""
    mandateId = _validateInstanceAccess(instanceId, context)
    # Lazy imports keep the heavy datamodel module out of the import path
    # until the endpoint is actually hit.
    from .datamodelFeatureTrustee import (
        TrusteeDataAccount,
        TrusteeDataJournalEntry,
        TrusteeDataJournalLine,
        TrusteeDataContact,
        TrusteeDataAccountBalance,
        TrusteeAccountingConfig,
    )
    import time as _time
    interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
    instanceFilter = {"featureInstanceId": instanceId}
    exportModels = (
        ("TrusteeDataAccount", TrusteeDataAccount),
        ("TrusteeDataJournalEntry", TrusteeDataJournalEntry),
        ("TrusteeDataJournalLine", TrusteeDataJournalLine),
        ("TrusteeDataContact", TrusteeDataContact),
        ("TrusteeDataAccountBalance", TrusteeDataAccountBalance),
    )
    tables: Dict[str, Any] = {
        tableName: interface.db.getRecordset(model, recordFilter=instanceFilter) or []
        for tableName, model in exportModels
    }
    # Include the active connector's sync metadata when a config exists.
    cfgRecords = interface.db.getRecordset(
        TrusteeAccountingConfig,
        recordFilter={"featureInstanceId": instanceId, "isActive": True},
    )
    syncInfo = {}
    if cfgRecords:
        activeCfg = cfgRecords[0]
        syncInfo = {
            "connectorType": activeCfg.get("connectorType", ""),
            "lastSyncAt": activeCfg.get("lastSyncAt"),
            "lastSyncStatus": activeCfg.get("lastSyncStatus", ""),
        }
    payload = {
        "exportedAt": _time.time(),
        "featureInstanceId": instanceId,
        "mandateId": mandateId,
        "syncInfo": syncInfo,
        "tables": tables,
    }
    # default=str stringifies non-JSON-native values (UUIDs, timestamps).
    jsonBytes = json.dumps(payload, ensure_ascii=False, default=str).encode("utf-8")
    return Response(
        content=jsonBytes,
        media_type="application/json",
        headers={"Content-Disposition": f'attachment; filename="trustee_data_{instanceId[:8]}.json"'},
    )
# ===== Position-Document Query ===== # ===== Position-Document Query =====
@router.get("/{instanceId}/positions/document/{documentId}", response_model=List[TrusteePosition]) @router.get("/{instanceId}/positions/document/{documentId}", response_model=List[TrusteePosition])

View file

@ -1709,7 +1709,7 @@ class ComponentObjects:
logger.warning(f"No access to file ID {fileId}") logger.warning(f"No access to file ID {fileId}")
return None return None
fileDataEntries = getRecordsetWithRBAC(self.db, FileData, self.currentUser, recordFilter={"id": fileId}, mandateId=self.mandateId) fileDataEntries = self.db.getRecordset(FileData, recordFilter={"id": fileId})
if not fileDataEntries: if not fileDataEntries:
logger.warning(f"No data found for file ID {fileId}") logger.warning(f"No data found for file ID {fileId}")
return None return None

View file

@ -1,602 +0,0 @@
# Copyright (c) 2025 Patrick Motsch
# All rights reserved.
"""
RBAC export/import routes for the backend API.
Implements endpoints for exporting and importing RBAC configurations.
Multi-Tenant Design:
- Global templates: SysAdmin can export/import
- Mandate-scoped RBAC: Mandate Admin can export/import
- Feature instance roles: Included in mandate export
"""
from fastapi import APIRouter, HTTPException, Depends, Request, UploadFile, File
from fastapi.responses import JSONResponse
from typing import List, Dict, Any, Optional
from fastapi import status
import logging
import json
from pydantic import BaseModel, Field
from modules.auth import limiter, getRequestContext, RequestContext, requireSysAdminRole
from modules.datamodels.datamodelUam import User
from modules.datamodels.datamodelRbac import Role, AccessRule
from modules.datamodels.datamodelUtils import coerce_text_multilingual
from modules.interfaces.interfaceDbApp import getRootInterface
from modules.shared.timeUtils import getUtcTimestamp
from modules.shared.i18nRegistry import apiRouteContext
# i18n message helper scoped to this route module's translation context.
routeApiMsg = apiRouteContext("routeAdminRbacExport")
logger = logging.getLogger(__name__)
# All endpoints in this module are mounted under /api/rbac.
router = APIRouter(
    prefix="/api/rbac",
    tags=["RBAC Export/Import"],
    responses={404: {"description": "Not found"}}
)
# =============================================================================
# Request/Response Models
# =============================================================================
class RoleExport(BaseModel):
    """Export model for a role with its access rules."""
    roleLabel: str  # role label, used to match roles on import
    description: Dict[str, str]  # multilingual description, keyed by language code
    featureCode: Optional[str]  # owning feature, or None for feature-independent roles
    isSystemRole: bool  # system roles are skipped on mandate-level import
    accessRules: List[Dict[str, Any]]  # flattened AccessRule dicts (context/item + CRUD/view flags)
class RbacExportData(BaseModel):
    """Complete RBAC export data."""
    exportVersion: str = "1.0"  # format version of the export payload
    exportedAt: float  # UTC timestamp of the export
    exportedBy: str  # id of the exporting user
    scope: str  # "global" or "mandate"
    mandateId: Optional[str]  # set for mandate-scoped exports, None for global
    roles: List[RoleExport]  # exported roles with their access rules
class RbacImportResult(BaseModel):
    """Result of RBAC import operation."""
    rolesCreated: int  # roles newly created
    rolesUpdated: int  # existing roles updated (updateExisting=True only)
    rolesSkipped: int  # roles skipped (existing, unlabeled, or system roles)
    rulesCreated: int  # access rules created for new roles
    rulesUpdated: int  # access rules replaced on updated roles
    errors: List[str]  # per-role error messages; import continues past failures
# =============================================================================
# Global RBAC Export/Import (SysAdmin only)
# =============================================================================
@router.get("/export/global", response_model=RbacExportData)
@limiter.limit("10/minute")
def export_global_rbac(
    request: Request,
    sysAdmin: User = Depends(requireSysAdminRole)
) -> RbacExportData:
    """
    Export global (template) RBAC rules.
    SysAdmin only - exports template roles that are copied to new feature instances.
    These are roles with mandateId=NULL.
    """
    try:
        rootInterface = getRootInterface()

        def serializeRule(rule):
            # Flatten one AccessRule into the export wire format; a NULL
            # "view" flag is exported as False.
            return {
                "context": rule.context,
                "item": rule.item,
                "view": rule.view if rule.view is not None else False,
                "read": rule.read,
                "create": rule.create,
                "update": rule.update,
                "delete": rule.delete,
            }

        # Global template roles are the ones without a mandate.
        exportRoles = [
            RoleExport(
                roleLabel=role.roleLabel,
                description=role.description or {},
                featureCode=role.featureCode,
                isSystemRole=role.isSystemRole,
                accessRules=[serializeRule(r) for r in rootInterface.getAccessRulesByRole(role.id)],
            )
            for role in rootInterface.getAllRoles()
            if role.mandateId is None
        ]
        logger.info(f"SysAdmin {sysAdmin.id} exported global RBAC ({len(exportRoles)} roles)")
        return RbacExportData(
            exportedAt=getUtcTimestamp(),
            exportedBy=str(sysAdmin.id),
            scope="global",
            mandateId=None,
            roles=exportRoles
        )
    except Exception as e:
        logger.error(f"Error exporting global RBAC: {e}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to export RBAC: {str(e)}"
        )
@router.post("/import/global", response_model=RbacImportResult)
@limiter.limit("5/minute")
async def import_global_rbac(
    request: Request,
    file: UploadFile = File(..., description="JSON file with RBAC export data"),
    updateExisting: bool = False,
    sysAdmin: User = Depends(requireSysAdminRole)
) -> RbacImportResult:
    """
    Import global (template) RBAC rules.
    SysAdmin only - imports template roles and their access rules.
    Args:
        file: JSON file containing RbacExportData
        updateExisting: If True, update existing roles. If False, skip them.
    Returns:
        RbacImportResult with created/updated/skipped counters; per-role
        failures are collected in errors and do not abort the import.
    Raises:
        HTTPException: 400 for invalid JSON or a missing 'roles' field,
            500 for unexpected failures.
    """
    try:
        # Read and parse file
        content = await file.read()
        try:
            data = json.loads(content.decode("utf-8"))
        except json.JSONDecodeError as e:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail=f"Invalid JSON: {str(e)}"
            )
        # Validate structure
        if "roles" not in data:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail=routeApiMsg("Missing 'roles' field in import data")
            )
        rootInterface = getRootInterface()
        result = RbacImportResult(
            rolesCreated=0,
            rolesUpdated=0,
            rolesSkipped=0,
            rulesCreated=0,
            rulesUpdated=0,
            errors=[]
        )
        for roleData in data.get("roles", []):
            try:
                roleLabel = roleData.get("roleLabel")
                featureCode = roleData.get("featureCode")
                if not roleLabel:
                    # was: f"Role without label skipped" (f-string with no placeholders)
                    result.errors.append("Role without label skipped")
                    result.rolesSkipped += 1
                    continue
                # Check if role exists (global role with same label and featureCode)
                allRoles = rootInterface.getAllRoles()
                existingRoles = [
                    r for r in allRoles
                    if r.roleLabel == roleLabel
                    and r.mandateId is None
                    and r.featureCode == featureCode
                ]
                if existingRoles:
                    if updateExisting:
                        # Update existing role metadata, then replace its rules.
                        existingRole = existingRoles[0]
                        roleId = existingRole.id
                        rootInterface.db.recordModify(
                            Role,
                            roleId,
                            {
                                "description": roleData.get("description", {}),
                                "isSystemRole": roleData.get("isSystemRole", False)
                            }
                        )
                        # Update access rules
                        result.rulesUpdated += _updateAccessRules(
                            rootInterface,
                            roleId,
                            roleData.get("accessRules", [])
                        )
                        result.rolesUpdated += 1
                    else:
                        result.rolesSkipped += 1
                        continue
                else:
                    # Create new global template role (mandateId=None).
                    newRole = Role(
                        roleLabel=roleLabel,
                        description=coerce_text_multilingual(roleData.get("description", {})),
                        featureCode=featureCode,
                        mandateId=None,
                        featureInstanceId=None,
                        isSystemRole=roleData.get("isSystemRole", False)
                    )
                    createdRole = rootInterface.db.recordCreate(Role, newRole.model_dump())
                    roleId = createdRole.get("id")
                    # Create access rules for the new role.
                    for ruleData in roleData.get("accessRules", []):
                        newRule = AccessRule(
                            roleId=roleId,
                            context=ruleData.get("context"),
                            item=ruleData.get("item"),
                            view=ruleData.get("view", False),
                            read=ruleData.get("read"),
                            create=ruleData.get("create"),
                            update=ruleData.get("update"),
                            delete=ruleData.get("delete")
                        )
                        rootInterface.db.recordCreate(AccessRule, newRule.model_dump())
                        result.rulesCreated += 1
                    result.rolesCreated += 1
            except Exception as e:
                # Collect per-role failures; the remaining roles still import.
                result.errors.append(f"Error processing role '{roleData.get('roleLabel', 'unknown')}': {str(e)}")
        logger.info(
            f"SysAdmin {sysAdmin.id} imported global RBAC: "
            f"{result.rolesCreated} created, {result.rolesUpdated} updated, "
            f"{result.rolesSkipped} skipped"
        )
        return result
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error importing global RBAC: {e}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to import RBAC: {str(e)}"
        )
# =============================================================================
# Mandate RBAC Export/Import (Mandate Admin)
# =============================================================================
@router.get("/export/mandate", response_model=RbacExportData)
@limiter.limit("10/minute")
def export_mandate_rbac(
    request: Request,
    includeFeatureInstances: bool = True,
    context: RequestContext = Depends(getRequestContext)
) -> RbacExportData:
    """
    Export RBAC rules for the current mandate.
    Requires Mandate-Admin role. Exports mandate-level roles and optionally
    feature instance roles.
    Args:
        includeFeatureInstances: Include feature instance roles in export
    Raises:
        HTTPException: 400 without an X-Mandate-Id header, 403 without
            the Mandate-Admin role, 500 on unexpected failures.
    """
    if not context.mandateId:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=routeApiMsg("X-Mandate-Id header is required")
        )
    # Check mandate admin permission
    if not _hasMandateAdminRole(context):
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail=routeApiMsg("Mandate-Admin role required to export RBAC")
        )
    try:
        rootInterface = getRootInterface()
        # Mandate-level roles: compare as strings since mandateId may be a
        # UUID object on the role but a string in the request context.
        allRoles = rootInterface.getAllRoles()
        mandateRoles = [
            r for r in allRoles
            if str(r.mandateId) == str(context.mandateId)
        ]
        # Drop feature-instance-bound roles unless explicitly requested.
        if not includeFeatureInstances:
            mandateRoles = [r for r in mandateRoles if not r.featureInstanceId]
        exportRoles = []
        for role in mandateRoles:
            roleId = role.id
            # Collect this role's access rules for the export payload.
            accessRules = rootInterface.getAccessRulesByRole(roleId)
            exportRoles.append(RoleExport(
                roleLabel=role.roleLabel,
                description=role.description or {},
                featureCode=role.featureCode,
                isSystemRole=role.isSystemRole,
                accessRules=[
                    {
                        "context": r.context,
                        "item": r.item,
                        # NULL view flags are exported as False.
                        "view": r.view if r.view is not None else False,
                        "read": r.read,
                        "create": r.create,
                        "update": r.update,
                        "delete": r.delete
                    }
                    for r in accessRules
                ]
            ))
        logger.info(
            f"User {context.user.id} exported mandate {context.mandateId} RBAC "
            f"({len(exportRoles)} roles)"
        )
        return RbacExportData(
            exportedAt=getUtcTimestamp(),
            exportedBy=str(context.user.id),
            scope="mandate",
            mandateId=str(context.mandateId),
            roles=exportRoles
        )
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error exporting mandate RBAC: {e}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to export RBAC: {str(e)}"
        )
@router.post("/import/mandate", response_model=RbacImportResult)
@limiter.limit("5/minute")
async def import_mandate_rbac(
    request: Request,
    file: UploadFile = File(..., description="JSON file with RBAC export data"),
    updateExisting: bool = False,
    context: RequestContext = Depends(getRequestContext)
) -> RbacImportResult:
    """
    Import RBAC rules for the current mandate.
    Requires Mandate-Admin role. Imports roles as mandate-level roles
    (not feature instance roles - those are created via template copying).
    Args:
        file: JSON file containing RbacExportData
        updateExisting: If True, update existing roles. If False, skip them.
    Returns:
        RbacImportResult with created/updated/skipped counters; per-role
        failures are collected in errors and do not abort the import.
    Raises:
        HTTPException: 400 for a missing X-Mandate-Id header, invalid JSON
            or a missing 'roles' field; 403 without the Mandate-Admin role;
            500 for unexpected failures.
    """
    if not context.mandateId:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=routeApiMsg("X-Mandate-Id header is required")
        )
    # Check mandate admin permission
    if not _hasMandateAdminRole(context):
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail=routeApiMsg("Mandate-Admin role required to import RBAC")
        )
    try:
        # Read and parse file
        content = await file.read()
        try:
            data = json.loads(content.decode("utf-8"))
        except json.JSONDecodeError as e:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail=f"Invalid JSON: {str(e)}"
            )
        # Validate structure
        if "roles" not in data:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail=routeApiMsg("Missing 'roles' field in import data")
            )
        rootInterface = getRootInterface()
        result = RbacImportResult(
            rolesCreated=0,
            rolesUpdated=0,
            rolesSkipped=0,
            rulesCreated=0,
            rulesUpdated=0,
            errors=[]
        )
        for roleData in data.get("roles", []):
            try:
                roleLabel = roleData.get("roleLabel")
                featureCode = roleData.get("featureCode")
                if not roleLabel:
                    # was: f"Role without label skipped" (f-string with no placeholders)
                    result.errors.append("Role without label skipped")
                    result.rolesSkipped += 1
                    continue
                # System roles cannot be imported at mandate level
                if roleData.get("isSystemRole", False):
                    result.errors.append(f"System role '{roleLabel}' skipped (SysAdmin only)")
                    result.rolesSkipped += 1
                    continue
                # Check if role exists (mandate role with same label);
                # only mandate-level roles count, not feature instance roles.
                allRoles = rootInterface.getAllRoles()
                existingRoles = [
                    r for r in allRoles
                    if r.roleLabel == roleLabel
                    and str(r.mandateId) == str(context.mandateId)
                    and r.featureInstanceId is None
                ]
                if existingRoles:
                    if updateExisting:
                        # Update existing role description, then replace its rules.
                        existingRole = existingRoles[0]
                        roleId = existingRole.id
                        rootInterface.db.recordModify(
                            Role,
                            roleId,
                            {"description": roleData.get("description", {})}
                        )
                        # Update access rules
                        result.rulesUpdated += _updateAccessRules(
                            rootInterface,
                            roleId,
                            roleData.get("accessRules", [])
                        )
                        result.rolesUpdated += 1
                    else:
                        result.rolesSkipped += 1
                        continue
                else:
                    # Create new role at mandate level
                    newRole = Role(
                        roleLabel=roleLabel,
                        description=coerce_text_multilingual(roleData.get("description", {})),
                        featureCode=featureCode,
                        mandateId=str(context.mandateId),
                        featureInstanceId=None,
                        isSystemRole=False  # Never create system roles via import
                    )
                    createdRole = rootInterface.db.recordCreate(Role, newRole.model_dump())
                    roleId = createdRole.get("id")
                    # Create access rules for the new role.
                    for ruleData in roleData.get("accessRules", []):
                        newRule = AccessRule(
                            roleId=roleId,
                            context=ruleData.get("context"),
                            item=ruleData.get("item"),
                            view=ruleData.get("view", False),
                            read=ruleData.get("read"),
                            create=ruleData.get("create"),
                            update=ruleData.get("update"),
                            delete=ruleData.get("delete")
                        )
                        rootInterface.db.recordCreate(AccessRule, newRule.model_dump())
                        result.rulesCreated += 1
                    result.rolesCreated += 1
            except Exception as e:
                # Collect per-role failures; the remaining roles still import.
                result.errors.append(f"Error processing role '{roleData.get('roleLabel', 'unknown')}': {str(e)}")
        logger.info(
            f"User {context.user.id} imported mandate {context.mandateId} RBAC: "
            f"{result.rolesCreated} created, {result.rolesUpdated} updated, "
            f"{result.rolesSkipped} skipped"
        )
        return result
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error importing mandate RBAC: {e}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to import RBAC: {str(e)}"
        )
# =============================================================================
# Helper Functions
# =============================================================================
def _hasMandateAdminRole(context: RequestContext) -> bool:
    """Return True when the caller has mandate-admin privileges.

    SysAdmins always qualify. Otherwise every role id on the request
    context is resolved and checked for the mandate-level "admin" label
    (a role not bound to a feature instance). Lookup failures are logged
    and treated as "no admin role".
    """
    if context.hasSysAdminRole:
        return True
    if not context.roleIds:
        return False
    try:
        rootInterface = getRootInterface()
        resolvedRoles = (rootInterface.getRole(rid) for rid in context.roleIds)
        return any(
            r is not None and r.roleLabel == "admin" and not r.featureInstanceId
            for r in resolvedRoles
        )
    except Exception as e:
        logger.error(f"Error checking mandate admin role: {e}")
        return False
def _updateAccessRules(interface, roleId: str, newRules: List[Dict[str, Any]]) -> int:
    """
    Update access rules for a role.
    Replaces existing rules with new ones.
    Returns:
        Number of rules created/updated
    """
    # NOTE(review): delete-then-create is not transactional — a failure
    # mid-way leaves the role with a partial rule set and returns 0.
    try:
        # Drop the role's current rules before writing the replacements.
        for existingRule in interface.getAccessRulesByRole(roleId):
            interface.db.recordDelete(AccessRule, existingRule.id)
        created = 0
        for ruleData in newRules:
            replacement = AccessRule(
                roleId=roleId,
                context=ruleData.get("context"),
                item=ruleData.get("item"),
                view=ruleData.get("view", False),
                read=ruleData.get("read"),
                create=ruleData.get("create"),
                update=ruleData.get("update"),
                delete=ruleData.get("delete")
            )
            interface.db.recordCreate(AccessRule, replacement.model_dump())
            created += 1
        return created
    except Exception as e:
        logger.error(f"Error updating access rules: {e}")
        return 0

View file

@ -911,7 +911,7 @@ def list_roles(
result.append({ result.append({
"id": role.id, "id": role.id,
"roleLabel": role.roleLabel, "roleLabel": role.roleLabel,
"description": role.description.model_dump() if hasattr(role.description, 'model_dump') else role.description, "description": resolveText(role.description),
"mandateId": role.mandateId, "mandateId": role.mandateId,
"featureInstanceId": role.featureInstanceId, "featureInstanceId": role.featureInstanceId,
"featureCode": role.featureCode, "featureCode": role.featureCode,
@ -1040,7 +1040,7 @@ def get_roles_filter_values(
result.append({ result.append({
"id": role.id, "id": role.id,
"roleLabel": role.roleLabel, "roleLabel": role.roleLabel,
"description": role.description.model_dump() if hasattr(role.description, 'model_dump') else role.description, "description": resolveText(role.description),
"mandateId": role.mandateId, "mandateId": role.mandateId,
"featureInstanceId": role.featureInstanceId, "featureInstanceId": role.featureInstanceId,
"featureCode": role.featureCode, "featureCode": role.featureCode,
@ -1157,7 +1157,7 @@ def get_role(
return { return {
"id": role.id, "id": role.id,
"roleLabel": role.roleLabel, "roleLabel": role.roleLabel,
"description": role.description.model_dump() if hasattr(role.description, 'model_dump') else role.description, "description": resolveText(role.description),
"mandateId": role.mandateId, "mandateId": role.mandateId,
"featureInstanceId": role.featureInstanceId, "featureInstanceId": role.featureInstanceId,
"featureCode": role.featureCode, "featureCode": role.featureCode,
@ -1407,32 +1407,6 @@ def getCatalogObjects(
) )
@router.get("/catalog/stats", response_model=Dict[str, Any])
@limiter.limit("60/minute")
def getCatalogStats(
    request: Request,
    currentUser: User = Depends(requireSysAdminRole)
) -> Dict[str, Any]:
    """
    Get statistics about the RBAC catalog.
    Returns:
        - Statistics about registered features, objects, and roles
    """
    try:
        # Lazy import keeps the catalog service out of module import time.
        from modules.security.rbacCatalog import getCatalogService
        service = getCatalogService()
        stats = service.getCatalogStats()
        return stats
    except Exception as e:
        logger.error(f"Error getting catalog stats: {str(e)}")
        raise HTTPException(
            status_code=500,
            detail=f"Failed to get catalog stats: {str(e)}"
        )
# ============================================================================= # =============================================================================
# CLEANUP: Remove duplicate AccessRules # CLEANUP: Remove duplicate AccessRules
# ============================================================================= # =============================================================================

View file

@ -15,7 +15,7 @@ import logging
from modules.auth import limiter from modules.auth import limiter
from modules.auth.authentication import getRequestContext, RequestContext from modules.auth.authentication import getRequestContext, RequestContext
from modules.datamodels.datamodelUam import User, UserInDB from modules.datamodels.datamodelUam import User, UserInDB
from modules.datamodels.datamodelRbac import Role, AccessRule, AccessRuleContext from modules.datamodels.datamodelRbac import Role, AccessRule
from modules.datamodels.datamodelMembership import ( from modules.datamodels.datamodelMembership import (
UserMandate, UserMandate,
UserMandateRole, UserMandateRole,
@ -498,120 +498,3 @@ def getUserAccessOverview(
detail=f"Failed to get user access overview: {str(e)}" detail=f"Failed to get user access overview: {str(e)}"
) )
@router.get("/{userId}/effective-permissions", response_model=Dict[str, Any])
@limiter.limit("60/minute")
def getEffectivePermissions(
    request: Request,
    userId: str = Path(..., description="User ID"),
    mandateId: str = Query(..., description="Mandate ID context"),
    featureInstanceId: Optional[str] = Query(None, description="Feature instance ID context"),
    accessContext: str = Query("DATA", alias="context", description="Context type: DATA, UI, or RESOURCE"),
    item: Optional[str] = Query(None, description="Specific item to check permissions for"),
    context: RequestContext = Depends(getRequestContext)
) -> Dict[str, Any]:
    """
    Get effective (resolved) permissions for a user in a specific context.
    This uses the RBAC resolution logic to show what permissions actually apply.
    MULTI-TENANT: SysAdmin sees all. MandateAdmin can check users in their own mandates.
    Path Parameters:
    - userId: User ID
    Query Parameters:
    - mandateId: Required mandate context
    - featureInstanceId: Optional feature instance context
    - context: Permission context (DATA, UI, RESOURCE)
    - item: Optional specific item to check
    Returns:
    - Effective permissions after RBAC resolution
    """
    if not context.hasSysAdminRole:
        # Check if user has admin role in any mandate
        if not _hasMandateAdminRole(context):
            raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=routeApiMsg("Admin role required"))
    try:
        interface = getRootInterface()
        # MandateAdmin: verify the requested user shares at least one admin mandate
        if not context.hasSysAdminRole:
            # Collect the mandates in which the CALLER holds a mandate-wide
            # "admin" role (empty featureInstanceId => not feature-scoped).
            adminMandateIds = []
            adminUserMandates = interface.getUserMandates(str(context.user.id))
            for um in adminUserMandates:
                umId = getattr(um, 'id', None)
                mid = getattr(um, 'mandateId', None)
                if not umId or not mid:
                    continue
                roleIds = interface.getRoleIdsForUserMandate(str(umId))
                for roleId in roleIds:
                    role = interface.getRole(roleId)
                    if role and role.roleLabel == "admin" and not role.featureInstanceId:
                        adminMandateIds.append(str(mid))
                        break
            if not adminMandateIds:
                raise HTTPException(status_code=403, detail=routeApiMsg("Insufficient permissions"))
            # The TARGET user must be a member of at least one of those mandates.
            userInAdminMandate = False
            for mid in adminMandateIds:
                if _isUserInMandate(interface, userId, mid):
                    userInAdminMandate = True
                    break
            if not userInAdminMandate:
                raise HTTPException(status_code=403, detail=routeApiMsg("Benutzer gehört nicht zu Ihrem Mandate"))
        # Get user
        user = interface.getUser(userId)
        if not user:
            raise HTTPException(
                status_code=404,
                detail=f"User {userId} not found"
            )
        # Convert context string to enum
        try:
            contextEnum = AccessRuleContext(accessContext)
        except ValueError:
            raise HTTPException(
                status_code=400,
                detail=f"Invalid context: {accessContext}. Must be DATA, UI, or RESOURCE."
            )
        # Use RBAC interface to get actual permissions
        from modules.security.rbac import RbacClass
        rbac = RbacClass(interface.db, dbApp=interface.db)
        permissions = rbac.getUserPermissions(
            user=user,
            context=contextEnum,
            item=item or "",
            mandateId=mandateId,
            featureInstanceId=featureInstanceId
        )
        # Map resolved AccessLevel enum values to human-readable labels.
        return {
            "userId": userId,
            "mandateId": mandateId,
            "featureInstanceId": featureInstanceId,
            "context": accessContext,
            "item": item,
            "effectivePermissions": {
                "view": permissions.view,
                "read": _getAccessLevelLabel(permissions.read.value if permissions.read else None),
                "create": _getAccessLevelLabel(permissions.create.value if permissions.create else None),
                "update": _getAccessLevelLabel(permissions.update.value if permissions.update else None),
                "delete": _getAccessLevelLabel(permissions.delete.value if permissions.delete else None),
            }
        }
    except HTTPException:
        # Re-raise the 400/403/404 errors produced above unchanged.
        raise
    except Exception as e:
        logger.error(f"Error getting effective permissions: {str(e)}")
        raise HTTPException(
            status_code=500,
            detail=f"Failed to get effective permissions: {str(e)}"
        )

View file

@ -1453,49 +1453,6 @@ def getTransactionsAdmin(
raise HTTPException(status_code=500, detail=str(e)) raise HTTPException(status_code=500, detail=str(e))
@router.get("/admin/transactions/{targetMandateId}/filter-values")
@limiter.limit("60/minute")
def getTransactionFilterValues(
    request: Request,
    targetMandateId: str = Path(..., description="Mandate ID"),
    column: str = Query(..., description="Column key"),
    pagination: Optional[str] = Query(None, description="JSON-encoded current filters"),
    ctx: RequestContext = Depends(getRequestContext),
):
    """Return distinct filter values for a column in mandate transactions.

    Cross-filter semantics: the caller's current filters are applied EXCEPT
    the filter on ``column`` itself, so every choice for that column stays
    visible. Requires admin rights on ``targetMandateId``.
    """
    if not _isAdminOfMandate(ctx, targetMandateId):
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=routeApiMsg("Admin role required for this mandate"))
    try:
        crossFilterParams: Optional[PaginationParams] = None
        if pagination:
            try:
                paginationDict = json.loads(pagination)
                if paginationDict:
                    paginationDict = normalize_pagination_dict(paginationDict)
                    filters = paginationDict.get("filters", {})
                    # Drop the filter on the requested column itself.
                    filters.pop(column, None)
                    paginationDict["filters"] = filters
                    # Sort order is irrelevant for a distinct-values query.
                    paginationDict.pop("sort", None)
                    crossFilterParams = PaginationParams(**paginationDict)
            except (json.JSONDecodeError, ValueError):
                # Best effort: malformed pagination falls back to no cross-filter.
                pass
        try:
            billingInterface = getBillingInterface(ctx.user, targetMandateId)
            return billingInterface.getTransactionDistinctValues(
                mandateIds=[targetMandateId],
                column=column,
                pagination=crossFilterParams,
            )
        except Exception:
            # Fallback: compute distinct values in memory when the billing
            # interface cannot answer the query directly.
            enriched, _ = _buildTransactionsList(ctx, targetMandateId)
            crossFiltered = _applyFiltersAndSort(enriched, crossFilterParams)
            return _extractDistinctValues(crossFiltered, column)
    except Exception as e:
        logger.error(f"Error getting filter values for transactions: {e}")
        raise HTTPException(status_code=500, detail=str(e))
# ============================================================================= # =============================================================================
# Mandate View Endpoints (for Admins) # Mandate View Endpoints (for Admins)
# ============================================================================= # =============================================================================

View file

@ -1,18 +1,17 @@
# Copyright (c) 2025 Patrick Motsch # Copyright (c) 2025 Patrick Motsch
# All rights reserved. # All rights reserved.
"""ClickUp API routes — teams, hierarchy, lists, tasks (connection-scoped).""" """ClickUp API routes — lists and tasks (connection-scoped). OAuth lives under /api/clickup/auth/* in routeSecurityClickup."""
import logging import logging
from typing import Any, Dict, Optional from typing import Any, Dict, Optional
from fastapi import APIRouter, Depends, HTTPException, Path, Query, Request, status from fastapi import APIRouter, Depends, HTTPException, Path, Query, Request, status
from pydantic import BaseModel
from modules.auth import getCurrentUser, limiter from modules.auth import getCurrentUser, limiter
from modules.datamodels.datamodelUam import AuthAuthority, User, UserConnection from modules.datamodels.datamodelUam import AuthAuthority, User, UserConnection
from modules.interfaces.interfaceDbApp import getInterface from modules.interfaces.interfaceDbApp import getInterface
from modules.serviceHub import getInterface as getServices from modules.serviceHub import getInterface as getServices
from modules.shared.i18nRegistry import apiRouteContext from modules.shared.i18nRegistry import apiRouteContext
routeApiMsg = apiRouteContext("routeClickup") routeApiMsg = apiRouteContext("routeClickup")
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -64,22 +63,6 @@ def _svc_for_connection(current_user: User, connection: UserConnection):
return services.clickup return services.clickup
# --- Routes (prefix is /api/clickup; OAuth lives under /api/clickup/auth/* in routeSecurityClickup) ---
@router.get("/{connectionId}/teams", response_model=Dict[str, Any])
@limiter.limit("30/minute")
async def get_teams(
    request: Request,
    connectionId: str = Path(..., description="ClickUp UserConnection id"),
    currentUser: User = Depends(getCurrentUser),
) -> Dict[str, Any]:
    """List the ClickUp teams the connection's token is authorized for."""
    dbInterface = getInterface(currentUser)
    connection = _clickup_connection_or_404(dbInterface, connectionId, currentUser.id)
    clickupService = _svc_for_connection(currentUser, connection)
    return await clickupService.getAuthorizedTeams()
@router.get("/{connectionId}/teams/{teamId}", response_model=Dict[str, Any]) @router.get("/{connectionId}/teams/{teamId}", response_model=Dict[str, Any])
@limiter.limit("60/minute") @limiter.limit("60/minute")
async def get_team( async def get_team(
@ -95,62 +78,6 @@ async def get_team(
return await cu.getTeam(teamId) return await cu.getTeam(teamId)
@router.get("/{connectionId}/teams/{teamId}/spaces", response_model=Dict[str, Any])
@limiter.limit("60/minute")
async def get_spaces(
    request: Request,
    connectionId: str = Path(...),
    teamId: str = Path(...),
    currentUser: User = Depends(getCurrentUser),
) -> Dict[str, Any]:
    """List the spaces of a ClickUp team via the caller's connection."""
    dbInterface = getInterface(currentUser)
    connection = _clickup_connection_or_404(dbInterface, connectionId, currentUser.id)
    return await _svc_for_connection(currentUser, connection).getSpaces(teamId)
@router.get("/{connectionId}/spaces/{spaceId}/folders", response_model=Dict[str, Any])
@limiter.limit("60/minute")
async def get_folders(
    request: Request,
    connectionId: str = Path(...),
    spaceId: str = Path(...),
    currentUser: User = Depends(getCurrentUser),
) -> Dict[str, Any]:
    """List the folders inside a ClickUp space via the caller's connection."""
    dbInterface = getInterface(currentUser)
    connection = _clickup_connection_or_404(dbInterface, connectionId, currentUser.id)
    return await _svc_for_connection(currentUser, connection).getFolders(spaceId)
@router.get("/{connectionId}/spaces/{spaceId}/lists", response_model=Dict[str, Any])
@limiter.limit("60/minute")
async def get_folderless_lists(
    request: Request,
    connectionId: str = Path(...),
    spaceId: str = Path(...),
    currentUser: User = Depends(getCurrentUser),
) -> Dict[str, Any]:
    """List the folderless lists of a ClickUp space via the caller's connection."""
    dbInterface = getInterface(currentUser)
    connection = _clickup_connection_or_404(dbInterface, connectionId, currentUser.id)
    return await _svc_for_connection(currentUser, connection).getFolderlessLists(spaceId)
@router.get("/{connectionId}/folders/{folderId}/lists", response_model=Dict[str, Any])
@limiter.limit("60/minute")
async def get_lists_in_folder(
    request: Request,
    connectionId: str = Path(...),
    folderId: str = Path(...),
    currentUser: User = Depends(getCurrentUser),
) -> Dict[str, Any]:
    """List the lists inside a ClickUp folder via the caller's connection."""
    dbInterface = getInterface(currentUser)
    connection = _clickup_connection_or_404(dbInterface, connectionId, currentUser.id)
    return await _svc_for_connection(currentUser, connection).getListsInFolder(folderId)
@router.get("/{connectionId}/lists/{listId}", response_model=Dict[str, Any]) @router.get("/{connectionId}/lists/{listId}", response_model=Dict[str, Any])
@limiter.limit("60/minute") @limiter.limit("60/minute")
async def get_list( async def get_list(
@ -195,29 +122,6 @@ async def get_list_tasks(
return await cu.getTasksInList(listId, page=page, include_closed=include_closed) return await cu.getTasksInList(listId, page=page, include_closed=include_closed)
class TaskCreateBody(BaseModel):
    """Request envelope: the raw ClickUp task-creation payload under ``body``."""
    # Forwarded verbatim to the ClickUp API; no field validation here.
    body: Dict[str, Any]
@router.post("/{connectionId}/lists/{listId}/tasks", response_model=Dict[str, Any])
@limiter.limit("30/minute")
async def create_list_task(
    request: Request,
    payload: TaskCreateBody,
    connectionId: str = Path(...),
    listId: str = Path(...),
    currentUser: User = Depends(getCurrentUser),
) -> Dict[str, Any]:
    """Create a task in a ClickUp list; the payload body is forwarded as-is."""
    dbInterface = getInterface(currentUser)
    connection = _clickup_connection_or_404(dbInterface, connectionId, currentUser.id)
    clickupService = _svc_for_connection(currentUser, connection)
    return await clickupService.createTask(listId, payload.body)
class TaskUpdateBody(BaseModel):
    """Request envelope: the raw ClickUp task-update payload under ``body``."""
    # Forwarded verbatim to the ClickUp API; no field validation here.
    body: Dict[str, Any]
@router.get("/{connectionId}/tasks/{taskId}", response_model=Dict[str, Any]) @router.get("/{connectionId}/tasks/{taskId}", response_model=Dict[str, Any])
@limiter.limit("60/minute") @limiter.limit("60/minute")
async def get_task( async def get_task(
@ -230,61 +134,3 @@ async def get_task(
conn = _clickup_connection_or_404(interface, connectionId, currentUser.id) conn = _clickup_connection_or_404(interface, connectionId, currentUser.id)
cu = _svc_for_connection(currentUser, conn) cu = _svc_for_connection(currentUser, conn)
return await cu.getTask(taskId) return await cu.getTask(taskId)
@router.put("/{connectionId}/tasks/{taskId}", response_model=Dict[str, Any])
@limiter.limit("30/minute")
async def update_task(
    request: Request,
    payload: TaskUpdateBody,
    connectionId: str = Path(...),
    taskId: str = Path(...),
    currentUser: User = Depends(getCurrentUser),
) -> Dict[str, Any]:
    """Update a ClickUp task; the payload body is forwarded as-is."""
    dbInterface = getInterface(currentUser)
    connection = _clickup_connection_or_404(dbInterface, connectionId, currentUser.id)
    clickupService = _svc_for_connection(currentUser, connection)
    return await clickupService.updateTask(taskId, payload.body)
@router.delete("/{connectionId}/tasks/{taskId}", response_model=Dict[str, Any])
@limiter.limit("30/minute")
async def delete_task(
    request: Request,
    connectionId: str = Path(...),
    taskId: str = Path(...),
    currentUser: User = Depends(getCurrentUser),
) -> Dict[str, Any]:
    """Delete a ClickUp task via the caller's connection."""
    dbInterface = getInterface(currentUser)
    connection = _clickup_connection_or_404(dbInterface, connectionId, currentUser.id)
    return await _svc_for_connection(currentUser, connection).deleteTask(taskId)
@router.get("/{connectionId}/teams/{teamId}/tasks/search", response_model=Dict[str, Any])
@limiter.limit("30/minute")
async def search_team_tasks(
    request: Request,
    connectionId: str = Path(...),
    teamId: str = Path(...),
    query: str = Query(..., description="Search query"),
    page: int = Query(0),
    currentUser: User = Depends(getCurrentUser),
) -> Dict[str, Any]:
    """Search tasks across a ClickUp team (paginated) via the caller's connection."""
    dbInterface = getInterface(currentUser)
    connection = _clickup_connection_or_404(dbInterface, connectionId, currentUser.id)
    clickupService = _svc_for_connection(currentUser, connection)
    return await clickupService.searchTeamTasks(teamId, query=query, page=page)
@router.get("/{connectionId}/user", response_model=Dict[str, Any])
@limiter.limit("30/minute")
async def get_authorized_user(
    request: Request,
    connectionId: str = Path(...),
    currentUser: User = Depends(getCurrentUser),
) -> Dict[str, Any]:
    """Return the ClickUp user profile behind this connection's token."""
    dbInterface = getInterface(currentUser)
    connection = _clickup_connection_or_404(dbInterface, connectionId, currentUser.id)
    return await _svc_for_connection(currentUser, connection).getAuthorizedUser()

View file

@ -944,44 +944,6 @@ def delete_file(
return {"message": f"File with ID {fileId} successfully deleted"} return {"message": f"File with ID {fileId} successfully deleted"}
@router.get("/stats", response_model=Dict[str, Any])
@limiter.limit("30/minute")
def get_file_stats(
    request: Request,
    currentUser: User = Depends(getCurrentUser)
) -> Dict[str, Any]:
    """Returns statistics about the stored files.

    Returns:
        totalFiles, totalSizeBytes, and a count per major MIME type
        (e.g. "image/png" counts under "image").
    Raises:
        HTTPException 500 on any interface/aggregation failure.
    """
    try:
        # Counter replaces the hand-rolled dict-counting loop.
        from collections import Counter

        managementInterface = interfaceDbManagement.getInterface(currentUser)
        # Get all files - metadata only
        allFiles = managementInterface.getAllFiles()
        # Group by the major MIME type (part before the "/").
        fileTypes = Counter(file.mimeType.split("/")[0] for file in allFiles)
        return {
            "totalFiles": len(allFiles),
            "totalSizeBytes": sum(file.fileSize for file in allFiles),
            "fileTypes": dict(fileTypes),
        }
    except Exception as e:
        logger.error(f"Error retrieving file statistics: {str(e)}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Error retrieving file statistics: {str(e)}"
        )
@router.get("/{fileId}/download") @router.get("/{fileId}/download")
@limiter.limit("30/minute") @limiter.limit("30/minute")
def download_file( def download_file(

View file

@ -508,24 +508,6 @@ async def list_language_codes():
return sorted(out, key=lambda x: (not x.get("isDefault"), x["code"])) return sorted(out, key=lambda x: (not x.get("isDefault"), x["code"]))
@router.get("/user-language-options")
async def list_user_language_options():
    """Select options for User.language: every UiLanguageSet except ``xx``.

    The ``xx`` set is the basis/master set and is never offered to users.
    Returns ``[{"value": code, "label": name}, ...]`` suitable for the
    FormGenerator ``frontend_options`` URL, sorted case-insensitively by label.
    """
    db = _publicMgmtDb()
    options: List[Dict[str, str]] = []
    for row in db.getRecordset(UiLanguageSet):
        code = row.get("id")
        # Skip rows without an id and the "xx" basis set.
        if not code or code == "xx":
            continue
        label = (row.get("label") or "").strip() or code
        options.append({"value": code, "label": label})
    options.sort(key=lambda opt: (opt.get("label") or opt["value"]).lower())
    return options
@router.get("/sets/{code}") @router.get("/sets/{code}")
async def get_language_set(code: str): async def get_language_set(code: str):
db = _publicMgmtDb() db = _publicMgmtDb()
@ -859,33 +841,6 @@ async def sync_xx_master(
return result return result
@router.put("/sets/update-all")
async def update_all_language_sets(
    request: Request,
    adminUser: User = Depends(requireSysAdminRole),
):
    """Sync xx-master (if body provided), then update ALL language sets via AI."""
    db = getMgmtInterface(adminUser, mandateId=None).db
    # Optional request body: entries to merge into the "xx" master set first.
    fromBody = await _readOptionalEntriesFromBody(request)
    xxSync: Optional[dict] = None
    if fromBody is not None:
        xxSync = _syncXxMaster(db, str(adminUser.id), fromBody)
        if xxSync.get("error"):
            # Abort early: do not fan a broken master out to all languages.
            return {"xxSync": xxSync, "updated": []}
    rows = db.getRecordset(UiLanguageSet)
    results = []
    for r in rows:
        cid = r["id"]
        if cid == "xx":
            # The master set itself is the source, never a translation target.
            continue
        res = await _syncLanguageWithXx(db, cid, str(adminUser.id), adminUser=adminUser)
        results.append(res)
    # Make the refreshed translations visible to running servers immediately.
    await _reloadI18nCache()
    return {"xxSync": xxSync, "updated": results}
@router.get("/sets/{code}/sync-diff") @router.get("/sets/{code}/sync-diff")
async def get_language_sync_diff( async def get_language_sync_diff(
code: str, code: str,

View file

@ -1,514 +0,0 @@
# Copyright (c) 2025 Patrick Motsch
# All rights reserved.
from fastapi import APIRouter, HTTPException, Depends, Body, Path, Request, Query
from typing import List, Dict, Any, Optional
from fastapi import status
import logging
import json
# Import auth module
from modules.auth import limiter, getCurrentUser, getRequestContext, RequestContext
from modules.datamodels.datamodelRbac import Role
# Import interfaces
import modules.interfaces.interfaceDbManagement as interfaceDbManagement
from modules.datamodels.datamodelMessaging import (
MessagingSubscription,
MessagingSubscriptionRegistration,
MessagingDelivery,
MessagingChannel,
MessagingEventParameters,
MessagingSubscriptionExecutionResult
)
from modules.datamodels.datamodelUam import User
from modules.datamodels.datamodelPagination import PaginationParams, PaginatedResponse, PaginationMetadata, normalize_pagination_dict
from modules.shared.i18nRegistry import apiRouteContext
# i18n context: wraps user-facing detail strings for this route module.
routeApiMsg = apiRouteContext("routeMessaging")
# Configure logger
logger = logging.getLogger(__name__)
# Create router for messaging endpoints
router = APIRouter(
    prefix="/api/messaging",
    tags=["Messaging"],
    responses={404: {"description": "Not found"}}
)
# Subscription Endpoints
@router.get("/subscriptions", response_model=PaginatedResponse[MessagingSubscription])
@limiter.limit("60/minute")
def get_subscriptions(
    request: Request,
    pagination: Optional[str] = Query(None, description="JSON-encoded PaginationParams object"),
    currentUser: User = Depends(getCurrentUser)
) -> PaginatedResponse[MessagingSubscription]:
    """Get subscriptions with optional pagination, sorting, and filtering."""
    paginationParams = None
    if pagination:
        try:
            paginationDict = json.loads(pagination)
            if paginationDict:
                # Normalize legacy/alias keys before model validation.
                paginationDict = normalize_pagination_dict(paginationDict)
            paginationParams = PaginationParams(**paginationDict) if paginationDict else None
        except (json.JSONDecodeError, ValueError) as e:
            raise HTTPException(
                status_code=400,
                detail=f"Invalid pagination parameter: {str(e)}"
            )
    managementInterface = interfaceDbManagement.getInterface(currentUser)
    # With pagination the interface returns a page object (items/totalItems/
    # totalPages); without it, a plain list of subscriptions.
    result = managementInterface.getAllSubscriptions(pagination=paginationParams)
    if paginationParams:
        return PaginatedResponse(
            items=result.items,
            pagination=PaginationMetadata(
                currentPage=paginationParams.page,
                pageSize=paginationParams.pageSize,
                totalItems=result.totalItems,
                totalPages=result.totalPages,
                sort=paginationParams.sort,
                filters=paginationParams.filters
            )
        )
    else:
        return PaginatedResponse(
            items=result,
            pagination=None
        )
@router.post("/subscriptions", response_model=MessagingSubscription)
@limiter.limit("60/minute")
def create_subscription(
    request: Request,
    subscription: MessagingSubscription,
    currentUser: User = Depends(getCurrentUser)
) -> MessagingSubscription:
    """Create a new subscription"""
    managementInterface = interfaceDbManagement.getInterface(currentUser)
    # The id is assigned by the backend; drop any client-supplied value.
    payload = subscription.model_dump(exclude={"id"})
    created = managementInterface.createSubscription(payload)
    return MessagingSubscription(**created)
@router.get("/subscriptions/{subscriptionId}", response_model=MessagingSubscription)
@limiter.limit("60/minute")
def get_subscription(
    request: Request,
    subscriptionId: str = Path(..., description="ID of the subscription"),
    currentUser: User = Depends(getCurrentUser)
) -> MessagingSubscription:
    """Get a specific subscription"""
    managementInterface = interfaceDbManagement.getInterface(currentUser)
    found = managementInterface.getSubscription(subscriptionId)
    if not found:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Subscription with ID {subscriptionId} not found"
        )
    return found
@router.put("/subscriptions/{subscriptionId}", response_model=MessagingSubscription)
@limiter.limit("60/minute")
def update_subscription(
    request: Request,
    subscriptionId: str = Path(..., description="ID of the subscription to update"),
    subscriptionData: MessagingSubscription = Body(...),
    currentUser: User = Depends(getCurrentUser)
) -> MessagingSubscription:
    """Update an existing subscription"""
    managementInterface = interfaceDbManagement.getInterface(currentUser)
    # 404 before attempting the write.
    if not managementInterface.getSubscription(subscriptionId):
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Subscription with ID {subscriptionId} not found"
        )
    # Identifier fields are immutable; never take them from the body.
    changes = subscriptionData.model_dump(exclude={"id", "subscriptionId"})
    updated = managementInterface.updateSubscription(subscriptionId, changes)
    if not updated:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=routeApiMsg("Error updating the subscription")
        )
    return MessagingSubscription(**updated)
@router.delete("/subscriptions/{subscriptionId}", response_model=Dict[str, Any])
@limiter.limit("60/minute")
def delete_subscription(
    request: Request,
    subscriptionId: str = Path(..., description="ID of the subscription to delete"),
    currentUser: User = Depends(getCurrentUser)
) -> Dict[str, Any]:
    """Delete a subscription"""
    managementInterface = interfaceDbManagement.getInterface(currentUser)
    # Give a clean 404 when the subscription does not exist.
    if not managementInterface.getSubscription(subscriptionId):
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Subscription with ID {subscriptionId} not found"
        )
    if not managementInterface.deleteSubscription(subscriptionId):
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=routeApiMsg("Error deleting the subscription")
        )
    return {"message": f"Subscription with ID {subscriptionId} successfully deleted"}
# Registration Endpoints
@router.get("/subscriptions/{subscriptionId}/registrations", response_model=PaginatedResponse[MessagingSubscriptionRegistration])
@limiter.limit("60/minute")
def get_subscription_registrations(
    request: Request,
    subscriptionId: str = Path(..., description="ID of the subscription"),
    pagination: Optional[str] = Query(None, description="JSON-encoded PaginationParams object"),
    currentUser: User = Depends(getCurrentUser)
) -> PaginatedResponse[MessagingSubscriptionRegistration]:
    """Get registrations for a subscription"""
    paginationParams = None
    if pagination:
        try:
            paginationDict = json.loads(pagination)
            if paginationDict:
                # Normalize legacy/alias keys before model validation.
                paginationDict = normalize_pagination_dict(paginationDict)
            paginationParams = PaginationParams(**paginationDict) if paginationDict else None
        except (json.JSONDecodeError, ValueError) as e:
            raise HTTPException(
                status_code=400,
                detail=f"Invalid pagination parameter: {str(e)}"
            )
    managementInterface = interfaceDbManagement.getInterface(currentUser)
    # Filtered to this subscription; paginated result is a page object,
    # unpaginated result is a plain list.
    result = managementInterface.getAllRegistrations(
        subscriptionId=subscriptionId,
        pagination=paginationParams
    )
    if paginationParams:
        return PaginatedResponse(
            items=result.items,
            pagination=PaginationMetadata(
                currentPage=paginationParams.page,
                pageSize=paginationParams.pageSize,
                totalItems=result.totalItems,
                totalPages=result.totalPages,
                sort=paginationParams.sort,
                filters=paginationParams.filters
            )
        )
    else:
        return PaginatedResponse(
            items=result,
            pagination=None
        )
@router.post("/subscriptions/{subscriptionId}/subscribe", response_model=MessagingSubscriptionRegistration)
@limiter.limit("60/minute")
def subscribe_user(
    request: Request,
    subscriptionId: str = Path(..., description="ID of the subscription"),
    channel: MessagingChannel = Body(..., embed=True),
    channelConfig: str = Body(..., embed=True),
    currentUser: User = Depends(getCurrentUser)
) -> MessagingSubscriptionRegistration:
    """Subscribe user to a subscription with a specific channel"""
    managementInterface = interfaceDbManagement.getInterface(currentUser)
    # Registrations are always created for the calling user, never a third party.
    created = managementInterface.subscribeUser(
        subscriptionId=subscriptionId,
        userId=currentUser.id,
        channel=channel,
        channelConfig=channelConfig
    )
    return MessagingSubscriptionRegistration(**created)
@router.delete("/subscriptions/{subscriptionId}/unsubscribe", response_model=Dict[str, Any])
@limiter.limit("60/minute")
def unsubscribe_user(
    request: Request,
    subscriptionId: str = Path(..., description="ID of the subscription"),
    channel: MessagingChannel = Body(..., embed=True),
    currentUser: User = Depends(getCurrentUser)
) -> Dict[str, Any]:
    """Unsubscribe user from a subscription for a specific channel"""
    managementInterface = interfaceDbManagement.getInterface(currentUser)
    removed = managementInterface.unsubscribeUser(
        subscriptionId=subscriptionId,
        userId=currentUser.id,
        channel=channel
    )
    # False means no matching registration existed for this user/channel.
    if not removed:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=routeApiMsg("Registration not found")
        )
    return {"message": f"Successfully unsubscribed from {subscriptionId} for channel {channel.value}"}
@router.get("/registrations", response_model=PaginatedResponse[MessagingSubscriptionRegistration])
@limiter.limit("60/minute")
def get_my_registrations(
    request: Request,
    pagination: Optional[str] = Query(None, description="JSON-encoded PaginationParams object"),
    currentUser: User = Depends(getCurrentUser)
) -> PaginatedResponse[MessagingSubscriptionRegistration]:
    """Get own registrations"""
    paginationParams = None
    if pagination:
        try:
            paginationDict = json.loads(pagination)
            if paginationDict:
                # Normalize legacy/alias keys before model validation.
                paginationDict = normalize_pagination_dict(paginationDict)
            paginationParams = PaginationParams(**paginationDict) if paginationDict else None
        except (json.JSONDecodeError, ValueError) as e:
            raise HTTPException(
                status_code=400,
                detail=f"Invalid pagination parameter: {str(e)}"
            )
    managementInterface = interfaceDbManagement.getInterface(currentUser)
    # Scoped to the calling user only.
    result = managementInterface.getAllRegistrations(
        userId=currentUser.id,
        pagination=paginationParams
    )
    if paginationParams:
        return PaginatedResponse(
            items=result.items,
            pagination=PaginationMetadata(
                currentPage=paginationParams.page,
                pageSize=paginationParams.pageSize,
                totalItems=result.totalItems,
                totalPages=result.totalPages,
                sort=paginationParams.sort,
                filters=paginationParams.filters
            )
        )
    else:
        return PaginatedResponse(
            items=result,
            pagination=None
        )
@router.put("/registrations/{registrationId}", response_model=MessagingSubscriptionRegistration)
@limiter.limit("60/minute")
def update_registration(
    request: Request,
    registrationId: str = Path(..., description="ID of the registration to update"),
    registrationData: MessagingSubscriptionRegistration = Body(...),
    currentUser: User = Depends(getCurrentUser)
) -> MessagingSubscriptionRegistration:
    """Update a registration"""
    managementInterface = interfaceDbManagement.getInterface(currentUser)
    # 404 before attempting the write.
    if not managementInterface.getRegistration(registrationId):
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Registration with ID {registrationId} not found"
        )
    # Identity fields are immutable; never take them from the body.
    changes = registrationData.model_dump(exclude={"id", "subscriptionId", "userId"})
    updated = managementInterface.updateRegistration(registrationId, changes)
    if not updated:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=routeApiMsg("Error updating the registration")
        )
    return MessagingSubscriptionRegistration(**updated)
@router.delete("/registrations/{registrationId}", response_model=Dict[str, Any])
@limiter.limit("60/minute")
def delete_registration(
    request: Request,
    registrationId: str = Path(..., description="ID of the registration to delete"),
    currentUser: User = Depends(getCurrentUser)
) -> Dict[str, Any]:
    """Delete a registration"""
    managementInterface = interfaceDbManagement.getInterface(currentUser)
    # Give a clean 404 when the registration does not exist.
    if not managementInterface.getRegistration(registrationId):
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Registration with ID {registrationId} not found"
        )
    if not managementInterface.deleteRegistration(registrationId):
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=routeApiMsg("Error deleting the registration")
        )
    return {"message": f"Registration with ID {registrationId} successfully deleted"}
# Trigger Endpoints
def _getTriggerKey(request: Request) -> str:
    """Rate-limit key for trigger endpoints: ``<client-host>:<subscriptionId>``.

    Scoping the limiter per subscription keeps one noisy subscription from
    exhausting the quota of others coming from the same client address.
    """
    subscriptionId = request.path_params.get("subscriptionId", "unknown")
    # request.client can be None (e.g. certain test clients / ASGI servers);
    # fall back to a stable placeholder instead of raising AttributeError.
    host = request.client.host if request.client else "unknown"
    return f"{host}:{subscriptionId}"
@router.post("/trigger/{subscriptionId}", response_model=MessagingSubscriptionExecutionResult)
@limiter.limit("60/minute", key_func=_getTriggerKey)
def trigger_subscription(
    request: Request,
    subscriptionId: str = Path(..., description="ID of the subscription to trigger"),
    eventParameters: Dict[str, Any] = Body(...),
    context: RequestContext = Depends(getRequestContext)
) -> MessagingSubscriptionExecutionResult:
    """
    Trigger a subscription with event parameters.
    Requires Mandate-Admin role or SysAdmin.
    """
    # RBAC-Check: Admin or Mandate-Admin can trigger
    if not _hasTriggerPermission(context):
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail=routeApiMsg("Admin or Mandate-Admin role required to trigger subscriptions")
        )
    # Get messaging service from request app state
    from modules.serviceHub import getInterface as getServicesInterface
    # NOTE(review): if context.mandateId is None, str() yields "None" — confirm
    # the service hub tolerates that value for mandateId.
    services = getServicesInterface(context.user, None, mandateId=str(context.mandateId))
    # Convert the raw dict into the Pydantic event-parameters model
    eventParams = MessagingEventParameters(triggerData=eventParameters)
    executionResult = services.messaging.executeSubscription(subscriptionId, eventParams)
    return executionResult
def _hasTriggerPermission(context: RequestContext) -> bool:
    """
    Check if user has permission to trigger subscriptions.
    Requires admin or mandate-admin role.
    """
    # SysAdmin may always trigger.
    if context.hasSysAdminRole:
        return True
    # Without any role ids there is nothing to check.
    if not context.roleIds:
        return False
    try:
        from modules.interfaces.interfaceDbApp import getRootInterface
        rootInterface = getRootInterface()
        # Look for at least one admin-level role among the caller's roles.
        for candidateId in context.roleIds:
            candidate = rootInterface.getRole(candidateId)
            if candidate and candidate.roleLabel in ("admin", "sysadmin"):
                return True
        return False
    except Exception as e:
        # Fail closed: deny when role resolution errors out.
        logger.error(f"Error checking trigger permission: {e}")
        return False
# Delivery Endpoints
@router.get("/deliveries", response_model=PaginatedResponse[MessagingDelivery])
@limiter.limit("60/minute")
def get_deliveries(
    request: Request,
    subscriptionId: Optional[str] = Query(None, description="Filter by subscription ID"),
    pagination: Optional[str] = Query(None, description="JSON-encoded PaginationParams object"),
    currentUser: User = Depends(getCurrentUser)
) -> PaginatedResponse[MessagingDelivery]:
    """Get delivery history"""
    paginationParams = None
    if pagination:
        try:
            paginationDict = json.loads(pagination)
            if paginationDict:
                # Normalize legacy/alias keys before model validation.
                paginationDict = normalize_pagination_dict(paginationDict)
            paginationParams = PaginationParams(**paginationDict) if paginationDict else None
        except (json.JSONDecodeError, ValueError) as e:
            raise HTTPException(
                status_code=400,
                detail=f"Invalid pagination parameter: {str(e)}"
            )
    managementInterface = interfaceDbManagement.getInterface(currentUser)
    result = managementInterface.getDeliveries(
        subscriptionId=subscriptionId,
        userId=currentUser.id,  # Users can only see their own deliveries
        pagination=paginationParams
    )
    if paginationParams:
        return PaginatedResponse(
            items=result.items,
            pagination=PaginationMetadata(
                currentPage=paginationParams.page,
                pageSize=paginationParams.pageSize,
                totalItems=result.totalItems,
                totalPages=result.totalPages,
                sort=paginationParams.sort,
                filters=paginationParams.filters
            )
        )
    else:
        return PaginatedResponse(
            items=result,
            pagination=None
        )
@router.get("/deliveries/{deliveryId}", response_model=MessagingDelivery)
@limiter.limit("60/minute")
def get_delivery(
request: Request,
deliveryId: str = Path(..., description="ID of the delivery"),
currentUser: User = Depends(getCurrentUser)
) -> MessagingDelivery:
"""Get a specific delivery"""
managementInterface = interfaceDbManagement.getInterface(currentUser)
delivery = managementInterface.getDelivery(deliveryId)
if not delivery:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail=f"Delivery with ID {deliveryId} not found"
)
return delivery

View file

@ -1,435 +0,0 @@
# Copyright (c) 2025 Patrick Motsch
# All rights reserved.
"""
Security Administration routes.
MULTI-TENANT: These are SYSTEM-LEVEL operations requiring isSysAdmin=true.
No mandate context - SysAdmin manages infrastructure, not data.
"""
from fastapi import APIRouter, HTTPException, Depends, status, Request, Body
from fastapi.responses import FileResponse, JSONResponse
from typing import Optional, Dict, Any, List
import os
import logging
from modules.auth import getCurrentUser, limiter, requireSysAdmin
from modules.connectors.connectorDbPostgre import DatabaseConnector
from modules.interfaces.interfaceDbApp import getRootInterface
from modules.datamodels.datamodelUam import User, UserInDB, AuthAuthority
from modules.datamodels.datamodelSecurity import Token
from modules.shared.configuration import APP_CONFIG
from modules.shared.i18nRegistry import apiRouteContext
routeApiMsg = apiRouteContext("routeSecurityAdmin")
logger = logging.getLogger(__name__)
router = APIRouter(
prefix="/api/admin",
tags=["Security Administration"],
responses={
404: {"description": "Not found"},
400: {"description": "Bad request"},
401: {"description": "Unauthorized"},
403: {"description": "Forbidden"},
500: {"description": "Internal server error"}
}
)
def _getPoweronDatabases() -> List[str]:
    """Load databases from PostgreSQL host matching poweron_%."""
    # Connect to the 'postgres' system database so we can enumerate all databases.
    connector = DatabaseConnector(
        dbHost=APP_CONFIG.get("DB_HOST"),
        dbDatabase="postgres",
        dbUser=APP_CONFIG.get("DB_USER"),
        dbPassword=APP_CONFIG.get("DB_PASSWORD_SECRET"),
        dbPort=int(APP_CONFIG.get("DB_PORT", 5432)),
        userId=None
    )
    try:
        with connector.connection.cursor() as cursor:
            cursor.execute(
                """
                SELECT datname
                FROM pg_database
                WHERE datname LIKE 'poweron_%'
                AND datistemplate = false
                ORDER BY datname
                """
            )
            fetched = cursor.fetchall()
        # Rows are dict-like; keep only non-empty names.
        return [entry["datname"] for entry in fetched if entry.get("datname")]
    finally:
        # Always release the system-database connection.
        connector.close()
def _getDatabaseConnector(databaseName: str, userId: Optional[str] = None) -> DatabaseConnector:
    """
    Create a generic DatabaseConnector for any poweron_* database.
    Fully dynamic - no interface mapping needed.

    Args:
        databaseName: Target database; must start with "poweron_".
        userId: Optional user id attached to the connection (audit context).

    Returns:
        An open DatabaseConnector; the caller is responsible for close().

    Raises:
        ValueError: If databaseName does not follow the poweron_* convention.
    """
    # Guard against arbitrary database access: only poweron_* is allowed.
    if not databaseName.startswith("poweron_"):
        raise ValueError(f"Invalid database name: {databaseName}")
    dbHost = APP_CONFIG.get("DB_HOST")
    dbUser = APP_CONFIG.get("DB_USER")
    dbPassword = APP_CONFIG.get("DB_PASSWORD_SECRET")
    dbPort = int(APP_CONFIG.get("DB_PORT", 5432))
    connector = DatabaseConnector(
        dbHost=dbHost,
        dbDatabase=databaseName,
        dbUser=dbUser,
        dbPassword=dbPassword,
        dbPort=dbPort,
        userId=userId
    )
    return connector
# ----------------------
# Token listing and revocation
# ----------------------
@router.get("/tokens")
@limiter.limit("30/minute")
def list_tokens(
request: Request,
currentUser: User = Depends(requireSysAdmin),
userId: Optional[str] = None,
authority: Optional[str] = None,
sessionId: Optional[str] = None,
statusFilter: Optional[str] = None,
connectionId: Optional[str] = None,
) -> List[Dict[str, Any]]:
"""
List all tokens in the system.
MULTI-TENANT: SysAdmin-only, no mandate filter (system-level view).
"""
try:
appInterface = getRootInterface()
recordFilter: Dict[str, Any] = {}
if userId:
recordFilter["userId"] = userId
if authority:
recordFilter["authority"] = authority
if sessionId:
recordFilter["sessionId"] = sessionId
if connectionId:
recordFilter["connectionId"] = connectionId
if statusFilter:
recordFilter["status"] = statusFilter
# MULTI-TENANT: SysAdmin sees ALL tokens (no mandate filter)
# Use interface method to get tokens with flexible filtering
tokens = appInterface.getAllTokens(recordFilter=recordFilter)
return tokens
except HTTPException:
raise
except Exception as e:
logger.error(f"Error listing tokens: {str(e)}")
raise HTTPException(status_code=500, detail=routeApiMsg("Failed to list tokens"))
@router.post("/tokens/revoke/user")
@limiter.limit("30/minute")
def revoke_tokens_by_user(
request: Request,
currentUser: User = Depends(requireSysAdmin),
payload: Dict[str, Any] = Body(...)
) -> Dict[str, Any]:
"""
Revoke all tokens for a user.
MULTI-TENANT: SysAdmin-only, can revoke across all mandates.
"""
try:
userId = payload.get("userId")
authority = payload.get("authority")
reason = payload.get("reason", "sysadmin revoke")
if not userId:
raise HTTPException(status_code=400, detail=routeApiMsg("userId is required"))
appInterface = getRootInterface()
# MULTI-TENANT: SysAdmin can revoke any user's tokens (no mandate restriction)
count = appInterface.revokeTokensByUser(
userId=userId,
authority=AuthAuthority(authority) if authority else None,
mandateId=None, # SysAdmin: no mandate filter
revokedBy=currentUser.id,
reason=reason
)
return {"revoked": count}
except HTTPException:
raise
except Exception as e:
logger.error(f"Error revoking tokens by user: {str(e)}")
raise HTTPException(status_code=500, detail=routeApiMsg("Failed to revoke tokens"))
@router.post("/tokens/revoke/session")
@limiter.limit("30/minute")
def revoke_tokens_by_session(
request: Request,
currentUser: User = Depends(requireSysAdmin),
payload: Dict[str, Any] = Body(...)
) -> Dict[str, Any]:
"""
Revoke all tokens for a specific session.
MULTI-TENANT: SysAdmin-only.
"""
try:
userId = payload.get("userId")
sessionId = payload.get("sessionId")
authority = payload.get("authority", "local")
reason = payload.get("reason", "sysadmin session revoke")
if not userId or not sessionId:
raise HTTPException(status_code=400, detail=routeApiMsg("userId and sessionId are required"))
appInterface = getRootInterface()
# MULTI-TENANT: SysAdmin can revoke any session (no mandate check)
count = appInterface.revokeTokensBySessionId(
sessionId=sessionId,
userId=userId,
authority=AuthAuthority(authority),
revokedBy=currentUser.id,
reason=reason
)
return {"revoked": count}
except HTTPException:
raise
except Exception as e:
logger.error(f"Error revoking tokens by session: {str(e)}")
raise HTTPException(status_code=500, detail=routeApiMsg("Failed to revoke session tokens"))
@router.post("/tokens/revoke/id")
@limiter.limit("30/minute")
def revoke_token_by_id(
request: Request,
currentUser: User = Depends(requireSysAdmin),
payload: Dict[str, Any] = Body(...)
) -> Dict[str, Any]:
"""
Revoke a specific token by ID.
MULTI-TENANT: SysAdmin-only.
"""
try:
tokenId = payload.get("tokenId")
reason = payload.get("reason", "sysadmin revoke")
if not tokenId:
raise HTTPException(status_code=400, detail=routeApiMsg("tokenId is required"))
appInterface = getRootInterface()
# MULTI-TENANT: SysAdmin can revoke any token (no mandate check)
ok = appInterface.revokeTokenById(tokenId, revokedBy=currentUser.id, reason=reason)
return {"revoked": 1 if ok else 0}
except HTTPException:
raise
except Exception as e:
logger.error(f"Error revoking token by id: {str(e)}")
raise HTTPException(status_code=500, detail=routeApiMsg("Failed to revoke token"))
@router.post("/tokens/revoke/mandate")
@limiter.limit("10/minute")
def revoke_tokens_by_mandate(
request: Request,
currentUser: User = Depends(requireSysAdmin),
payload: Dict[str, Any] = Body(...)
) -> Dict[str, Any]:
"""
Revoke all tokens for users in a mandate.
MULTI-TENANT: SysAdmin-only, can revoke tokens for any mandate.
"""
try:
mandateId = payload.get("mandateId")
authority = payload.get("authority", "local")
reason = payload.get("reason", "sysadmin mandate revoke")
if not mandateId:
raise HTTPException(status_code=400, detail=routeApiMsg("mandateId is required"))
# MULTI-TENANT: SysAdmin can revoke tokens for any mandate
appInterface = getRootInterface()
# Get all UserMandate entries for this mandate to find users using interface method
userMandates = appInterface.getUserMandatesByMandate(mandateId)
total = 0
for um in userMandates:
total += appInterface.revokeTokensByUser(
userId=um.userId,
authority=AuthAuthority(authority) if authority else None,
mandateId=None, # Revoke all tokens for user
revokedBy=currentUser.id,
reason=reason
)
return {"revoked": total}
except HTTPException:
raise
except Exception as e:
logger.error(f"Error revoking tokens by mandate: {str(e)}")
raise HTTPException(status_code=500, detail=routeApiMsg("Failed to revoke mandate tokens"))
# ----------------------
# Database admin
# ----------------------
@router.get("/databases")
@limiter.limit("10/minute")
def list_databases(
request: Request,
currentUser: User = Depends(requireSysAdmin)
) -> Dict[str, Any]:
"""
List all poweron_* databases.
MULTI-TENANT: SysAdmin-only (infrastructure management).
"""
try:
databases = _getPoweronDatabases()
return {"databases": databases}
except Exception as e:
logger.error(f"Failed to load databases from host: {e}")
raise HTTPException(status_code=500, detail=routeApiMsg("Failed to load databases from host"))
@router.get("/databases/{database_name}/tables")
@limiter.limit("30/minute")
def get_database_tables(
request: Request,
database_name: str,
currentUser: User = Depends(requireSysAdmin)
) -> Dict[str, Any]:
"""
List tables in a database.
MULTI-TENANT: SysAdmin-only (infrastructure management).
"""
if not database_name.startswith("poweron_"):
raise HTTPException(status_code=400, detail=routeApiMsg("Invalid database name format"))
connector = None
try:
connector = _getDatabaseConnector(database_name, currentUser.id)
tables = connector.getTables()
return {"tables": tables}
except ValueError as e:
raise HTTPException(status_code=400, detail=str(e))
except Exception as e:
logger.error(f"Error getting database tables: {str(e)}")
raise HTTPException(status_code=500, detail=f"Failed to get database tables: {str(e)}")
finally:
if connector:
connector.close()
@router.post("/databases/{database_name}/tables/{table_name}/drop")
@limiter.limit("10/minute")
def drop_table(
request: Request,
database_name: str,
table_name: str,
currentUser: User = Depends(requireSysAdmin),
payload: Dict[str, Any] = Body(...)
) -> Dict[str, Any]:
"""
Drop a table from a database.
MULTI-TENANT: SysAdmin-only (infrastructure management).
"""
if not database_name.startswith("poweron_"):
raise HTTPException(status_code=400, detail=routeApiMsg("Invalid database name format"))
connector = None
try:
connector = _getDatabaseConnector(database_name, currentUser.id)
conn = connector.connection
with conn.cursor() as cursor:
# Check if table exists
cursor.execute("""
SELECT table_name FROM information_schema.tables
WHERE table_schema = 'public' AND table_name = %s
""", (table_name,))
if not cursor.fetchone():
raise HTTPException(status_code=404, detail=routeApiMsg("Table not found"))
# Drop the table
cursor.execute(f'DROP TABLE IF EXISTS "{table_name}" CASCADE')
conn.commit()
logger.warning(f"Admin drop_table executed by {currentUser.id}: dropped table '{table_name}' from database '{database_name}'")
return {"message": f"Table '{table_name}' dropped successfully from database '{database_name}'"}
except HTTPException:
raise
except ValueError as e:
raise HTTPException(status_code=400, detail=str(e))
except Exception as e:
logger.error(f"Error dropping table: {str(e)}")
if connector and connector.connection:
connector.connection.rollback()
raise HTTPException(status_code=500, detail=routeApiMsg("Failed to drop table"))
finally:
if connector:
connector.close()
@router.post("/databases/drop")
@limiter.limit("5/minute")
def drop_database(
request: Request,
currentUser: User = Depends(requireSysAdmin),
payload: Dict[str, Any] = Body(...)
) -> Dict[str, Any]:
"""
Drop all tables in a database.
MULTI-TENANT: SysAdmin-only (infrastructure management).
"""
dbName = payload.get("database")
if not dbName or not dbName.startswith("poweron_"):
raise HTTPException(status_code=400, detail=routeApiMsg("Invalid database name"))
# Validate database exists
try:
configuredDbs = _getPoweronDatabases()
except Exception as e:
logger.warning(f"Failed to load databases from host: {e}")
configuredDbs = []
if configuredDbs and dbName not in configuredDbs:
raise HTTPException(status_code=400, detail=f"Database not found. Available: {configuredDbs}")
connector = None
try:
connector = _getDatabaseConnector(dbName, currentUser.id)
conn = connector.connection
with conn.cursor() as cursor:
# Drop all user tables (public schema)
cursor.execute("""
SELECT table_name FROM information_schema.tables
WHERE table_schema = 'public' AND table_type = 'BASE TABLE'
""")
tables = [row['table_name'] for row in cursor.fetchall()]
dropped = []
for tbl in tables:
cursor.execute(f'DROP TABLE IF EXISTS "{tbl}" CASCADE')
dropped.append(tbl)
conn.commit()
logger.warning(f"Admin drop_database executed by {currentUser.id}: dropped tables from '{dbName}': {dropped}")
return {"droppedTables": dropped}
except ValueError as e:
raise HTTPException(status_code=400, detail=str(e))
except Exception as e:
logger.error(f"Error dropping database tables: {str(e)}")
if connector and connector.connection:
connector.connection.rollback()
raise HTTPException(status_code=500, detail=routeApiMsg("Failed to drop database tables"))
finally:
if connector:
connector.close()

View file

@ -111,24 +111,6 @@ DATA_CLIENT_SECRET = APP_CONFIG.get("Service_GOOGLE_DATA_CLIENT_SECRET")
DATA_REDIRECT_URI = APP_CONFIG.get("Service_GOOGLE_DATA_REDIRECT_URI") DATA_REDIRECT_URI = APP_CONFIG.get("Service_GOOGLE_DATA_REDIRECT_URI")
@router.get("/config")
def get_config():
"""Debug: OAuth configuration (Auth vs Data apps)."""
return {
"auth_client_id": AUTH_CLIENT_ID,
"auth_client_secret": "***" if AUTH_CLIENT_SECRET else None,
"auth_redirect_uri": AUTH_REDIRECT_URI,
"auth_scopes": googleAuthScopes,
"data_client_id": DATA_CLIENT_ID,
"data_client_secret": "***" if DATA_CLIENT_SECRET else None,
"data_redirect_uri": DATA_REDIRECT_URI,
"data_scopes": googleDataScopes,
"config_loaded": bool(
AUTH_CLIENT_ID and AUTH_CLIENT_SECRET and AUTH_REDIRECT_URI and DATA_CLIENT_ID and DATA_CLIENT_SECRET and DATA_REDIRECT_URI
),
}
def _require_google_auth_config(): def _require_google_auth_config():
if not AUTH_CLIENT_ID or not AUTH_CLIENT_SECRET or not AUTH_REDIRECT_URI: if not AUTH_CLIENT_ID or not AUTH_CLIENT_SECRET or not AUTH_REDIRECT_URI:
raise HTTPException( raise HTTPException(
@ -620,52 +602,6 @@ def logout(
) )
@router.post("/verify")
@limiter.limit("30/minute")
async def verify_token(
request: Request,
currentUser: User = Depends(getCurrentUser),
) -> Dict[str, Any]:
try:
appInterface = getInterface(currentUser)
connections = appInterface.getUserConnections(currentUser.id)
google_connection = None
for conn in connections:
if conn.authority == AuthAuthority.GOOGLE:
google_connection = conn
break
if not google_connection:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail=routeApiMsg("No Google connection found for current user"),
)
current_token = TokenManager().getFreshToken(google_connection.id)
if not current_token:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail=routeApiMsg("No Google token found for this connection"),
)
token_verification = await verify_google_token(current_token.tokenAccess)
return {
"valid": token_verification.get("valid", False),
"scopes": token_verification.get("scopes", []),
"expires_in": token_verification.get("expires_in", 0),
"email": token_verification.get("email"),
"user_id": token_verification.get("user_id"),
"error": token_verification.get("error")
if not token_verification.get("valid")
else None,
}
except HTTPException:
raise
except Exception as e:
logger.error(f"Error verifying Google token: {str(e)}")
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"Failed to verify token: {str(e)}",
)
@router.post("/refresh") @router.post("/refresh")
@limiter.limit("10/minute") @limiter.limit("10/minute")
async def refresh_token( async def refresh_token(

View file

@ -678,24 +678,6 @@ def logout(
) )
@router.post("/cleanup")
@limiter.limit("5/minute")
def cleanup_expired_tokens(
request: Request,
currentUser: User = Depends(getCurrentUser),
) -> Dict[str, Any]:
try:
appInterface = getInterface(currentUser)
cleaned_count = appInterface.cleanupExpiredTokens()
return {"message": "Cleanup completed successfully", "tokens_cleaned": cleaned_count}
except Exception as e:
logger.error(f"Error cleaning up expired tokens: {str(e)}")
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"Failed to cleanup expired tokens: {str(e)}",
)
@router.post("/refresh") @router.post("/refresh")
@limiter.limit("10/minute") @limiter.limit("10/minute")
async def refresh_token( async def refresh_token(

View file

@ -29,18 +29,6 @@ router = APIRouter(
} }
) )
def _getUserConnection(interface, connectionId: str, userId: str) -> Optional[UserConnection]:
    """Get a user connection by ID, ensuring it belongs to the user"""
    try:
        # Search only within the user's own connections (ownership check).
        matching = (c for c in interface.getUserConnections(userId) if c.id == connectionId)
        return next(matching, None)
    except Exception as e:
        logger.error(f"Error getting user connection: {str(e)}")
        # Lookup failures are treated the same as "not found".
        return None
def _getUserConnectionByReference(interface, connectionReference: str, userId: str) -> Optional[UserConnection]: def _getUserConnectionByReference(interface, connectionReference: str, userId: str) -> Optional[UserConnection]:
""" """
Get a user connection by reference string (format: connection:authority:username). Get a user connection by reference string (format: connection:authority:username).
@ -79,211 +67,6 @@ def _getUserConnectionByReference(interface, connectionReference: str, userId: s
logger.error(f"Error getting user connection by reference: {str(e)}") logger.error(f"Error getting user connection by reference: {str(e)}")
return None return None
@router.get("/{connectionId}/sites", response_model=List[Dict[str, Any]])
@limiter.limit("30/minute")
async def get_sharepoint_sites(
request: Request,
connectionId: str = Path(..., description="Microsoft connection ID"),
currentUser: User = Depends(getCurrentUser)
) -> List[Dict[str, Any]]:
"""Get all SharePoint sites accessible via a Microsoft connection"""
try:
interface = getInterface(currentUser)
# Get the connection and verify it belongs to the user
connection = _getUserConnection(interface, connectionId, currentUser.id)
if not connection:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail=f"Connection {connectionId} not found or does not belong to user"
)
# Verify it's a Microsoft connection
authority = connection.authority.value if hasattr(connection.authority, 'value') else str(connection.authority)
if authority.lower() != 'msft':
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=f"Connection {connectionId} is not a Microsoft connection"
)
# Initialize services
services = getServices(currentUser, None)
# Set access token on SharePoint service
if not services.sharepoint.setAccessTokenFromConnection(connection):
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail=routeApiMsg("Failed to set SharePoint access token. Connection may be expired or invalid.")
)
# Discover SharePoint sites
sites = await services.sharepoint.discoverSites()
return sites
except HTTPException:
raise
except Exception as e:
logger.error(f"Error getting SharePoint sites: {str(e)}")
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"Error getting SharePoint sites: {str(e)}"
)
@router.get("/{connectionId}/sites/{siteId}/folders", response_model=List[Dict[str, Any]])
@limiter.limit("60/minute")
async def list_sharepoint_folders(
request: Request,
connectionId: str = Path(..., description="Microsoft connection ID"),
siteId: str = Path(..., description="SharePoint site ID"),
path: Optional[str] = Query(None, description="Folder path (empty for root)"),
currentUser: User = Depends(getCurrentUser)
) -> List[Dict[str, Any]]:
"""List folder contents for a SharePoint site and folder path"""
try:
interface = getInterface(currentUser)
# Get the connection and verify it belongs to the user
connection = _getUserConnection(interface, connectionId, currentUser.id)
if not connection:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail=f"Connection {connectionId} not found or does not belong to user"
)
# Verify it's a Microsoft connection
authority = connection.authority.value if hasattr(connection.authority, 'value') else str(connection.authority)
if authority.lower() != 'msft':
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=f"Connection {connectionId} is not a Microsoft connection"
)
# Initialize services
services = getServices(currentUser, None)
# Set access token on SharePoint service
if not services.sharepoint.setAccessTokenFromConnection(connection):
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail=routeApiMsg("Failed to set SharePoint access token. Connection may be expired or invalid.")
)
# Normalize folder path (empty string for root)
folderPath = path or ''
# List folder contents
items = await services.sharepoint.listFolderContents(siteId, folderPath)
return items or []
except HTTPException:
raise
except Exception as e:
logger.error(f"Error listing SharePoint folders: {str(e)}")
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"Error listing SharePoint folders: {str(e)}"
)
@router.get("/{connectionId}/folder-options", response_model=List[Dict[str, Any]])
@limiter.limit("30/minute")
async def getSharepointFolderOptions(
request: Request,
connectionId: str = Path(..., description="Microsoft connection ID"),
siteId: Optional[str] = Query(None, description="Specific site ID to browse (if omitted, returns sites only)"),
path: Optional[str] = Query(None, description="Folder path within site to browse"),
currentUser: User = Depends(getCurrentUser)
) -> List[Dict[str, Any]]:
"""
Get SharePoint folders formatted as dropdown options.
Two modes:
1. If siteId is not provided: Returns list of sites (for site selection)
2. If siteId is provided: Returns folders within that site (optionally at specific path)
This avoids expensive iteration through all sites and folders.
"""
try:
interface = getInterface(currentUser)
# Get the connection and verify it belongs to the user
connection = _getUserConnection(interface, connectionId, currentUser.id)
if not connection:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail=f"Connection {connectionId} not found or does not belong to user"
)
# Verify it's a Microsoft connection
authority = connection.authority.value if hasattr(connection.authority, 'value') else str(connection.authority)
if authority.lower() != 'msft':
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=f"Connection {connectionId} is not a Microsoft connection"
)
# Initialize services
services = getServices(currentUser, None)
# Set access token on SharePoint service
if not services.sharepoint.setAccessTokenFromConnection(connection):
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail=routeApiMsg("Failed to set SharePoint access token. Connection may be expired or invalid.")
)
# Mode 1: Return sites list if no siteId specified
if not siteId:
sites = await services.sharepoint.discoverSites()
return [
{
"type": "site",
"value": site.get("id"),
"label": site.get("displayName", "Unknown Site"),
"siteId": site.get("id"),
"siteName": site.get("displayName", "Unknown Site"),
"webUrl": site.get("webUrl", ""),
"path": _extractSitePath(site.get("webUrl", ""))
}
for site in sites
]
# Mode 2: Return folders within specific site
folderPath = path or ""
items = await services.sharepoint.listFolderContents(siteId, folderPath)
if not items:
return []
folderOptions = []
for item in items:
if item.get("type") == "folder":
folderName = item.get("name", "")
itemPath = f"{folderPath}/{folderName}" if folderPath else folderName
folderOptions.append({
"type": "folder",
"value": itemPath,
"label": folderName,
"siteId": siteId,
"folderName": folderName,
"path": itemPath,
"hasChildren": True # Assume folders may have children
})
return folderOptions
except HTTPException:
raise
except Exception as e:
logger.error(f"Error getting SharePoint folder options: {str(e)}")
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"Error getting SharePoint folder options: {str(e)}"
)
def _extractSitePath(webUrl: str) -> str: def _extractSitePath(webUrl: str) -> str:
"""Extract site path from webUrl (e.g., https://company.sharepoint.com/sites/MySite -> /sites/MySite)""" """Extract site path from webUrl (e.g., https://company.sharepoint.com/sites/MySite -> /sites/MySite)"""

View file

@ -11,7 +11,9 @@ Navigation API Konzept:
""" """
import logging import logging
from typing import Dict, List, Any, Optional import time
from collections import Counter
from typing import Dict, List, Any, Optional, Set
from fastapi import APIRouter, Depends, Request from fastapi import APIRouter, Depends, Request
from slowapi import Limiter from slowapi import Limiter
from slowapi.util import get_remote_address from slowapi.util import get_remote_address
@ -255,6 +257,7 @@ def _buildDynamicBlock(
featuresMap[featureKey]["instances"].append({ featuresMap[featureKey]["instances"].append({
"id": str(instance.id), "id": str(instance.id),
"uiLabel": instance.label, "uiLabel": instance.label,
"featureCode": instance.featureCode,
"order": 10, "order": 10,
"views": views, "views": views,
"isAdmin": permissions.get("isAdmin", False), "isAdmin": permissions.get("isAdmin", False),
@ -510,4 +513,442 @@ def get_navigation(
return { return {
"blocks": [], "blocks": [],
"error": str(e), "error": str(e),
}
# =============================================================================
# AI models (integrations overview)
# =============================================================================
def _buildIntegrationsOverviewPayload(userId: str, user=None) -> Dict[str, Any]:
"""
Single payload for the Integrations architecture page: real UserConnections,
DataSource / FeatureDataSource rows, trustee accounting bindings, AICore
connector modules (not individual models), extractor extensions and renderer
formats from registries, platform infra tools, and live KPI stats.
"""
root = getRootInterface()
out: Dict[str, Any] = {
"aicoreModules": [],
"infraTools": [],
"extractorExtensions": [],
"extractorClasses": [],
"rendererFormats": [],
"rendererClasses": [],
"dataLayerItems": [],
"liveStats": {},
"errors": [],
} }
_PROVIDER_LABELS = {
"anthropic": "Anthropic (Claude)",
"openai": "OpenAI (GPT)",
"mistral": "Mistral (Le Chat)",
"perplexity": "Perplexity",
"tavily": "Tavily (Websuche)",
"privatellm": "Private LLM",
"internal": "Intern",
}
# --- AICore: one entry per connector module + model counts ---
try:
from modules.aicore.aicoreModelRegistry import modelRegistry
modelRegistry.ensureConnectorsRegistered()
modelRegistry.refreshModels(force=False)
counts = Counter()
for m in modelRegistry.getModels():
if not getattr(m, "isAvailable", True):
continue
counts[str(getattr(m, "connectorType", "") or "")] += 1
modules: List[Dict[str, Any]] = []
for conn in modelRegistry.discoverConnectors():
ct = conn.getConnectorType()
modules.append(
{
"connectorType": ct,
"label": _PROVIDER_LABELS.get(ct, ct),
"modelCount": int(counts.get(ct, 0)),
}
)
out["aicoreModules"] = modules
except Exception as e:
logger.error(f"integrations-overview aicore: {e}")
out["errors"].append(f"aicore: {e}")
# --- Extractors (registered extensions, unique + per-class rows) ---
try:
from modules.serviceCenter.services.serviceExtraction.mainServiceExtraction import ExtractionService
from modules.serviceCenter.services.serviceExtraction.subRegistry import ExtractorRegistry
if ExtractionService._sharedExtractorRegistry is None:
ExtractionService._sharedExtractorRegistry = ExtractorRegistry()
reg = ExtractionService._sharedExtractorRegistry
ext_map = reg.getExtensionToMimeMap()
uniq = sorted({str(k).upper() for k in ext_map.keys() if k and "." not in str(k)})
out["extractorExtensions"] = uniq
seen_ext: Set[int] = set()
class_rows: List[Dict[str, Any]] = []
for extractor in reg._map.values():
eid = id(extractor)
if eid in seen_ext:
continue
seen_ext.add(eid)
if not hasattr(extractor, "getSupportedExtensions"):
continue
raw_exts = extractor.getSupportedExtensions()
if not raw_exts:
continue
norm = sorted({str(x).lstrip(".").lower() for x in raw_exts if x})
if norm:
class_rows.append({"className": extractor.__class__.__name__, "extensions": norm})
class_rows.sort(key=lambda r: r["className"])
out["extractorClasses"] = class_rows
fb = getattr(reg, "_fallback", None)
if fb and hasattr(fb, "getSupportedExtensions") and id(fb) not in seen_ext:
raw_exts = fb.getSupportedExtensions()
if raw_exts:
norm = sorted({str(x).lstrip(".").lower() for x in raw_exts if x})
if norm:
out["extractorClasses"].append({"className": fb.__class__.__name__, "extensions": norm})
out["extractorClasses"].sort(key=lambda r: r["className"])
except Exception as e:
logger.error(f"integrations-overview extractors: {e}")
out["errors"].append(f"extractors: {e}")
# --- Renderers (registered output formats + per-class rows) ---
try:
from modules.serviceCenter.services.serviceGeneration.renderers.registry import getSupportedFormats, getRendererInfo
out["rendererFormats"] = sorted(getSupportedFormats())
by_renderer_class: Dict[str, Dict[str, Any]] = {}
for composite_key, meta in getRendererInfo().items():
cn = meta.get("class_name") or ""
if not cn:
continue
fmt = composite_key.split(":")[0] if ":" in composite_key else composite_key
if cn not in by_renderer_class:
by_renderer_class[cn] = {"className": cn, "formats": set()}
by_renderer_class[cn]["formats"].add(fmt)
renderer_rows = [
{"className": d["className"], "formats": sorted(d["formats"])}
for _, d in sorted(by_renderer_class.items(), key=lambda x: x[0])
]
out["rendererClasses"] = renderer_rows
except Exception as e:
logger.error(f"integrations-overview renderers: {e}")
out["errors"].append(f"renderers: {e}")
# --- Platform infra tools (only routes that exist in this deployment) ---
out["infraTools"] = [
{"id": "voice", "label": "Voice / STT"},
]
accessible_instance_ids: Set[str] = set()
try:
for access in root.getFeatureAccessesForUser(userId):
if not getattr(access, "enabled", True):
continue
accessible_instance_ids.add(str(access.featureInstanceId))
except Exception as e:
logger.debug(f"integrations-overview feature accesses: {e}")
# --- UserConnection (active only) ---
try:
from modules.datamodels.datamodelUam import ConnectionStatus
for c in root.getUserConnections(userId):
st = c.status
st_val = st.value if hasattr(st, "value") else str(st)
if st_val != ConnectionStatus.ACTIVE.value:
continue
dumped = c.model_dump(mode="json")
dumped["kind"] = "userConnection"
out["dataLayerItems"].append(dumped)
except Exception as e:
logger.error(f"integrations-overview connections: {e}")
out["errors"].append(f"connections: {e}")
# --- DataSource & FeatureDataSource ---
try:
from modules.datamodels.datamodelDataSource import DataSource
from modules.datamodels.datamodelFeatureDataSource import FeatureDataSource
seen_ds: Set[str] = set()
for row in root.db.getRecordset(DataSource, recordFilter={"userId": userId}) or []:
rid = str(row.get("id", ""))
if not rid or rid in seen_ds:
continue
seen_ds.add(rid)
out["dataLayerItems"].append(
{
"kind": "dataSource",
"id": rid,
"label": row.get("label") or row.get("displayPath") or rid,
"sourceType": row.get("sourceType") or "",
"featureInstanceId": row.get("featureInstanceId"),
"mandateId": row.get("mandateId"),
"connectionId": row.get("connectionId"),
}
)
for iid in accessible_instance_ids:
for row in root.db.getRecordset(DataSource, recordFilter={"featureInstanceId": iid}) or []:
rid = str(row.get("id", ""))
if not rid or rid in seen_ds:
continue
seen_ds.add(rid)
out["dataLayerItems"].append(
{
"kind": "dataSource",
"id": rid,
"label": row.get("label") or row.get("displayPath") or rid,
"sourceType": row.get("sourceType") or "",
"featureInstanceId": row.get("featureInstanceId"),
"mandateId": row.get("mandateId"),
"connectionId": row.get("connectionId"),
}
)
seen_fds: Set[str] = set()
for row in root.db.getRecordset(FeatureDataSource, recordFilter={"userId": userId}) or []:
rid = str(row.get("id", ""))
if not rid or rid in seen_fds:
continue
seen_fds.add(rid)
out["dataLayerItems"].append(
{
"kind": "featureDataSource",
"id": rid,
"label": row.get("label") or rid,
"featureCode": row.get("featureCode") or "",
"tableName": row.get("tableName") or "",
"featureInstanceId": row.get("featureInstanceId"),
"mandateId": row.get("mandateId"),
}
)
for iid in accessible_instance_ids:
for row in root.db.getRecordset(FeatureDataSource, recordFilter={"featureInstanceId": iid}) or []:
rid = str(row.get("id", ""))
if not rid or rid in seen_fds:
continue
seen_fds.add(rid)
out["dataLayerItems"].append(
{
"kind": "featureDataSource",
"id": rid,
"label": row.get("label") or rid,
"featureCode": row.get("featureCode") or "",
"tableName": row.get("tableName") or "",
"featureInstanceId": row.get("featureInstanceId"),
"mandateId": row.get("mandateId"),
}
)
except Exception as e:
logger.error(f"integrations-overview datasources: {e}")
out["errors"].append(f"datasources: {e}")
# --- Trustee accounting systems (configured integrations per instance) ---
try:
from modules.features.trustee.datamodelFeatureTrustee import TrusteeAccountingConfig
fi = getFeatureInterface(root.db)
seen_acc: Set[str] = set()
for iid in accessible_instance_ids:
inst = fi.getFeatureInstance(iid)
if not inst or inst.featureCode != "trustee":
continue
for row in root.db.getRecordset(
TrusteeAccountingConfig,
recordFilter={"featureInstanceId": iid, "isActive": True},
) or []:
rid = str(row.get("id", ""))
if not rid or rid in seen_acc:
continue
seen_acc.add(rid)
out["dataLayerItems"].append(
{
"kind": "trusteeAccounting",
"id": rid,
"featureInstanceId": iid,
"instanceLabel": getattr(inst, "label", None) or "",
"mandateId": str(getattr(inst, "mandateId", "") or ""),
"connectorType": row.get("connectorType") or "",
"displayLabel": row.get("displayLabel") or row.get("connectorType") or rid,
}
)
except Exception as e:
logger.error(f"integrations-overview trustee accounting: {e}")
out["errors"].append(f"trusteeAccounting: {e}")
# --- Live stats (billing AI calls + workflow metrics) ---
liveStats: Dict[str, Any] = {
"aiCallCount": 0,
"aiCallPeriodDays": 30,
"totalWorkflows": 0,
"activeWorkflows": 0,
"totalRuns": 0,
"totalTokens": 0,
}
# Billing: count AI transactions in the last 30 days
if user is not None:
try:
from modules.interfaces.interfaceDbBilling import getInterface as getBillingInterface
mandateIds: List[str] = []
for um in root.getUserMandates(userId):
mid = getattr(um, "mandateId", None)
if mid and getattr(um, "enabled", True):
mandateIds.append(str(mid))
if mandateIds:
bi = getBillingInterface(user, mandateIds[0])
now = time.time()
startTs = now - 30 * 86400
stats = bi.getTransactionStatisticsAggregated(
mandateIds=mandateIds,
scope="all",
userId=userId,
startTs=startTs,
endTs=now,
period="month",
)
liveStats["aiCallCount"] = stats.get("transactionCount", 0)
except Exception as e:
logger.debug(f"integrations-overview billing stats: {e}")
# Workflow metrics (same logic as routeWorkflowDashboard.get_workflow_metrics)
try:
from modules.shared.configuration import APP_CONFIG
from modules.connectors.connectorDbPostgre import DatabaseConnector
from modules.datamodels.datamodelPagination import PaginationParams
from modules.features.graphicalEditor.datamodelFeatureGraphicalEditor import (
AutoWorkflow, AutoRun,
)
wfDb = DatabaseConnector(
dbHost=APP_CONFIG.get("DB_HOST", "localhost"),
dbDatabase="poweron_graphicaleditor",
dbUser=APP_CONFIG.get("DB_USER"),
dbPassword=APP_CONFIG.get("DB_PASSWORD_SECRET") or APP_CONFIG.get("DB_PASSWORD"),
dbPort=int(APP_CONFIG.get("DB_PORT", 5432)),
userId=None,
)
if wfDb._ensureTableExists(AutoWorkflow):
mandateIds_wf: List[str] = []
for um in root.getUserMandates(userId):
mid = getattr(um, "mandateId", None)
if mid and getattr(um, "enabled", True):
mandateIds_wf.append(str(mid))
wfFilter: dict = {"isTemplate": False}
if mandateIds_wf:
wfFilter["mandateId"] = mandateIds_wf
else:
wfFilter["mandateId"] = "__impossible__"
wfCount = wfDb.getRecordsetPaginated(
AutoWorkflow,
pagination=PaginationParams(page=1, pageSize=1),
recordFilter=wfFilter,
)
liveStats["totalWorkflows"] = (
wfCount.get("totalItems", 0) if isinstance(wfCount, dict) else wfCount.totalItems
)
activeFilter = dict(wfFilter)
activeFilter["active"] = True
activeCount = wfDb.getRecordsetPaginated(
AutoWorkflow,
pagination=PaginationParams(page=1, pageSize=1),
recordFilter=activeFilter,
)
liveStats["activeWorkflows"] = (
activeCount.get("totalItems", 0) if isinstance(activeCount, dict) else activeCount.totalItems
)
if wfDb._ensureTableExists(AutoRun):
runFilter: dict = {}
if mandateIds_wf:
runFilter["mandateId"] = mandateIds_wf
else:
runFilter["ownerId"] = userId
runCount = wfDb.getRecordsetPaginated(
AutoRun,
pagination=PaginationParams(page=1, pageSize=1),
recordFilter=runFilter,
)
liveStats["totalRuns"] = (
runCount.get("totalItems", 0) if isinstance(runCount, dict) else runCount.totalItems
)
totalTokens = 0
totalRuns = liveStats["totalRuns"]
if 0 < totalRuns <= 10000:
allRuns = wfDb.getRecordset(
AutoRun, recordFilter=runFilter, fieldFilter=["costTokens"],
) or []
for r in allRuns:
totalTokens += r.get("costTokens", 0) or 0
liveStats["totalTokens"] = totalTokens
except Exception as e:
logger.debug(f"integrations-overview workflow stats: {e}")
out["liveStats"] = liveStats
return out
@router.get("/integrations-overview")
@limiter.limit("30/minute")
def get_integrations_overview(
request: Request,
reqContext: RequestContext = Depends(getRequestContext),
) -> Dict[str, Any]:
"""Aggregated, non-fictitious data for the PORTA integrations diagram."""
user_id = str(reqContext.user.id)
return _buildIntegrationsOverviewPayload(user_id, user=reqContext.user)
@router.get("/ai-models")
@limiter.limit("60/minute")
def get_ai_models_for_integrations(
request: Request,
reqContext: RequestContext = Depends(getRequestContext),
) -> Dict[str, Any]:
"""
Registered AI models for the Integrations architecture page.
Returns unique displayName entries with connector metadata (no callables).
"""
try:
from modules.aicore.aicoreModelRegistry import modelRegistry
modelRegistry.ensureConnectorsRegistered()
modelRegistry.refreshModels(force=False)
models = modelRegistry.getModels()
out: List[Dict[str, Any]] = []
seen: set = set()
for m in models:
if not getattr(m, "isAvailable", True):
continue
key = (m.displayName, m.connectorType)
if key in seen:
continue
seen.add(key)
dumped = m.model_dump(
exclude={"functionCall", "functionCallStream", "calculatepriceCHF"},
mode="json",
)
out.append(dumped)
return {"models": out}
except Exception as e:
logger.error(f"Error listing AI models: {e}")
return {"models": [], "error": str(e)}

View file

@ -12,15 +12,11 @@ import json
import base64 import base64
import secrets import secrets
import time import time
from fastapi import APIRouter, File, Form, UploadFile, Depends, HTTPException, Body, Query, Request, WebSocket, WebSocketDisconnect from fastapi import APIRouter, Depends, HTTPException, Query, Request, WebSocket, WebSocketDisconnect
from fastapi.responses import Response
from typing import Optional, Dict, Any, List from typing import Optional, Dict, Any, List
from modules.auth import getCurrentUser, getRequestContext, RequestContext, limiter from modules.auth import getCurrentUser, getRequestContext, RequestContext, limiter
from modules.datamodels.datamodelUam import User from modules.datamodels.datamodelUam import User
from modules.interfaces.interfaceVoiceObjects import getVoiceInterface, VoiceObjects from modules.interfaces.interfaceVoiceObjects import getVoiceInterface, VoiceObjects
from modules.shared.i18nRegistry import apiRouteContext
routeApiMsg = apiRouteContext("routeVoiceGoogle")
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
router = APIRouter(prefix="/voice-google", tags=["Voice Google"]) router = APIRouter(prefix="/voice-google", tags=["Voice Google"])
@ -63,299 +59,6 @@ def _getVoiceInterface(currentUser: User) -> VoiceObjects:
detail=f"Failed to initialize voice interface: {str(e)}" detail=f"Failed to initialize voice interface: {str(e)}"
) )
@router.post("/speech-to-text")
async def speech_to_text(
audioFile: UploadFile = File(...),
language: str = Form("de-DE"),
currentUser: User = Depends(getCurrentUser)
):
"""Convert speech to text using Google Cloud Speech-to-Text API."""
try:
logger.info(f"🎤 Speech-to-text request: {audioFile.filename}, language: {language}")
# Read audio file
audioContent = await audioFile.read()
logger.info(f"📊 Audio file size: {len(audioContent)} bytes")
# Get voice interface
voiceInterface = _getVoiceInterface(currentUser)
# Validate audio format
validation = voiceInterface.validateAudioFormat(audioContent)
if not validation["valid"]:
raise HTTPException(
status_code=400,
detail=f"Invalid audio format: {validation.get('error', 'Unknown error')}"
)
# Perform speech recognition
result = await voiceInterface.speechToText(
audioContent=audioContent,
language=language
)
if result["success"]:
return {
"success": True,
"text": result["text"],
"confidence": result["confidence"],
"language": result["language"],
"audio_info": {
"size": len(audioContent),
"format": validation["format"],
"estimated_duration": validation.get("estimated_duration", 0)
}
}
else:
raise HTTPException(
status_code=400,
detail=f"Speech recognition failed: {result.get('error', 'Unknown error')}"
)
except HTTPException:
raise
except Exception as e:
logger.error(f"❌ Speech-to-text error: {e}")
raise HTTPException(
status_code=500,
detail=f"Speech-to-text processing failed: {str(e)}"
)
@router.post("/detect-language")
async def detect_language(
text: str = Form(...),
currentUser: User = Depends(getCurrentUser)
):
"""Detect the language of text using Google Cloud Translation API."""
try:
logger.info(f"🔍 Language detection request: '{text[:100]}...'")
if not text.strip():
raise HTTPException(
status_code=400,
detail=routeApiMsg("Empty text provided for language detection")
)
# Get voice interface
voiceInterface = _getVoiceInterface(currentUser)
# Perform language detection
result = await voiceInterface.detectLanguage(text)
if result["success"]:
return {
"success": True,
"language": result["language"],
"confidence": result.get("confidence", 1.0)
}
else:
raise HTTPException(
status_code=400,
detail=f"Language detection failed: {result.get('error', 'Unknown error')}"
)
except HTTPException:
raise
except Exception as e:
logger.error(f"❌ Language detection error: {e}")
raise HTTPException(
status_code=500,
detail=f"Language detection processing failed: {str(e)}"
)
@router.post("/translate")
async def translate_text(
text: str = Form(...),
sourceLanguage: str = Form("de"),
targetLanguage: str = Form("en"),
currentUser: User = Depends(getCurrentUser)
):
"""Translate text using Google Cloud Translation API."""
try:
logger.info(f"🌐 Translation request: '{text}' ({sourceLanguage} -> {targetLanguage})")
if not text.strip():
raise HTTPException(
status_code=400,
detail=routeApiMsg("Empty text provided for translation")
)
# Get voice interface
voiceInterface = _getVoiceInterface(currentUser)
# Perform translation
result = await voiceInterface.translateText(
text=text,
sourceLanguage=sourceLanguage,
targetLanguage=targetLanguage
)
if result["success"]:
return {
"success": True,
"original_text": result["original_text"],
"translated_text": result["translated_text"],
"source_language": result["source_language"],
"target_language": result["target_language"]
}
else:
raise HTTPException(
status_code=400,
detail=f"Translation failed: {result.get('error', 'Unknown error')}"
)
except HTTPException:
raise
except Exception as e:
logger.error(f"❌ Translation error: {e}")
raise HTTPException(
status_code=500,
detail=f"Translation processing failed: {str(e)}"
)
@router.post("/realtime-interpreter")
async def realtime_interpreter(
audioFile: UploadFile = File(...),
fromLanguage: str = Form("de-DE"),
toLanguage: str = Form("en-US"),
connectionId: str = Form(None),
currentUser: User = Depends(getCurrentUser)
):
"""Real-time interpreter: speech to translated text using Google Cloud APIs."""
try:
logger.info(f"🔄 Real-time interpreter request: {audioFile.filename}")
logger.info(f" From: {fromLanguage} -> To: {toLanguage}")
logger.info(f" MIME type: {audioFile.content_type}")
# Read audio file
audioContent = await audioFile.read()
logger.info(f"📊 Audio file size: {len(audioContent)} bytes")
# Save audio file for debugging with correct extension
# file_extension = "webm" if audio_file.filename.endswith('.webm') else "wav"
# debug_filename = f"debug_audio/audio_google_{audio_file.filename.replace('.wav', '.webm')}"
# os.makedirs("debug_audio", exist_ok=True)
# with open(debug_filename, "wb") as f:
# f.write(audio_content)
# logger.info(f"💾 Saved audio file for debugging: {debug_filename}")
# Get voice interface
voiceInterface = _getVoiceInterface(currentUser)
# Validate audio format
validation = voiceInterface.validateAudioFormat(audioContent)
if not validation["valid"]:
raise HTTPException(
status_code=400,
detail=f"Invalid audio format: {validation.get('error', 'Unknown error')}"
)
# Perform complete pipeline: Speech-to-Text + Translation
result = await voiceInterface.speechToTranslatedText(
audioContent=audioContent,
fromLanguage=fromLanguage,
toLanguage=toLanguage
)
if result["success"]:
logger.info(f"✅ Real-time interpreter successful:")
logger.info(f" Original: '{result['original_text']}'")
logger.info(f" Translated: '{result['translated_text']}'")
return {
"success": True,
"original_text": result["original_text"],
"translated_text": result["translated_text"],
"confidence": result["confidence"],
"source_language": result["source_language"],
"target_language": result["target_language"],
"audio_info": {
"size": len(audioContent),
"format": validation["format"],
"estimated_duration": validation.get("estimated_duration", 0)
}
}
else:
raise HTTPException(
status_code=400,
detail=f"Real-time interpreter failed: {result.get('error', 'Unknown error')}"
)
except HTTPException:
raise
except Exception as e:
logger.error(f"❌ Real-time interpreter error: {e}")
raise HTTPException(
status_code=500,
detail=f"Real-time interpreter processing failed: {str(e)}"
)
@router.post("/text-to-speech")
async def text_to_speech(
request: Request,
text: str = Form(...),
language: str = Form("de-DE"),
voice: str = Form(None),
context: RequestContext = Depends(getRequestContext),
):
"""Convert text to speech using Google Cloud Text-to-Speech."""
try:
logger.info(f"Text-to-Speech request: '{text[:50]}...' in {language}")
if not text.strip():
raise HTTPException(
status_code=400,
detail=routeApiMsg("Empty text provided for text-to-speech")
)
mandateId = str(getattr(context, "mandateId", "") or "")
voiceInterface = getVoiceInterface(context.user, mandateId)
try:
from modules.serviceCenter.services.serviceBilling.mainServiceBilling import getService as getBillingService
billingService = getBillingService(context.user, mandateId)
def _billingCb(data):
priceCHF = data.get("priceCHF", 0.0)
operation = data.get("operation", "voice")
if priceCHF > 0:
billingService.recordUsage(priceCHF=priceCHF, aicoreProvider="google-voice", aicoreModel=operation, description=f"Voice {operation}")
voiceInterface.billingCallback = _billingCb
except Exception as e:
logger.warning(f"TTS billing setup skipped: {e}")
result = await voiceInterface.textToSpeech(
text=text,
languageCode=language,
voiceName=voice
)
if result["success"]:
return Response(
content=result["audioContent"],
media_type="audio/mpeg",
headers={
"Content-Disposition": "attachment; filename=speech.mp3",
"X-Voice-Name": result.get("voiceName", ""),
"X-Language-Code": result.get("languageCode", language),
}
)
else:
raise HTTPException(
status_code=400,
detail=f"Text-to-Speech failed: {result.get('error', 'Unknown error')}"
)
except HTTPException:
raise
except Exception as e:
logger.error(f"Text-to-Speech error: {e}")
raise HTTPException(
status_code=500,
detail=f"Text-to-Speech processing failed: {str(e)}"
)
@router.get("/languages") @router.get("/languages")
async def get_available_languages(currentUser: User = Depends(getCurrentUser)): async def get_available_languages(currentUser: User = Depends(getCurrentUser)):
"""Get available languages from Google Cloud Text-to-Speech.""" """Get available languages from Google Cloud Text-to-Speech."""
@ -426,71 +129,6 @@ async def get_available_voices(
detail=f"Failed to get available voices: {str(e)}" detail=f"Failed to get available voices: {str(e)}"
) )
@router.get("/health")
async def health_check(currentUser: User = Depends(getCurrentUser)):
"""Health check for Google Cloud voice services."""
try:
voiceInterface = _getVoiceInterface(currentUser)
test_result = await voiceInterface.healthCheck()
return test_result
except Exception as e:
logger.error(f"❌ Health check failed: {e}")
return {
"status": "unhealthy",
"error": str(e)
}
@router.get("/settings")
async def get_voice_settings(currentUser: User = Depends(getCurrentUser)):
"""Get voice settings for the current user (reads from UserVoicePreferences)."""
from modules.datamodels.datamodelUam import UserVoicePreferences
from modules.interfaces.interfaceDbApp import getRootInterface
rootInterface = getRootInterface()
userId = str(currentUser.id)
prefs = rootInterface.db.getRecordset(
UserVoicePreferences, recordFilter={"userId": userId}
)
if prefs:
data = prefs[0] if isinstance(prefs[0], dict) else prefs[0].model_dump()
return {"success": True, "data": {"user_settings": data}}
return {"success": True, "data": {"user_settings": UserVoicePreferences(userId=userId).model_dump()}}
@router.post("/settings")
async def save_voice_settings(
settings: Dict[str, Any] = Body(...),
currentUser: User = Depends(getCurrentUser)
):
"""Save voice settings for the current user (writes to UserVoicePreferences)."""
from modules.datamodels.datamodelUam import UserVoicePreferences, _normalizeTtsVoiceMap
from modules.interfaces.interfaceDbApp import getRootInterface
rootInterface = getRootInterface()
userId = str(currentUser.id)
allowedFields = {
"sttLanguage", "ttsLanguage", "ttsVoice", "ttsVoiceMap",
"translationSourceLanguage", "translationTargetLanguage",
}
updateData = {k: v for k, v in settings.items() if k in allowedFields}
if "ttsVoiceMap" in updateData:
updateData["ttsVoiceMap"] = _normalizeTtsVoiceMap(updateData["ttsVoiceMap"])
existing = rootInterface.db.getRecordset(
UserVoicePreferences, recordFilter={"userId": userId}
)
if existing:
existingRecord = existing[0]
existingId = existingRecord.get("id") if isinstance(existingRecord, dict) else existingRecord.id
rootInterface.db.recordModify(UserVoicePreferences, existingId, updateData)
else:
newPrefs = UserVoicePreferences(userId=userId, **updateData)
rootInterface.db.recordCreate(UserVoicePreferences, newPrefs.model_dump())
return {"success": True, "message": "Voice settings saved successfully", "data": updateData}
# ========================================================================= # =========================================================================
# STT Streaming WebSocket — generic, used by all features # STT Streaming WebSocket — generic, used by all features
# ========================================================================= # =========================================================================

View file

@ -1,16 +1,17 @@
# Copyright (c) 2025 Patrick Motsch # Copyright (c) 2025 Patrick Motsch
# All rights reserved. # All rights reserved.
""" """
System-level Workflow Runs Dashboard API. System-level Workflow Dashboard API.
Provides cross-feature, cross-mandate access to workflow runs Provides cross-feature, cross-mandate access to workflow runs AND workflows
with RBAC scoping: user sees own runs, mandate admin sees mandate runs, with RBAC scoping: user sees own runs/workflows, mandate admin sees mandate
sysadmin sees all runs. runs/workflows, sysadmin sees all.
""" """
import json
import logging import logging
import math import math
from typing import Optional from typing import Optional, List
from fastapi import APIRouter, Depends, Request, Query, Path, HTTPException from fastapi import APIRouter, Depends, Request, Query, Path, HTTPException
from slowapi import Limiter from slowapi import Limiter
from slowapi.util import get_remote_address from slowapi.util import get_remote_address
@ -20,6 +21,8 @@ from modules.interfaces.interfaceDbApp import getRootInterface
from modules.connectors.connectorDbPostgre import DatabaseConnector from modules.connectors.connectorDbPostgre import DatabaseConnector
from modules.shared.configuration import APP_CONFIG from modules.shared.configuration import APP_CONFIG
from modules.datamodels.datamodelPagination import PaginationParams from modules.datamodels.datamodelPagination import PaginationParams
from modules.datamodels.datamodelFeatures import FeatureInstance
from modules.datamodels.datamodelUam import Mandate
from modules.features.graphicalEditor.datamodelFeatureGraphicalEditor import ( from modules.features.graphicalEditor.datamodelFeatureGraphicalEditor import (
AutoRun, AutoStepLog, AutoWorkflow, AutoTask, AutoRun, AutoStepLog, AutoWorkflow, AutoTask,
) )
@ -91,6 +94,12 @@ def _getAdminMandateIds(userId: str, mandateIds: list) -> list:
return [mid for mid in mandateIds if mid in adminMandates] return [mid for mid in mandateIds if mid in adminMandates]
def _isUserMandateAdmin(userId: str, mandateId: str) -> bool:
    """Check if user is admin for a specific mandate."""
    # _getAdminMandateIds returns the subset of the given ids the user
    # administers; membership of mandateId in that subset is the answer.
    return mandateId in _getAdminMandateIds(userId, [mandateId])
def _scopedRunFilter(context: RequestContext) -> Optional[dict]: def _scopedRunFilter(context: RequestContext) -> Optional[dict]:
""" """
Build a DB filter dict based on RBAC: Build a DB filter dict based on RBAC:
@ -114,6 +123,38 @@ def _scopedRunFilter(context: RequestContext) -> Optional[dict]:
return {"ownerId": userId} return {"ownerId": userId}
def _scopedWorkflowFilter(context: RequestContext) -> Optional[dict]:
    """
    Build a DB filter for AutoWorkflow based on RBAC:
    - sysadmin: None (no filter, sees all)
    - normal user: mandateId IN user's mandates
    """
    if context.hasSysAdminRole:
        return None
    # Sentinel filter that matches no rows for users without any scope.
    noAccess = {"mandateId": "__impossible__"}
    userId = str(context.user.id) if context.user else None
    if not userId:
        return noAccess
    userMandates = _getUserMandateIds(userId)
    return {"mandateId": userMandates} if userMandates else noAccess
def _getManagementDb() -> DatabaseConnector:
    """Get connector to the management DB for Mandate/FeatureInstance lookups."""
    connectionArgs = {
        "dbHost": APP_CONFIG.get("DB_HOST", "localhost"),
        "dbDatabase": APP_CONFIG.get("DB_NAME", "poweron_management"),
        "dbUser": APP_CONFIG.get("DB_USER"),
        # Prefer the secret-store password; fall back to the plain config key.
        "dbPassword": APP_CONFIG.get("DB_PASSWORD_SECRET") or APP_CONFIG.get("DB_PASSWORD"),
        "dbPort": int(APP_CONFIG.get("DB_PORT", 5432)),
        "userId": None,
    }
    return DatabaseConnector(**connectionArgs)
@router.get("") @router.get("")
@limiter.limit("60/minute") @limiter.limit("60/minute")
def get_workflow_runs( def get_workflow_runs(
@ -268,3 +309,114 @@ def get_run_steps(
steps = [dict(r) for r in records] if records else [] steps = [dict(r) for r in records] if records else []
steps.sort(key=lambda s: s.get("startedAt") or 0) steps.sort(key=lambda s: s.get("startedAt") or 0)
return {"steps": steps} return {"steps": steps}
# ---------------------------------------------------------------------------
# System-level Workflow listing (all workflows the user can see via RBAC)
# ---------------------------------------------------------------------------
@router.get("/workflows")
@limiter.limit("60/minute")
def get_system_workflows(
request: Request,
active: Optional[bool] = Query(None, description="Filter by active status"),
mandateId: Optional[str] = Query(None, description="Filter by mandate"),
pagination: Optional[str] = Query(None, description="JSON-encoded PaginationParams"),
context: RequestContext = Depends(getRequestContext),
) -> dict:
"""List all workflows the user has access to (RBAC-scoped, cross-instance)."""
db = _getDb()
if not db._ensureTableExists(AutoWorkflow):
return {"items": [], "pagination": {"currentPage": 1, "pageSize": 25, "totalItems": 0, "totalPages": 0}}
baseFilter = _scopedWorkflowFilter(context)
recordFilter = dict(baseFilter) if baseFilter else {}
recordFilter["isTemplate"] = False
if active is not None:
recordFilter["active"] = active
if mandateId:
recordFilter["mandateId"] = mandateId
paginationParams = None
if pagination:
try:
paginationParams = PaginationParams(**json.loads(pagination))
except Exception:
pass
if not paginationParams:
paginationParams = PaginationParams(
page=1,
pageSize=25,
sort=[{"field": "sysCreatedAt", "direction": "desc"}],
)
result = db.getRecordsetPaginated(
AutoWorkflow,
pagination=paginationParams,
recordFilter=recordFilter if recordFilter else None,
)
pageItems = result.get("items", []) if isinstance(result, dict) else result.items
totalItems = result.get("totalItems", 0) if isinstance(result, dict) else result.totalItems
totalPages = result.get("totalPages", 0) if isinstance(result, dict) else result.totalPages
mandateIds = list({w.get("mandateId") for w in pageItems if w.get("mandateId")})
instanceIds = list({w.get("featureInstanceId") for w in pageItems if w.get("featureInstanceId")})
mandateLabelMap: dict = {}
instanceLabelMap: dict = {}
try:
mgmtDb = _getManagementDb()
if mandateIds and mgmtDb._ensureTableExists(Mandate):
mandates = mgmtDb.getRecordset(Mandate, recordFilter={"id": mandateIds})
for m in (mandates or []):
row = dict(m)
mandateLabelMap[row.get("id")] = row.get("label") or row.get("name") or row.get("id")
if instanceIds and mgmtDb._ensureTableExists(FeatureInstance):
instances = mgmtDb.getRecordset(FeatureInstance, recordFilter={"id": instanceIds})
for fi in (instances or []):
row = dict(fi)
instanceLabelMap[row.get("id")] = row.get("label") or row.get("id")
except Exception as e:
logger.warning(f"Failed to enrich workflow labels: {e}")
userId = str(context.user.id) if context.user else None
adminMandateIds = []
if userId and not context.hasSysAdminRole:
userMandateIds = _getUserMandateIds(userId)
adminMandateIds = _getAdminMandateIds(userId, userMandateIds)
items = []
for w in pageItems:
row = dict(w)
wMandateId = row.get("mandateId")
row["mandateLabel"] = mandateLabelMap.get(wMandateId, wMandateId or "")
row["instanceLabel"] = instanceLabelMap.get(row.get("featureInstanceId"), row.get("featureInstanceId") or "")
if context.hasSysAdminRole:
row["canEdit"] = True
row["canDelete"] = True
row["canExecute"] = True
elif wMandateId and wMandateId in adminMandateIds:
row["canEdit"] = True
row["canDelete"] = True
row["canExecute"] = True
else:
row["canEdit"] = False
row["canDelete"] = False
row["canExecute"] = False
row.pop("graph", None)
items.append(row)
return {
"items": items,
"pagination": {
"currentPage": paginationParams.page,
"pageSize": paginationParams.pageSize,
"totalItems": totalItems,
"totalPages": totalPages,
},
}

View file

@ -322,13 +322,6 @@ def _buildSummaryPrompt(
return prompt return prompt
_LANGUAGE_NAMES = {
"de": "German", "en": "English", "fr": "French", "it": "Italian",
"es": "Spanish", "pt": "Portuguese", "nl": "Dutch", "ja": "Japanese",
"zh": "Chinese", "ko": "Korean", "ar": "Arabic", "ru": "Russian",
}
def buildSystemPrompt( def buildSystemPrompt(
tools: List[ToolDefinition], tools: List[ToolDefinition],
toolsFormatted: str = None, toolsFormatted: str = None,
@ -339,16 +332,14 @@ def buildSystemPrompt(
Args: Args:
tools: Available tool definitions. tools: Available tool definitions.
toolsFormatted: Pre-formatted tool descriptions for text-based fallback. toolsFormatted: Pre-formatted tool descriptions for text-based fallback.
userLanguage: ISO 639-1 language code (e.g. "de", "en"). The agent will userLanguage: Kept for backwards compatibility, no longer used for language selection.
respond in this language.
""" """
langName = _LANGUAGE_NAMES.get(userLanguage, "")
langInstruction = ( langInstruction = (
f"IMPORTANT: Always respond in {langName} ({userLanguage}). " "IMPORTANT: Always respond in the same language the user writes in. "
f"The user's language is {langName}. All your messages, explanations, " "If the user writes in German, respond in German. If in French, respond in French. "
f"and summaries MUST be in {langName}. " "Generate documents and content in the user's language unless explicitly asked otherwise. "
f"Only use English for tool call arguments and technical identifiers.\n\n" "Only use English for tool call arguments and technical identifiers.\n\n"
) if langName else "" )
prompt = ( prompt = (
f"{langInstruction}" f"{langInstruction}"

View file

@ -123,11 +123,22 @@ def _registerFeatureSubAgentTools(registry: ToolRegistry, services):
selectedTables = catalog.getDataObjects(featureCode) selectedTables = catalog.getDataObjects(featureCode)
else: else:
allObjs = {o["meta"]["table"]: o for o in catalog.getDataObjects(featureCode) if "meta" in o and "table" in o.get("meta", {})} allObjs = {o["meta"]["table"]: o for o in catalog.getDataObjects(featureCode) if "meta" in o and "table" in o.get("meta", {})}
selectedTables = [allObjs[ds["tableName"]] for ds in featureDataSources if ds.get("tableName") in allObjs] selectedTables = []
_wildcardExpanded = False
for ds in featureDataSources: for ds in featureDataSources:
rf = ds.get("recordFilter") tn = ds.get("tableName", "")
if rf and isinstance(rf, dict) and ds.get("tableName"): ok = ds.get("objectKey", "")
tableFilters[ds["tableName"]] = rf if ok.endswith(".*") or (not tn and ok):
selectedTables = list(allObjs.values())
_wildcardExpanded = True
break
if tn in allObjs:
selectedTables.append(allObjs[tn])
if not _wildcardExpanded:
for ds in featureDataSources:
rf = ds.get("recordFilter")
if rf and isinstance(rf, dict) and ds.get("tableName"):
tableFilters[ds["tableName"]] = rf
if not selectedTables: if not selectedTables:
return ToolResult( return ToolResult(

View file

@ -121,7 +121,7 @@ class AgentService:
if workflowId is None: if workflowId is None:
workflowId = getattr(self.services.workflow, "id", "unknown") if self.services.workflow else "unknown" workflowId = getattr(self.services.workflow, "id", "unknown") if self.services.workflow else "unknown"
resolvedLanguage = userLanguage or getattr(self.services.user, "language", "") or "de" resolvedLanguage = userLanguage or ""
enrichedPrompt = await self._enrichPromptWithFiles(prompt, fileIds) enrichedPrompt = await self._enrichPromptWithFiles(prompt, fileIds)
@ -365,15 +365,27 @@ class AgentService:
toolCallId="", toolName=REQUEST_TOOLBOX_TOOL_NAME, toolCallId="", toolName=REQUEST_TOOLBOX_TOOL_NAME,
success=False, error=f"Unknown toolbox: {toolboxId}", success=False, error=f"Unknown toolbox: {toolboxId}",
) )
activatedCount = 0
for toolName in tb.tools: for toolName in tb.tools:
if not registry.isValidTool(toolName): if registry.isValidTool(toolName):
logger.info("requestToolbox: tool '%s' from toolbox '%s' not yet registered, skipping", toolName, toolboxId) activatedCount += 1
continue continue
logger.info("requestToolbox: activated toolbox '%s' (%d tools). Reason: %s", toolboxId, len(tb.tools), reason) try:
from modules.serviceCenter.services.serviceAgent.actionToolAdapter import ActionToolAdapter
adapter = ActionToolAdapter(self._getService("actionExecutor"))
adapter.registerAll(registry)
if registry.isValidTool(toolName):
activatedCount += 1
logger.info("requestToolbox: re-registered tool '%s' from toolbox '%s'", toolName, toolboxId)
else:
logger.warning("requestToolbox: tool '%s' from toolbox '%s' could not be registered", toolName, toolboxId)
except Exception as regErr:
logger.warning("requestToolbox: failed to register tool '%s': %s", toolName, regErr)
logger.info("requestToolbox: activated toolbox '%s' (%d/%d tools). Reason: %s", toolboxId, activatedCount, len(tb.tools), reason)
return ToolResult( return ToolResult(
toolCallId="", toolName=REQUEST_TOOLBOX_TOOL_NAME, toolCallId="", toolName=REQUEST_TOOLBOX_TOOL_NAME,
success=True, success=True,
data=f"Toolbox '{tb.label}' activated with {len(tb.tools)} tools. They are now available.", data=f"Toolbox '{tb.label}' activated with {activatedCount} tools. They are now available.",
) )
registry.register( registry.register(

View file

@ -52,6 +52,15 @@ NAVIGATION_SECTIONS = [
"order": 10, "order": 10,
"public": True, "public": True,
}, },
{
"id": "integrations",
"objectKey": "ui.system.integrations",
"label": t("Integrationen"),
"icon": "FaProjectDiagram",
"path": "/integrations",
"order": 15,
"public": True,
},
], ],
"subgroups": [ "subgroups": [
# ── Basisdaten ── # ── Basisdaten ──

View file

@ -96,6 +96,13 @@ async def refreshAccountingData(self, parameters: Dict[str, Any]) -> ActionResul
summary.pop("startedAt", None) summary.pop("startedAt", None)
summary.pop("finishedAt", None) summary.pop("finishedAt", None)
try:
from modules.serviceCenter.services.serviceAgent.coreTools._featureSubAgentTools import clearFeatureQueryCache
clearFeatureQueryCache(featureInstanceId)
logger.info("Cleared feature query cache for instance %s after accounting import", featureInstanceId)
except Exception as cacheErr:
logger.warning("Could not clear feature query cache: %s", cacheErr)
return ActionResult.isSuccess(documents=[ return ActionResult.isSuccess(documents=[
ActionDocument( ActionDocument(
documentName="refresh_result", documentName="refresh_result",

View file

@ -1,444 +0,0 @@
================================================================================
FUNCTION IMPORTS ANALYSIS
================================================================================
Total function imports (internal modules): 229
- CIRCULAR (must stay): 4
- REDUNDANT (can remove): 0
- MOVABLE (can move): 225
================================================================================
MOVABLE TO HEADER (grouped by source module)
These imports could potentially be moved to the module header.
================================================================================
gateway.app
-----------
[lifespan] modules.shared.auditLogger
gateway.modules.auth.authentication
-----------------------------------
[requireSysAdmin] modules.shared.auditLogger
gateway.modules.auth.tokenManager
---------------------------------
[getFreshToken] modules.interfaces.interfaceDbApp
[getFreshToken] modules.security.rootAccess
gateway.modules.auth.tokenRefreshService
----------------------------------------
[_refresh_google_token] modules.auth.tokenManager
[_refresh_microsoft_token] modules.auth.tokenManager
[proactive_refresh] modules.interfaces.interfaceDbApp
[refresh_expired_tokens] modules.interfaces.interfaceDbApp
[proactive_refresh] modules.security.rootAccess
[refresh_expired_tokens] modules.security.rootAccess
gateway.modules.datamodels.datamodelChat
----------------------------------------
[updateFromSelection] modules.datamodels.datamodelWorkflow
gateway.modules.features.aichat.mainAiChat
------------------------------------------
[onStart] modules.aicore.aicoreModelRegistry
gateway.modules.features.automation.routeFeatureAutomation
----------------------------------------------------------
[execute_automation] modules.services
gateway.modules.features.chatbot.datamodelFeatureChatbot
--------------------------------------------------------
[updateFromSelection] modules.datamodels.datamodelWorkflow
gateway.modules.features.chatbot.interfaceFeatureChatbot
--------------------------------------------------------
[createLog] modules.features.chatbot.eventManager
[createMessage] modules.features.chatbot.eventManager
[_enrichAutomationsWithUserAndMandate] modules.interfaces.interfaceDbApp
[storeDebugMessageAndDocuments] modules.interfaces.interfaceDbManagement
[setUserContext] modules.security.rootAccess
[_notifyAutomationChanged] modules.shared.callbackRegistry
[storeDebugMessageAndDocuments] modules.shared.debugLogger
[deleteAutomationDefinition] modules.shared.eventManagement
gateway.modules.features.chatbot.mainChatbot
--------------------------------------------
[_convert_file_ids_to_document_references] modules.interfaces.interfaceRbac
gateway.modules.features.neutralizer.mainNeutralizePlayground
-------------------------------------------------------------
[processSharepointFiles] modules.services.serviceSharepoint.mainServiceSharepoint
gateway.modules.features.realestate.interfaceFeatureRealEstate
--------------------------------------------------------------
[setUserContext] modules.security.rootAccess
gateway.modules.features.realestate.mainRealEstate
--------------------------------------------------
[executeIntentBasedOperation] modules.features.realestate.datamodelFeatureRealEstate
gateway.modules.features.trustee.interfaceFeatureTrustee
--------------------------------------------------------
[setUserContext] modules.security.rootAccess
gateway.modules.interfaces.interfaceBootstrap
---------------------------------------------
[_applyDatabaseOptimizations] modules.shared.dbMultiTenantOptimizations
gateway.modules.interfaces.interfaceDbApp
-----------------------------------------
[getRootInterface] modules.security.rootAccess
gateway.modules.interfaces.interfaceDbChat
------------------------------------------
[_enrichAutomationsWithUserAndMandate] modules.interfaces.interfaceDbApp
[storeDebugMessageAndDocuments] modules.interfaces.interfaceDbManagement
[setUserContext] modules.security.rootAccess
[_notifyAutomationChanged] modules.shared.callbackRegistry
[storeDebugMessageAndDocuments] modules.shared.debugLogger
gateway.modules.interfaces.interfaceDbManagement
------------------------------------------------
[_initializeStandardPrompts] modules.interfaces.interfaceDbApp
[_initializeStandardPrompts] modules.security.rootAccess
[setUserContext] modules.security.rootAccess
gateway.modules.interfaces.interfaceFeatures
--------------------------------------------
[syncRolesFromTemplate] modules.datamodels.datamodelMembership
gateway.modules.interfaces.interfaceRbac
----------------------------------------
[getRecordsetWithRBAC] modules.connectors.connectorDbPostgre
gateway.modules.interfaces.interfaceTicketObjects
-------------------------------------------------
[createTicketInterfaceByType] modules.connectors.connectorTicketsClickup
[createTicketInterfaceByType] modules.connectors.connectorTicketsJira
gateway.modules.routes.routeAdminAutomationEvents
-------------------------------------------------
[sync_all_automation_events] modules.interfaces.interfaceDbApp
[sync_all_automation_events] modules.services
[get_all_automation_events] modules.shared.eventManagement
[remove_event] modules.shared.eventManagement
[sync_all_automation_events] modules.workflows.automation
gateway.modules.routes.routeAdminFeatures
-----------------------------------------
[_getInstancePermissions] modules.datamodels.datamodelMembership
[_getUserRoleInInstance] modules.datamodels.datamodelMembership
[addUserToFeatureInstance] modules.datamodels.datamodelMembership
[listFeatureInstanceUsers] modules.datamodels.datamodelMembership
[removeUserFromFeatureInstance] modules.datamodels.datamodelMembership
[updateFeatureInstanceUserRoles] modules.datamodels.datamodelMembership
[_getInstancePermissions] modules.datamodels.datamodelRbac
[_getUserRoleInInstance] modules.datamodels.datamodelRbac
[_hasMandateAdminRole] modules.datamodels.datamodelRbac
[getFeatureInstanceAvailableRoles] modules.datamodels.datamodelRbac
[listFeatureInstanceUsers] modules.datamodels.datamodelRbac
gateway.modules.routes.routeDataUsers
-------------------------------------
[delete_user] modules.datamodels.datamodelMembership
[get_user] modules.datamodels.datamodelMembership
[reset_user_password] modules.datamodels.datamodelMembership
[sendPasswordLink] modules.datamodels.datamodelMembership
[update_user] modules.datamodels.datamodelMembership
[sendPasswordLink] modules.services
[change_password] modules.shared.auditLogger
[reset_user_password] modules.shared.auditLogger
[sendPasswordLink] modules.shared.auditLogger
[sendPasswordLink] modules.shared.configuration
gateway.modules.routes.routeDataWorkflows
-----------------------------------------
[get_action_schema] modules.services
[get_all_actions] modules.services
[get_method_actions] modules.services
[get_action_schema] modules.workflows.processing.shared.methodDiscovery
[get_all_actions] modules.workflows.processing.shared.methodDiscovery
[get_method_actions] modules.workflows.processing.shared.methodDiscovery
gateway.modules.routes.routeGdpr
--------------------------------
[exportUserData] modules.datamodels.datamodelFeatures
[deleteAccount] modules.datamodels.datamodelInvitation
[exportUserData] modules.datamodels.datamodelInvitation
[deleteAccount] modules.datamodels.datamodelMembership
[exportPortableData] modules.datamodels.datamodelMembership
[exportUserData] modules.datamodels.datamodelMembership
[deleteAccount] modules.datamodels.datamodelSecurity
gateway.modules.routes.routeInvitations
---------------------------------------
[createInvitation] modules.datamodels.datamodelFeatures
[_hasMandateAdminRole] modules.datamodels.datamodelRbac
[_isInstanceRole] modules.datamodels.datamodelRbac
[createInvitation] modules.datamodels.datamodelRbac
[registerAndAcceptInvitation] modules.security.passwordUtils
[createInvitation] modules.shared.configuration
[listInvitations] modules.shared.configuration
gateway.modules.routes.routeMessaging
-------------------------------------
[_hasTriggerPermission] modules.interfaces.interfaceDbApp
[triggerSubscription] modules.services
gateway.modules.routes.routeSecurityAdmin
-----------------------------------------
[revoke_tokens_by_mandate] modules.datamodels.datamodelMembership
gateway.modules.routes.routeSecurityGoogle
------------------------------------------
[auth_callback] modules.datamodels.datamodelSecurity
[logout] modules.shared.auditLogger
gateway.modules.routes.routeSecurityLocal
-----------------------------------------
[_sendAuthEmail] modules.datamodels.datamodelMessaging
[_sendAuthEmail] modules.interfaces.interfaceMessaging
[login] modules.shared.auditLogger
[logout] modules.shared.auditLogger
[passwordReset] modules.shared.auditLogger
gateway.modules.routes.routeSecurityMsft
----------------------------------------
[logout] modules.shared.auditLogger
gateway.modules.security.rootAccess
-----------------------------------
[_ensureBootstrap] modules.interfaces.interfaceBootstrap
gateway.modules.services.__init__
---------------------------------
[__init__] modules.interfaces.interfaceDbApp
[__init__] modules.interfaces.interfaceDbChat
[__init__] modules.interfaces.interfaceDbManagement
gateway.modules.services.serviceAi.mainAiChat
---------------------------------------------
[onStart] modules.aicore.aicoreModelRegistry
gateway.modules.services.serviceAi.mainServiceAi
------------------------------------------------
[renderResult] modules.services.serviceGeneration.mainServiceGeneration
[_handleCodeGeneration] modules.services.serviceGeneration.paths.codePath
[_handleDocumentGeneration] modules.services.serviceGeneration.paths.documentPath
[_handleImageGeneration] modules.services.serviceGeneration.paths.imagePath
gateway.modules.services.serviceAi.subContentExtraction
-------------------------------------------------------
[extractTextFromImage] modules.datamodels.datamodelAi
[processTextContentWithAi] modules.datamodels.datamodelAi
gateway.modules.services.serviceAi.subJsonResponseHandling
----------------------------------------------------------
[mergeFragmentIntoSection] modules.shared.debugLogger
gateway.modules.services.serviceAi.subStructureFilling
------------------------------------------------------
[_getAcceptedSectionTypesForFormat] modules.datamodels.datamodelJson
[_getAcceptedSectionTypesForFormat] modules.services.serviceGeneration.renderers.registry
[buildSectionPromptWithContinuation] modules.shared.jsonContinuation
[_extractAndMergeMultipleJsonBlocks] modules.shared.jsonUtils
[_processAiResponseForSection] modules.shared.jsonUtils
[_processSingleSection] modules.shared.jsonUtils
gateway.modules.services.serviceAi.subStructureGeneration
---------------------------------------------------------
[generateStructure] modules.services.serviceGeneration.renderers.registry
[generateStructure] modules.shared
[generateStructure] modules.shared.jsonContinuation
gateway.modules.services.serviceChat.mainServiceChat
----------------------------------------------------
[getChatDocumentsFromDocumentList] modules.datamodels.datamodelDocref
gateway.modules.services.serviceExtraction.mainServiceExtraction
----------------------------------------------------------------
[extractContent] modules.interfaces.interfaceDbManagement
[extractContent] modules.shared.debugLogger
gateway.modules.services.serviceExtraction.subPromptBuilderExtraction
---------------------------------------------------------------------
[buildExtractionPrompt] modules.shared.debugLogger
gateway.modules.services.serviceGeneration.mainServiceGeneration
----------------------------------------------------------------
[getAdaptiveExtractionPrompt] modules.services.serviceExtraction.subPromptBuilderExtraction
[renderReport] modules.services.serviceGeneration.renderers.registry
[generateDocumentWithTwoPhases] modules.services.serviceGeneration.subContentGenerator
[generateDocumentWithTwoPhases] modules.services.serviceGeneration.subStructureGenerator
gateway.modules.services.serviceGeneration.paths.codePath
---------------------------------------------------------
[generateCode] modules.datamodels.datamodelDocument
[_getCodeRenderer] modules.services.serviceGeneration.renderers.registry
[_generateCodeStructure] modules.shared.jsonContinuation
[_generateSingleFileContent] modules.shared.jsonContinuation
gateway.modules.services.serviceGeneration.renderers.rendererDocx
-----------------------------------------------------------------
[getAcceptedSectionTypes] modules.datamodels.datamodelJson
gateway.modules.services.serviceGeneration.renderers.rendererHtml
-----------------------------------------------------------------
[getAcceptedSectionTypes] modules.datamodels.datamodelJson
gateway.modules.services.serviceGeneration.renderers.rendererImage
------------------------------------------------------------------
[_compressPromptWithAi] modules.datamodels.datamodelAi
[_generateAiImage] modules.datamodels.datamodelAi
gateway.modules.services.serviceGeneration.renderers.rendererJson
-----------------------------------------------------------------
[getAcceptedSectionTypes] modules.datamodels.datamodelJson
gateway.modules.services.serviceGeneration.renderers.rendererMarkdown
---------------------------------------------------------------------
[getAcceptedSectionTypes] modules.datamodels.datamodelJson
gateway.modules.services.serviceGeneration.renderers.rendererPdf
----------------------------------------------------------------
[_getAiStylesWithPdfColors] modules.datamodels.datamodelAi
[getAcceptedSectionTypes] modules.datamodels.datamodelJson
gateway.modules.services.serviceGeneration.renderers.rendererPptx
-----------------------------------------------------------------
[getAcceptedSectionTypes] modules.datamodels.datamodelJson
gateway.modules.services.serviceGeneration.renderers.rendererText
-----------------------------------------------------------------
[getAcceptedSectionTypes] modules.datamodels.datamodelJson
gateway.modules.services.serviceGeneration.renderers.rendererXlsx
-----------------------------------------------------------------
[_getAiStylesWithExcelColors] modules.datamodels.datamodelAi
[getAcceptedSectionTypes] modules.datamodels.datamodelJson
gateway.modules.services.serviceGeneration.subContentGenerator
--------------------------------------------------------------
[_generateImageSection] modules.datamodels.datamodelAi
[_generateSimpleSection] modules.datamodels.datamodelAi
[_generateSimpleSection] modules.shared.jsonUtils
gateway.modules.services.serviceGeneration.subStructureGenerator
----------------------------------------------------------------
[generateStructure] modules.datamodels.datamodelAi
gateway.modules.services.serviceUtils.mainServiceUtils
------------------------------------------------------
[storeDebugMessageAndDocuments] modules.interfaces.interfaceDbChat
[debugLogToFile] modules.shared.debugLogger
[writeDebugArtifact] modules.shared.debugLogger
[writeDebugFile] modules.shared.debugLogger
gateway.modules.shared.auditLogger
----------------------------------
[_ensureInitialized] modules.datamodels.datamodelAudit
[cleanupOldEntries] modules.datamodels.datamodelAudit
[getAuditLogs] modules.datamodels.datamodelAudit
[logEvent] modules.datamodels.datamodelAudit
[registerAuditLogCleanupScheduler] modules.shared.eventManagement
gateway.modules.shared.debugLogger
----------------------------------
[debugLogToFile] modules.shared.timeUtils
gateway.modules.shared.jsonUtils
--------------------------------
[buildContinuationContext] modules.shared.jsonContinuation
gateway.modules.workflows.automation.subAutomationSchedule
----------------------------------------------------------
[start] modules.shared.callbackRegistry
[start] modules.workflows.automation
gateway.modules.workflows.methods.methodAi.actions.generateCode
---------------------------------------------------------------
[generateCode] modules.datamodels.datamodelDocref
gateway.modules.workflows.methods.methodAi.actions.generateDocument
-------------------------------------------------------------------
[generateDocument] modules.datamodels.datamodelDocref
gateway.modules.workflows.methods.methodAi.actions.process
----------------------------------------------------------
[process] modules.datamodels.datamodelDocref
[process] modules.datamodels.datamodelWorkflow
gateway.modules.workflows.methods.methodChatbot.actions.queryDatabase
---------------------------------------------------------------------
[queryDatabase] modules.datamodels.datamodelDocref
gateway.modules.workflows.methods.methodOutlook.actions.composeAndDraftEmailWithContext
---------------------------------------------------------------------------------------
[composeAndDraftEmailWithContext] modules.datamodels.datamodelDocref
gateway.modules.workflows.methods.methodOutlook.actions.sendDraftEmail
----------------------------------------------------------------------
[sendDraftEmail] modules.datamodels.datamodelDocref
gateway.modules.workflows.methods.methodSharepoint.actions.copyFile
-------------------------------------------------------------------
[copyFile] modules.datamodels.datamodelDocref
gateway.modules.workflows.methods.methodSharepoint.actions.downloadFileByPath
-----------------------------------------------------------------------------
[downloadFileByPath] modules.datamodels.datamodelDocref
gateway.modules.workflows.methods.methodSharepoint.actions.uploadFile
---------------------------------------------------------------------
[uploadFile] modules.datamodels.datamodelDocref
gateway.modules.workflows.methods.methodSharepoint.helpers.documentParsing
--------------------------------------------------------------------------
[parseDocumentListForFolder] modules.datamodels.datamodelDocref
[parseDocumentListForFoundDocuments] modules.datamodels.datamodelDocref
gateway.modules.workflows.processing.core.actionExecutor
--------------------------------------------------------
[_createActionCompletionMessage] modules.workflows.processing.core.messageCreator
gateway.modules.workflows.processing.modes.modeDynamic
------------------------------------------------------
[_actExecute] modules.datamodels.datamodelAi
[_planSelect] modules.datamodels.datamodelAi
[_refineDecide] modules.datamodels.datamodelAi
[_actExecute] modules.datamodels.datamodelDocref
[_planSelect] modules.datamodels.datamodelDocref
[_actExecute] modules.datamodels.datamodelWorkflow
[_planSelect] modules.datamodels.datamodelWorkflow
[_actExecute] modules.shared.jsonUtils
[_planSelect] modules.shared.jsonUtils
[_refineDecide] modules.shared.jsonUtils
[_actExecute] modules.workflows.processing.shared.methodDiscovery
gateway.modules.workflows.processing.shared.placeholderFactory
--------------------------------------------------------------
[extractReviewContent] modules.datamodels.datamodelChat
[extractLatestRefinementFeedback] modules.interfaces.interfaceDbApp
[extractLatestRefinementFeedback] modules.interfaces.interfaceDbChat
gateway.modules.workflows.workflowManager
-----------------------------------------
[_executeTasks] modules.datamodels.datamodelWorkflow
[workflowStart] modules.workflows.processing.shared.methodDiscovery
[_checkIfHistoryAvailable] modules.workflows.processing.shared.placeholderFactory
================================================================================
CIRCULAR DEPENDENCY (must stay in function)
================================================================================
gateway.modules.shared.auditLogger
----------------------------------
[_ensureInitialized] modules.connectors.connectorDbPostgre
gateway.modules.shared.configuration
------------------------------------
[decryptValue] modules.shared.auditLogger
[encryptValue] modules.shared.auditLogger
[get] modules.shared.auditLogger

File diff suppressed because it is too large Load diff

View file

@ -39,10 +39,6 @@ AUTH_DIR = GATEWAY_DIR / "modules" / "auth"
# Value: set of function names that must remain async def # Value: set of function names that must remain async def
_MUST_STAY_ASYNC: Dict[str, Set[str]] = { _MUST_STAY_ASYNC: Dict[str, Set[str]] = {
# --- routes/ --- # --- routes/ ---
"modules/routes/routeAdminRbacExport.py": {
"import_global_rbac", # await file.read()
"import_mandate_rbac", # await file.read()
},
"modules/routes/routeDataConnections.py": { "modules/routes/routeDataConnections.py": {
"get_connections", # await token_refresh_service.refresh_expired_tokens(...) "get_connections", # await token_refresh_service.refresh_expired_tokens(...)
}, },