Compare commits

..

No commits in common. "1f40c59afc35e63b38d07c5ac892a4b5340f7bd3" and "cb5f2d60c4773bb83406a7d95481d2c53168de19" have entirely different histories.

162 changed files with 1403 additions and 8331 deletions

15
app.py
View file

@ -360,18 +360,6 @@ async def lifespan(app: FastAPI):
eventManager.set_event_loop(main_loop)
from modules.workflows.scheduler.mainScheduler import setMainLoop as setSchedulerMainLoop
setSchedulerMainLoop(main_loop)
# Suppress noisy ConnectionResetError from ProactorEventLoop on Windows
# when clients (browsers) close connections abruptly. This is a known
# asyncio issue on Windows: https://bugs.python.org/issue39010
def _suppressClientDisconnect(loop, ctx):
exc = ctx.get("exception")
if isinstance(exc, ConnectionResetError):
return
if isinstance(exc, ConnectionAbortedError):
return
loop.default_exception_handler(ctx)
main_loop.set_exception_handler(_suppressClientDisconnect)
except RuntimeError:
pass
eventManager.start()
@ -615,9 +603,6 @@ app.include_router(userAccessOverviewRouter)
from modules.routes.routeAdminDemoConfig import router as demoConfigRouter
app.include_router(demoConfigRouter)
from modules.routes.routeAdminDatabaseHealth import router as adminDatabaseHealthRouter
app.include_router(adminDatabaseHealthRouter)
from modules.routes.routeGdpr import router as gdprRouter
app.include_router(gdprRouter)

View file

@ -7,10 +7,7 @@ High-level security functionality that depends on FastAPI and interfaces.
Multi-Tenant Design:
- RequestContext: Per-request context with user, mandate, feature instance, roles
- getRequestContext: FastAPI dependency to extract context from X-Mandate-Id header
- requireSysAdmin: FastAPI dependency for INFRASTRUCTURE-level operations
(logs, tokens, DB-health, i18n-master). Includes RBAC bypass.
- requirePlatformAdmin: FastAPI dependency for CROSS-MANDATE GOVERNANCE
(user-/mandate-/RBAC-/feature-registry mgmt). No bypass.
- requireSysAdmin: FastAPI dependency for system-level admin operations
"""
from .authentication import (
@ -22,7 +19,7 @@ from .authentication import (
RequestContext,
getRequestContext,
requireSysAdmin,
requirePlatformAdmin,
requireSysAdminRole,
)
from .jwtService import (
createAccessToken,
@ -48,7 +45,7 @@ __all__ = [
"RequestContext",
"getRequestContext",
"requireSysAdmin",
"requirePlatformAdmin",
"requireSysAdminRole",
# JWT Service
"createAccessToken",
"createRefreshToken",

View file

@ -272,6 +272,7 @@ class RequestContext:
# Request-scoped cache: rules loaded only once per request
self._cachedRules: Optional[List[tuple]] = None
self._cachedHasSysAdminRole: Optional[bool] = None
def getRules(self) -> List[tuple]:
"""
@ -298,17 +299,18 @@ class RequestContext:
@property
def isSysAdmin(self) -> bool:
"""Convenience property: Infrastructure/System Operator flag.
For Category A (Logs, Tokens, DB-Health, i18n-Master, Registry).
Wirkt auch als RBAC-Engine-Bypass (siehe rbac.py:getUserPermissions)."""
"""Convenience property to check if user has the isSysAdmin FLAG.
Category A only: true system operations (tokens, logs, databases)."""
return getattr(self.user, 'isSysAdmin', False)
@property
def isPlatformAdmin(self) -> bool:
"""Convenience property: Cross-Mandate-Governance flag.
For Categories BE (User-/Mandate-/RBAC-/Feature-Registry über alle Mandanten).
KEIN RBAC-Bypass Daten-Zugriff geht weiterhin über Mandanten-Mitgliedschaft."""
return getattr(self.user, 'isPlatformAdmin', False)
def hasSysAdminRole(self) -> bool:
"""Check if user has sysadmin ROLE in root mandate (cached per request).
Use for admin operations (Categories B/C/D/E) instead of isSysAdmin flag."""
if self._cachedHasSysAdminRole is None:
self._cachedHasSysAdminRole = _hasSysAdminRole(str(self.user.id))
return self._cachedHasSysAdminRole
def getRequestContext(
request: Request,
@ -321,37 +323,33 @@ def getRequestContext(
Checks authorization and loads role IDs.
Security Model:
- Regular users: Must be explicit members of mandates/feature instances.
- isSysAdmin users: RBAC-Engine-Bypass; können jeden Mandant für
Infrastruktur-Operationen betreten ohne Mitgliedschaft. ``ctx.roleIds``
bleibt leer (Bypass läuft direkt in ``rbac.py:getUserPermissions``).
- isPlatformAdmin users: Cross-Mandate-Governance; können jeden Mandant
betreten, aber Routen prüfen die Berechtigung explizit via
``requirePlatformAdmin``. ``ctx.roleIds`` bleibt leer.
- Regular users: Must be explicit members of mandates/feature instances
- SysAdmin users: Can access ANY mandate for administrative operations.
Root mandate roles (incl. sysadmin role) are loaded for RBAC-based authorization.
Routes use ctx.hasSysAdminRole for admin checks (not ctx.isSysAdmin flag).
Args:
request: FastAPI Request object
mandateId: Mandate ID from X-Mandate-Id header
featureInstanceId: Feature instance ID from X-Instance-Id header
currentUser: Current authenticated user
Returns:
RequestContext with user, mandate, roles
Raises:
HTTPException 403: If user is not member of mandate (and not Sys/Platform admin)
HTTPException 403: If non-SysAdmin user is not member of mandate or has no feature access
"""
ctx = RequestContext(user=currentUser)
isSysAdmin = getattr(currentUser, 'isSysAdmin', False)
isPlatformAdmin = getattr(currentUser, 'isPlatformAdmin', False)
# Get root interface for membership checks
rootInterface = getRootInterface()
if mandateId:
# Check mandate membership
membership = rootInterface.getUserMandate(currentUser.id, mandateId)
if membership:
# User is a member - load their roles
if not membership.enabled:
@ -361,16 +359,12 @@ def getRequestContext(
)
ctx.mandateId = mandateId
ctx.roleIds = rootInterface.getRoleIdsForUserMandate(membership.id)
elif isSysAdmin or isPlatformAdmin:
# Platform-level authority can enter any mandate without membership.
# No fake role loading: isSysAdmin bypasses RBAC engine; platform-admin
# routes verify authority explicitly via requirePlatformAdmin.
elif isSysAdmin:
# SysAdmin can access any mandate for admin operations
# Load root mandate roles for RBAC-based authorization (includes sysadmin role)
ctx.mandateId = mandateId
ctx.roleIds = []
logger.debug(
f"Platform-level user {currentUser.id} accessing mandate {mandateId} "
f"(isSysAdmin={isSysAdmin}, isPlatformAdmin={isPlatformAdmin})"
)
ctx.roleIds = _getRootMandateRoleIds(rootInterface, str(currentUser.id))
logger.debug(f"SysAdmin {currentUser.id} accessing mandate {mandateId} with root mandate roles")
else:
# Regular user without membership - denied
logger.warning(f"User {currentUser.id} is not member of mandate {mandateId}")
@ -378,11 +372,11 @@ def getRequestContext(
status_code=status.HTTP_403_FORBIDDEN,
detail="Not member of mandate"
)
if featureInstanceId:
# Check feature access
access = rootInterface.getFeatureAccess(currentUser.id, featureInstanceId)
if access:
# User has access - load their instance roles
if not access.enabled:
@ -393,15 +387,13 @@ def getRequestContext(
ctx.featureInstanceId = featureInstanceId
instanceRoleIds = rootInterface.getRoleIdsForFeatureAccess(access.id)
ctx.roleIds.extend(instanceRoleIds)
elif isSysAdmin or isPlatformAdmin:
# Platform-level authority can enter any feature instance without
# explicit access record.
elif isSysAdmin:
# SysAdmin can access any feature instance for admin operations
ctx.featureInstanceId = featureInstanceId
logger.debug(
f"Platform-level user {currentUser.id} accessing feature instance "
f"{featureInstanceId} (isSysAdmin={isSysAdmin}, "
f"isPlatformAdmin={isPlatformAdmin})"
)
# If no roles loaded yet, load root mandate roles
if not ctx.roleIds:
ctx.roleIds = _getRootMandateRoleIds(rootInterface, str(currentUser.id))
logger.debug(f"SysAdmin {currentUser.id} accessing feature instance {featureInstanceId} with root mandate roles")
else:
# Regular user without access - denied
logger.warning(f"User {currentUser.id} has no access to feature instance {featureInstanceId}")
@ -409,7 +401,7 @@ def getRequestContext(
status_code=status.HTTP_403_FORBIDDEN,
detail="No access to feature instance"
)
return ctx
@ -452,46 +444,95 @@ def requireSysAdmin(currentUser: User = Depends(getCurrentUser)) -> User:
# =============================================================================
# PLATFORM ADMIN: Flag-based cross-mandate governance (replaces sysadmin role)
# SYSADMIN ROLE: RBAC-based admin checks (hybrid model)
# =============================================================================
def requirePlatformAdmin(currentUser: User = Depends(getCurrentUser)) -> User:
def _getRootMandateRoleIds(rootInterface, userId: str) -> List[str]:
"""
Require Platform-Admin flag for cross-mandate governance operations.
Load the user's role IDs from the root mandate.
Used by auth middleware to provide RBAC roles for SysAdmin cross-mandate access.
Args:
rootInterface: Root database interface
userId: User ID
Returns:
List of role IDs from root mandate membership, empty list if no membership
"""
try:
rootMandateId = rootInterface._getRootMandateId()
if not rootMandateId:
return []
membership = rootInterface.getUserMandate(userId, rootMandateId)
if not membership:
return []
return rootInterface.getRoleIdsForUserMandate(membership.id)
except Exception as e:
logger.error(f"Error loading root mandate roles: {e}")
return []
Verwendung für alle Operationen, die mandanten-übergreifend wirken:
User-Mgmt, Mandate-Mgmt, RBAC-Catalog, Feature-Registry, User-Access-Overview,
Cross-Mandate-Audit, Cross-Mandate-Billing-Übersicht, Subscription-Mgmt.
KEIN RBAC-Bypass: Daten-Zugriff auf einen einzelnen Mandanten erfordert
weiterhin Mitgliedschaft (oder zusätzlich isSysAdmin für Infrastruktur-Bypass).
def _hasSysAdminRole(userId: str) -> bool:
"""
Check if a user has the sysadmin role in the root mandate.
Standalone check that queries the database directly, independent of
request context. Used for authorization checks where the sysadmin
ROLE (not just the isSysAdmin flag) is required.
Args:
userId: User ID to check
Returns:
True if user has sysadmin role in root mandate
"""
try:
rootInterface = getRootInterface()
roleIds = _getRootMandateRoleIds(rootInterface, str(userId))
for roleId in roleIds:
role = rootInterface.getRole(roleId)
if role and role.roleLabel == "sysadmin":
return True
return False
except Exception as e:
logger.error(f"Error checking sysadmin role: {e}")
return False
def requireSysAdminRole(currentUser: User = Depends(getCurrentUser)) -> User:
"""
Require sysadmin ROLE for admin operations.
Unlike requireSysAdmin (which checks the isSysAdmin FLAG for system-level ops),
this dependency checks the sysadmin ROLE in the root mandate.
Use for admin operations that should be RBAC-controlled (Category E).
Args:
currentUser: Current authenticated user
Returns:
User if they have isPlatformAdmin=True
User if they have the sysadmin role
Raises:
HTTPException 403: If user is not a Platform Admin
HTTPException 403: If user doesn't have sysadmin role
"""
if not getattr(currentUser, 'isPlatformAdmin', False):
if not _hasSysAdminRole(str(currentUser.id)):
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail="Platform admin privileges required"
detail="SysAdmin role required"
)
# Audit for all Platform-Admin actions
# Audit
try:
from modules.shared.auditLogger import audit_logger
audit_logger.logSecurityEvent(
userId=str(currentUser.id),
mandateId="system",
action="platform_admin_action",
details="Cross-mandate governance operation"
action="sysadmin_role_action",
details="Admin operation via sysadmin role"
)
except Exception:
pass
return currentUser

View file

@ -15,7 +15,6 @@ from google.cloud import speech
from google.cloud import translate_v2 as translate
from google.cloud import texttospeech
from modules.shared.configuration import APP_CONFIG
from modules.shared.voiceCatalog import getDefaultVoice as _catalogDefaultVoice
logger = logging.getLogger(__name__)
@ -563,34 +562,16 @@ class ConnectorGoogleSpeech:
"""Google TTS WaveNet cost: ~$0.000004/char."""
return round(characterCount * 0.000004, 8)
@staticmethod
def _normalizeLanguageCode(code: Optional[str]) -> Optional[str]:
"""Normalize a user/LLM-supplied language hint to an ISO-639-1 code or None.
Google Cloud Translation v2 only accepts ISO codes (e.g. 'de', 'en') or
an omitted source for auto-detection. Strings like 'auto', '' or full
BCP-47 tags ('de-DE') would otherwise reach the API and trigger
'400 Invalid Value'. Centralising the mapping here keeps every caller
(tools, interface, internal pipelines) safe.
"""
if not code:
return None
normalized = code.strip().lower()
if not normalized or normalized in ("auto", "detect", "any", "*"):
return None
return normalized.split("-")[0]
async def translateText(self, text: str, targetLanguage: str = "en",
sourceLanguage: Optional[str] = None) -> Dict:
sourceLanguage: str = "de") -> Dict:
"""
Translate text using Google Cloud Translation API.
Args:
text: Text to translate
targetLanguage: Target language code (e.g., 'en', 'de')
sourceLanguage: Source language code (e.g., 'de', 'en'); pass None
or 'auto' for Google's auto-detection.
target_language: Target language code (e.g., 'en', 'de')
source_language: Source language code (e.g., 'de', 'en')
Returns:
Dict containing translated text and metadata
"""
@ -602,18 +583,14 @@ class ConnectorGoogleSpeech:
"translated_text": "",
"error": "Empty text provided"
}
normalizedSource = self._normalizeLanguageCode(sourceLanguage)
normalizedTarget = self._normalizeLanguageCode(targetLanguage) or "en"
logger.info(
f"🌐 Translating: '{text}' "
f"({normalizedSource or 'auto'} -> {normalizedTarget})"
)
logger.info(f"🌐 Translating: '{text}' ({sourceLanguage} -> {targetLanguage})")
# Perform translation
result = self.translate_client.translate(
text,
source_language=normalizedSource,
target_language=normalizedTarget,
source_language=sourceLanguage,
target_language=targetLanguage
)
translatedText = result['translatedText']
@ -731,8 +708,8 @@ class ConnectorGoogleSpeech:
# Step 2: Translation
translationResult = await self.translateText(
text=originalText,
sourceLanguage=fromLanguage,
targetLanguage=toLanguage,
sourceLanguage=fromLanguage.split('-')[0], # Convert 'de-DE' to 'de'
targetLanguage=toLanguage.split('-')[0] # Convert 'en-US' to 'en'
)
if not translationResult["success"]:
@ -941,26 +918,33 @@ class ConnectorGoogleSpeech:
stripped = voiceName.strip()
return bool(stripped) and "-" not in stripped
async def textToSpeech(self, text: str, languageCode: str = "de-DE", voiceName: Optional[str] = None) -> Dict[str, Any]:
async def textToSpeech(self, text: str, languageCode: str = "de-DE", voiceName: str = None) -> Dict[str, Any]:
"""
Convert text to speech using Google Cloud Text-to-Speech.
Args:
text: Text to convert to speech
languageCode: BCP-47 language code (e.g., 'de-DE', 'en-US', 'ru-RU')
voiceName: Specific voice name (optional). If omitted, a curated
default is used; if no curated default exists for the language,
Google selects a default voice automatically based on
languageCode + ssml_gender (no hard failure).
language_code: Language code (e.g., 'de-DE', 'en-US')
voice_name: Specific voice name (optional)
Returns:
Dict with success status and audio data
"""
try:
logger.info(f"Converting text to speech: '{text[:50]}...' in {languageCode}")
# Build the voice request
selectedVoice = voiceName or self._getDefaultVoice(languageCode)
isGeminiVoice = self._isGeminiTtsSpeakerVoiceName(selectedVoice) if selectedVoice else False
if not selectedVoice:
return {
"success": False,
"error": f"No voice specified for language {languageCode}. Please select a voice."
}
logger.info(f"Using TTS voice: {selectedVoice} for language: {languageCode}")
isGeminiVoice = self._isGeminiTtsSpeakerVoiceName(selectedVoice)
if isGeminiVoice:
synthesisInput = texttospeech.SynthesisInput(
@ -975,23 +959,11 @@ class ConnectorGoogleSpeech:
)
else:
synthesisInput = texttospeech.SynthesisInput(text=text)
voiceKwargs: Dict[str, Any] = {
"language_code": languageCode,
"ssml_gender": texttospeech.SsmlVoiceGender.NEUTRAL,
}
if selectedVoice:
voiceKwargs["name"] = selectedVoice
else:
logger.info(
f"TTS: no curated voice for '{languageCode}', "
f"letting Google auto-select by language + gender"
)
voice = texttospeech.VoiceSelectionParams(**voiceKwargs)
logger.info(
f"Using TTS voice: {selectedVoice or '<google-auto>'} "
f"for language: {languageCode}"
)
voice = texttospeech.VoiceSelectionParams(
language_code=languageCode,
name=selectedVoice,
ssml_gender=texttospeech.SsmlVoiceGender.NEUTRAL,
)
audioConfig = texttospeech.AudioConfig(
audio_encoding=texttospeech.AudioEncoding.MP3
@ -1000,15 +972,16 @@ class ConnectorGoogleSpeech:
response = self.tts_client.synthesize_speech(
input=synthesisInput,
voice=voice,
audio_config=audioConfig,
audio_config=audioConfig
)
# Return the audio content
return {
"success": True,
"audio_content": response.audio_content,
"audio_format": "mp3",
"language_code": languageCode,
"voice_name": selectedVoice or "<google-auto>",
"voice_name": voice.name
}
except Exception as e:
@ -1023,15 +996,59 @@ class ConnectorGoogleSpeech:
"error": f"Text-to-Speech failed: {detail}{extra}",
}
def _getDefaultVoice(self, languageCode: str) -> Optional[str]:
"""Return the curated default Google TTS voice for `languageCode`.
Delegates to the central voice catalog; returns None when no curated
voice exists, in which case the caller omits `name` and Google
auto-selects based on languageCode + ssml_gender.
def _getDefaultVoice(self, languageCode: str) -> str:
"""
return _catalogDefaultVoice(languageCode)
Get default voice name for a language code.
Falls back to a Wavenet voice for common languages.
"""
_defaults = {
"de-DE": "de-DE-Wavenet-A",
"de-CH": "de-DE-Wavenet-A",
"en-US": "en-US-Wavenet-C",
"en-GB": "en-GB-Wavenet-A",
"fr-FR": "fr-FR-Wavenet-A",
"it-IT": "it-IT-Wavenet-A",
}
return _defaults.get(languageCode)
async def getAvailableLanguages(self) -> Dict[str, Any]:
"""
Get available languages from Google Cloud Text-to-Speech.
Returns:
Dict containing success status and list of available languages
"""
try:
logger.info("🌐 Getting available languages from Google Cloud TTS")
# List voices from Google Cloud TTS
response = self.tts_client.list_voices()
# Extract unique language codes
# Note: Google TTS API doesn't provide language descriptions, only codes
language_codes = set()
for voice in response.voices:
if voice.language_codes:
language_codes.update(voice.language_codes)
# Convert to sorted list of language codes
available_languages = sorted(list(language_codes))
logger.info(f"✅ Found {len(available_languages)} available languages")
return {
"success": True,
"languages": available_languages
}
except Exception as e:
logger.error(f"❌ Failed to get available languages: {e}")
return {
"success": False,
"error": str(e),
"languages": []
}
async def getAvailableVoices(self, languageCode: Optional[str] = None) -> Dict[str, Any]:
"""
Get available voices from Google Cloud Text-to-Speech.

View file

@ -34,7 +34,7 @@ class AiAuditLogEntry(BaseModel):
userId: str = Field(
description="ID of the user who triggered the AI call",
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "User"}},
json_schema_extra={"label": "Benutzer-ID"},
)
username: Optional[str] = Field(
default=None,
@ -43,17 +43,17 @@ class AiAuditLogEntry(BaseModel):
)
mandateId: str = Field(
description="Mandate context of the call",
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate"}},
json_schema_extra={"label": "Mandanten-ID"},
)
featureInstanceId: Optional[str] = Field(
default=None,
description="Feature instance context",
json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
json_schema_extra={"label": "Feature-Instanz-ID"},
)
featureCode: Optional[str] = Field(
default=None,
description="Feature code (e.g. workspace, trustee)",
json_schema_extra={"label": "Feature", "fk_target": {"db": "poweron_app", "table": "Feature", "column": "code"}},
json_schema_extra={"label": "Feature"},
)
instanceLabel: Optional[str] = Field(
default=None,

View file

@ -106,13 +106,7 @@ class AuditLogEntry(BaseModel):
# Actor identification
userId: str = Field(
description="ID of the user who performed the action (or 'system' for system events)",
json_schema_extra={
"label": "Benutzer-ID",
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": True,
"fk_target": {"db": "poweron_app", "table": "User"},
},
json_schema_extra={"label": "Benutzer-ID", "frontend_type": "text", "frontend_readonly": True, "frontend_required": True}
)
username: Optional[str] = Field(
@ -125,25 +119,13 @@ class AuditLogEntry(BaseModel):
mandateId: Optional[str] = Field(
default=None,
description="Mandate context (if applicable)",
json_schema_extra={
"label": "Mandanten-ID",
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": False,
"fk_target": {"db": "poweron_app", "table": "Mandate"},
},
json_schema_extra={"label": "Mandanten-ID", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False}
)
featureInstanceId: Optional[str] = Field(
default=None,
description="Feature instance context (if applicable)",
json_schema_extra={
"label": "Feature-Instanz-ID",
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": False,
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
},
json_schema_extra={"label": "Feature-Instanz-ID", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False}
)
# Event classification

View file

@ -2,28 +2,16 @@
# All rights reserved.
"""Base Pydantic model with system-managed fields (DB + API + UI metadata)."""
from typing import Dict, Optional, Type
from typing import Optional
from pydantic import BaseModel, Field
from modules.shared.i18nRegistry import i18nModel
_MODEL_REGISTRY: Dict[str, Type["PowerOnModel"]] = {}
def _getModelByTableName(tableName: str) -> Optional[Type["PowerOnModel"]]:
"""Look up a PowerOnModel subclass by its table name (= class name)."""
return _MODEL_REGISTRY.get(tableName)
@i18nModel("Basisdatensatz")
class PowerOnModel(BaseModel):
"""Basis-Datenmodell mit System-Audit-Feldern fuer alle DB-Tabellen."""
def __init_subclass__(cls, **kwargs):
super().__init_subclass__(**kwargs)
_MODEL_REGISTRY[cls.__name__] = cls
sysCreatedAt: Optional[float] = Field(
default=None,
description="Record creation timestamp (UTC, set by system)",

View file

@ -46,15 +46,11 @@ class BillingAccount(PowerOnModel):
description="Primary key",
json_schema_extra={"label": "ID"},
)
mandateId: str = Field(
...,
description="Foreign key to Mandate",
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate"}},
)
mandateId: str = Field(..., description="Foreign key to Mandate", json_schema_extra={"label": "Mandanten-ID"})
userId: Optional[str] = Field(
None,
description="Foreign key to User (None = mandate pool account, set = user audit account)",
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "User"}},
json_schema_extra={"label": "Benutzer-ID"},
)
balance: float = Field(default=0.0, description="Current balance in CHF", json_schema_extra={"label": "Guthaben (CHF)"})
warningThreshold: float = Field(
@ -78,11 +74,7 @@ class BillingTransaction(PowerOnModel):
description="Primary key",
json_schema_extra={"label": "ID"},
)
accountId: str = Field(
...,
description="Foreign key to BillingAccount",
json_schema_extra={"label": "Konto-ID", "fk_target": {"db": "poweron_billing", "table": "BillingAccount"}},
)
accountId: str = Field(..., description="Foreign key to BillingAccount", json_schema_extra={"label": "Konto-ID"})
transactionType: TransactionTypeEnum = Field(..., description="Transaction type", json_schema_extra={"label": "Typ"})
amount: float = Field(..., description="Amount in CHF (always positive)", json_schema_extra={"label": "Betrag (CHF)"})
description: str = Field(..., description="Transaction description", json_schema_extra={"label": "Beschreibung"})
@ -92,28 +84,12 @@ class BillingTransaction(PowerOnModel):
referenceId: Optional[str] = Field(None, description="Reference ID", json_schema_extra={"label": "Referenz-ID"})
# Context for workflow transactions
workflowId: Optional[str] = Field(
None,
description="Workflow ID (for WORKFLOW transactions; may be Chat or Graphical Editor)",
json_schema_extra={"label": "Workflow-ID"},
)
featureInstanceId: Optional[str] = Field(
None,
description="Feature instance ID",
json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
)
featureCode: Optional[str] = Field(
None,
description="Feature code (e.g., automation)",
json_schema_extra={"label": "Feature-Code", "fk_target": {"db": "poweron_app", "table": "Feature", "column": "code"}},
)
workflowId: Optional[str] = Field(None, description="Workflow ID (for WORKFLOW transactions)", json_schema_extra={"label": "Workflow-ID"})
featureInstanceId: Optional[str] = Field(None, description="Feature instance ID", json_schema_extra={"label": "Feature-Instanz-ID"})
featureCode: Optional[str] = Field(None, description="Feature code (e.g., automation)", json_schema_extra={"label": "Feature-Code"})
aicoreProvider: Optional[str] = Field(None, description="AICore provider (anthropic, openai, etc.)", json_schema_extra={"label": "AI-Anbieter"})
aicoreModel: Optional[str] = Field(None, description="AICore model name (e.g., claude-4-sonnet, gpt-4o)", json_schema_extra={"label": "AI-Modell"})
createdByUserId: Optional[str] = Field(
None,
description="User who created/caused this transaction",
json_schema_extra={"label": "Erstellt von Benutzer", "fk_target": {"db": "poweron_app", "table": "User"}},
)
createdByUserId: Optional[str] = Field(None, description="User who created/caused this transaction", json_schema_extra={"label": "Erstellt von Benutzer"})
# AI call metadata (for per-call analytics)
processingTime: Optional[float] = Field(None, description="Processing time in seconds", json_schema_extra={"label": "Verarbeitungszeit (s)"})
@ -130,11 +106,7 @@ class BillingSettings(BaseModel):
description="Primary key",
json_schema_extra={"label": "ID"},
)
mandateId: str = Field(
...,
description="Foreign key to Mandate (UNIQUE)",
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate"}},
)
mandateId: str = Field(..., description="Foreign key to Mandate (UNIQUE)", json_schema_extra={"label": "Mandanten-ID"})
warningThresholdPercent: float = Field(
default=10.0,
@ -207,11 +179,7 @@ class UsageStatistics(BaseModel):
description="Primary key",
json_schema_extra={"label": "ID"},
)
accountId: str = Field(
...,
description="Foreign key to BillingAccount",
json_schema_extra={"label": "Konto-ID", "fk_target": {"db": "poweron_billing", "table": "BillingAccount"}},
)
accountId: str = Field(..., description="Foreign key to BillingAccount", json_schema_extra={"label": "Konto-ID"})
periodType: PeriodTypeEnum = Field(..., description="Period type", json_schema_extra={"label": "Periodentyp"})
periodStart: date = Field(..., description="Period start date", json_schema_extra={"label": "Periodenbeginn"})

View file

@ -14,10 +14,7 @@ import uuid
class ChatLog(PowerOnModel):
"""Log entries for chat workflows. User-owned, no mandate context."""
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key", json_schema_extra={"label": "ID"})
workflowId: str = Field(
description="Foreign key to workflow",
json_schema_extra={"label": "Workflow-ID", "fk_target": {"db": "poweron_chat", "table": "ChatWorkflow"}},
)
workflowId: str = Field(description="Foreign key to workflow", json_schema_extra={"label": "Workflow-ID"})
message: str = Field(description="Log message", json_schema_extra={"label": "Nachricht"})
type: str = Field(description="Log type (info, warning, error, etc.)", json_schema_extra={"label": "Typ"})
timestamp: float = Field(default_factory=getUtcTimestamp,
@ -35,14 +32,8 @@ class ChatLog(PowerOnModel):
class ChatDocument(PowerOnModel):
"""Documents attached to chat messages. User-owned, no mandate context."""
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key", json_schema_extra={"label": "ID"})
messageId: str = Field(
description="Foreign key to message",
json_schema_extra={"label": "Nachrichten-ID", "fk_target": {"db": "poweron_chat", "table": "ChatMessage"}},
)
fileId: str = Field(
description="Foreign key to file",
json_schema_extra={"label": "Datei-ID", "fk_target": {"db": "poweron_management", "table": "FileItem"}},
)
messageId: str = Field(description="Foreign key to message", json_schema_extra={"label": "Nachrichten-ID"})
fileId: str = Field(description="Foreign key to file", json_schema_extra={"label": "Datei-ID"})
fileName: str = Field(description="Name of the file", json_schema_extra={"label": "Dateiname"})
fileSize: int = Field(description="Size of the file", json_schema_extra={"label": "Dateigröße"})
mimeType: str = Field(description="MIME type of the file", json_schema_extra={"label": "MIME-Typ"})
@ -79,15 +70,8 @@ class ChatContentExtracted(BaseModel):
class ChatMessage(PowerOnModel):
"""Messages in chat workflows. User-owned, no mandate context."""
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key", json_schema_extra={"label": "ID"})
workflowId: str = Field(
description="Foreign key to workflow",
json_schema_extra={"label": "Workflow-ID", "fk_target": {"db": "poweron_chat", "table": "ChatWorkflow"}},
)
parentMessageId: Optional[str] = Field(
None,
description="Parent message ID for threading",
json_schema_extra={"label": "Übergeordnete Nachrichten-ID", "fk_target": {"db": "poweron_chat", "table": "ChatMessage"}},
)
workflowId: str = Field(description="Foreign key to workflow", json_schema_extra={"label": "Workflow-ID"})
parentMessageId: Optional[str] = Field(None, description="Parent message ID for threading", json_schema_extra={"label": "Übergeordnete Nachrichten-ID"})
documents: List[ChatDocument] = Field(default_factory=list, description="Associated documents", json_schema_extra={"label": "Dokumente"})
documentsLabel: Optional[str] = Field(None, description="Label for the set of documents", json_schema_extra={"label": "Dokumenten-Label"})
message: Optional[str] = Field(None, description="Message content", json_schema_extra={"label": "Nachricht"})
@ -117,32 +101,7 @@ class WorkflowModeEnum(str, Enum):
class ChatWorkflow(PowerOnModel):
"""Chat workflow container. User-owned, no mandate context."""
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key", json_schema_extra={"label": "ID", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
featureInstanceId: Optional[str] = Field(
None,
description="Feature instance ID for multi-tenancy isolation",
json_schema_extra={
"label": "Feature-Instanz-ID",
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": False,
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
},
)
linkedWorkflowId: Optional[str] = Field(
None,
description=(
"Optional foreign key linking this chat to an entity outside the "
"ChatWorkflow table (e.g. an Automation2Workflow in the GraphicalEditor "
"AI editor chat). NULL for the default workspace chats. Combined with "
"featureInstanceId this gives a 1:1 relation entity ↔ chat per feature."
),
json_schema_extra={
"label": "Verknüpfter Workflow",
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": False,
},
)
featureInstanceId: Optional[str] = Field(None, description="Feature instance ID for multi-tenancy isolation", json_schema_extra={"label": "Feature-Instanz-ID", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
status: str = Field(default="running", description="Current status of the workflow", json_schema_extra={"frontend_type": "select", "frontend_readonly": False, "frontend_required": False, "frontend_options": [
{"value": "running", "label": "Running"},
{"value": "completed", "label": "Completed"},
@ -210,11 +169,7 @@ class UserInputRequest(BaseModel):
prompt: str = Field(description="Prompt for the user", json_schema_extra={"label": "Eingabeaufforderung"})
listFileId: List[str] = Field(default_factory=list, description="List of file IDs", json_schema_extra={"label": "Datei-IDs"})
userLanguage: str = Field(default="en", description="User's preferred language", json_schema_extra={"label": "Benutzersprache"})
workflowId: Optional[str] = Field(
None,
description="Optional ID of the workflow to continue",
json_schema_extra={"label": "Workflow-ID", "fk_target": {"db": "poweron_chat", "table": "ChatWorkflow"}},
)
workflowId: Optional[str] = Field(None, description="Optional ID of the workflow to continue", json_schema_extra={"label": "Workflow-ID"})
allowedProviders: Optional[List[str]] = Field(None, description="List of allowed AI providers (multiselect)", json_schema_extra={"label": "Erlaubte Anbieter"})
@i18nModel("Aktions-Dokument")
@ -352,11 +307,7 @@ class ChatTaskResult(BaseModel):
@i18nModel("Aufgabe")
class TaskItem(BaseModel):
id: str = Field(..., description="Task ID", json_schema_extra={"label": "Aufgaben-ID"})
workflowId: str = Field(
...,
description="Workflow ID",
json_schema_extra={"label": "Workflow-ID", "fk_target": {"db": "poweron_chat", "table": "ChatWorkflow"}},
)
workflowId: str = Field(..., description="Workflow ID", json_schema_extra={"label": "Workflow-ID"})
userInput: str = Field(..., description="User input that triggered the task", json_schema_extra={"label": "Benutzereingabe"})
status: TaskStatus = Field(default=TaskStatus.PENDING, description="Task status", json_schema_extra={"label": "Status"})
error: Optional[str] = Field(None, description="Error message if task failed", json_schema_extra={"label": "Fehler"})

View file

@ -32,10 +32,7 @@ class ContentContextRef(BaseModel):
class ContentObject(BaseModel):
"""Scalar content object extracted from a file. No AI involved."""
id: str = Field(default_factory=lambda: str(uuid.uuid4()))
fileId: str = Field(
description="FK to the physical file",
json_schema_extra={"fk_target": {"db": "poweron_management", "table": "FileItem"}},
)
fileId: str = Field(description="FK to the physical file")
contentType: str = Field(description="text, image, videostream, audiostream, other")
data: str = Field(default="", description="Content data (text, base64, URL)")
contextRef: ContentContextRef = Field(default_factory=ContentContextRef)

View file

@ -23,7 +23,7 @@ class DataSource(PowerOnModel):
)
connectionId: str = Field(
description="FK to UserConnection",
json_schema_extra={"label": "Verbindungs-ID", "fk_target": {"db": "poweron_app", "table": "UserConnection"}},
json_schema_extra={"label": "Verbindungs-ID"},
)
sourceType: str = Field(
description="sharepointFolder, googleDriveFolder, outlookFolder, ftpFolder, clickupList (path under /team/...)",
@ -45,17 +45,17 @@ class DataSource(PowerOnModel):
featureInstanceId: Optional[str] = Field(
default=None,
description="Scoped to feature instance",
json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
json_schema_extra={"label": "Feature-Instanz"},
)
mandateId: Optional[str] = Field(
default=None,
description="Mandate scope",
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate"}},
json_schema_extra={"label": "Mandanten-ID"},
)
userId: str = Field(
default="",
description="Owner user ID",
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "User"}},
json_schema_extra={"label": "Benutzer-ID"},
)
autoSync: bool = Field(
default=False,

View file

@ -18,7 +18,6 @@ class ContentExtracted(BaseModel):
id: str = Field(description="Extraction id or source document id")
parts: List[ContentPart] = Field(default_factory=list, description="List of extracted parts")
summary: Optional[Dict[str, Any]] = Field(default=None, description="Optional extraction summary")
udm: Optional[Any] = Field(default=None, description="Optional UdmDocument (when outputFormat is udm or both)")
class ChunkResult(BaseModel):
@ -76,19 +75,6 @@ class ExtractionOptions(BaseModel):
# Core extraction parameters
prompt: str = Field(default="", description="Extraction prompt for AI processing")
processDocumentsIndividually: bool = Field(default=True, description="Process each document separately")
outputFormat: Literal["parts", "udm", "both"] = Field(
default="parts",
description="Return flat parts only, UDM tree only, or both (parts always populated; udm when udm or both)",
)
outputDetail: Literal["full", "structure", "references"] = Field(
default="full",
description="Extraction detail: full inline data, skeleton without raw payloads, or file references only",
)
lazyContainer: bool = Field(
default=False,
description="For archives: emit file entries with metadata only (no nested extraction)",
)
# Image processing parameters
imageMaxPixels: int = Field(default=1024 * 1024, ge=1, description="Maximum pixels for image processing")

View file

@ -6,7 +6,7 @@ A FeatureDataSource links a FeatureInstance table (DATA_OBJECT) to a workspace
so the agent can query structured feature data (e.g. TrusteePosition rows).
"""
from typing import Dict, List, Optional
from typing import Dict, Optional
from pydantic import BaseModel, Field
from modules.datamodels.datamodelBase import PowerOnModel
from modules.shared.i18nRegistry import i18nModel
@ -23,11 +23,11 @@ class FeatureDataSource(PowerOnModel):
)
featureInstanceId: str = Field(
description="FK to FeatureInstance",
json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
json_schema_extra={"label": "Feature-Instanz"},
)
featureCode: str = Field(
description="Feature code (e.g. trustee, commcoach)",
json_schema_extra={"label": "Feature", "fk_target": {"db": "poweron_app", "table": "Feature", "column": "code"}},
json_schema_extra={"label": "Feature"},
)
tableName: str = Field(
description="Table name from DATA_OBJECTS meta (e.g. TrusteePosition)",
@ -44,16 +44,16 @@ class FeatureDataSource(PowerOnModel):
mandateId: str = Field(
default="",
description="Mandate scope",
json_schema_extra={"label": "Mandant", "fk_target": {"db": "poweron_app", "table": "Mandate"}},
json_schema_extra={"label": "Mandant"},
)
userId: str = Field(
default="",
description="Owner user ID",
json_schema_extra={"label": "Benutzer", "fk_target": {"db": "poweron_app", "table": "User"}},
json_schema_extra={"label": "Benutzer"},
)
workspaceInstanceId: str = Field(
description="Workspace feature instance where this source is used",
json_schema_extra={"label": "Workspace", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
description="Workspace instance where this source is used",
json_schema_extra={"label": "Workspace"},
)
scope: str = Field(
default="personal",
@ -70,11 +70,6 @@ class FeatureDataSource(PowerOnModel):
description="Whether this data source should be neutralized before AI processing",
json_schema_extra={"label": "Neutralisieren", "frontend_type": "checkbox", "frontend_readonly": False, "frontend_required": False},
)
neutralizeFields: Optional[List[str]] = Field(
default=None,
description="Column names whose values are replaced with placeholders before AI processing",
json_schema_extra={"label": "Zu neutralisierende Felder", "frontend_type": "multiselect", "frontend_readonly": False, "frontend_required": False},
)
recordFilter: Optional[Dict[str, str]] = Field(
default=None,
description="Record-level filter applied when querying this table, e.g. {'sessionId': 'abc-123'}",

View file

@ -38,23 +38,11 @@ class FeatureInstance(PowerOnModel):
)
featureCode: str = Field(
description="FK -> Feature.code",
json_schema_extra={
"label": "Feature",
"frontend_type": "select",
"frontend_readonly": True,
"frontend_required": True,
"fk_target": {"db": "poweron_app", "table": "Feature", "column": "code"},
},
json_schema_extra={"label": "Feature", "frontend_type": "select", "frontend_readonly": True, "frontend_required": True}
)
mandateId: str = Field(
description="FK -> Mandate.id (CASCADE DELETE)",
json_schema_extra={
"label": "Mandant",
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": True,
"fk_target": {"db": "poweron_app", "table": "Mandate"},
},
json_schema_extra={"label": "Mandant", "frontend_type": "text", "frontend_readonly": True, "frontend_required": True}
)
label: str = Field(
default="",

View file

@ -24,59 +24,15 @@ class FileFolder(PowerOnModel):
parentId: Optional[str] = Field(
default=None,
description="Parent folder ID (null = root)",
json_schema_extra={
"label": "Uebergeordneter Ordner",
"frontend_type": "text",
"frontend_readonly": False,
"frontend_required": False,
"fk_target": {"db": "poweron_management", "table": "FileFolder"},
},
json_schema_extra={"label": "Uebergeordneter Ordner", "frontend_type": "text", "frontend_readonly": False, "frontend_required": False},
)
mandateId: Optional[str] = Field(
default=None,
description="Mandate context",
json_schema_extra={
"label": "Mandanten-ID",
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": False,
"fk_target": {"db": "poweron_app", "table": "Mandate"},
},
json_schema_extra={"label": "Mandanten-ID", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False},
)
featureInstanceId: Optional[str] = Field(
default=None,
description="Feature instance context",
json_schema_extra={
"label": "Feature-Instanz-ID",
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": False,
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
},
)
scope: str = Field(
default="personal",
description="Data visibility scope: personal, featureInstance, mandate, global. Inherited by files in this folder.",
json_schema_extra={
"label": "Sichtbarkeit",
"frontend_type": "select",
"frontend_readonly": False,
"frontend_required": False,
"frontend_options": [
{"value": "personal", "label": "Persönlich"},
{"value": "featureInstance", "label": "Feature-Instanz"},
{"value": "mandate", "label": "Mandant"},
{"value": "global", "label": "Global"},
],
},
)
neutralize: bool = Field(
default=False,
description="Whether files in this folder should be neutralized before AI processing. Inherited by new/moved files.",
json_schema_extra={
"label": "Neutralisieren",
"frontend_type": "checkbox",
"frontend_readonly": False,
"frontend_required": False,
},
json_schema_extra={"label": "Feature-Instanz-ID", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False},
)

View file

@ -33,7 +33,6 @@ class FileItem(PowerOnModel):
"frontend_fk_source": "/api/mandates/",
"frontend_fk_display_field": "label",
"fk_model": "Mandate",
"fk_target": {"db": "poweron_app", "table": "Mandate"},
},
)
featureInstanceId: Optional[str] = Field(
@ -47,7 +46,6 @@ class FileItem(PowerOnModel):
"frontend_fk_source": "/api/features/instances",
"frontend_fk_display_field": "label",
"fk_model": "FeatureInstance",
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
},
)
mimeType: str = Field(
@ -70,13 +68,7 @@ class FileItem(PowerOnModel):
folderId: Optional[str] = Field(
default=None,
description="ID of the parent folder",
json_schema_extra={
"label": "Ordner-ID",
"frontend_type": "text",
"frontend_readonly": False,
"frontend_required": False,
"fk_target": {"db": "poweron_management", "table": "FileFolder"},
},
json_schema_extra={"label": "Ordner-ID", "frontend_type": "text", "frontend_readonly": False, "frontend_required": False},
)
description: Optional[str] = Field(
default=None,

View file

@ -32,24 +32,12 @@ class Invitation(PowerOnModel):
mandateId: str = Field(
description="FK → Mandate.id - Target mandate for the invitation",
json_schema_extra={
"label": "Mandant",
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": True,
"fk_target": {"db": "poweron_app", "table": "Mandate"},
},
json_schema_extra={"label": "Mandant", "frontend_type": "text", "frontend_readonly": True, "frontend_required": True}
)
featureInstanceId: Optional[str] = Field(
default=None,
description="Optional FK → FeatureInstance.id - Direct access to specific feature",
json_schema_extra={
"label": "Feature-Instanz",
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": False,
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
},
json_schema_extra={"label": "Feature-Instanz", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False}
)
roleIds: List[str] = Field(
default_factory=list,
@ -75,13 +63,7 @@ class Invitation(PowerOnModel):
usedBy: Optional[str] = Field(
default=None,
description="User ID of the person who used the invitation",
json_schema_extra={
"label": "Verwendet von",
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": False,
"fk_target": {"db": "poweron_app", "table": "User"},
},
json_schema_extra={"label": "Verwendet von", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False}
)
usedAt: Optional[float] = Field(
default=None,

View file

@ -30,17 +30,17 @@ class FileContentIndex(PowerOnModel):
)
userId: str = Field(
description="Owner user ID",
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "User"}},
json_schema_extra={"label": "Benutzer-ID"},
)
featureInstanceId: str = Field(
default="",
description="Feature instance scope",
json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
json_schema_extra={"label": "Feature-Instanz-ID"},
)
mandateId: str = Field(
default="",
description="Mandate scope",
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate"}},
json_schema_extra={"label": "Mandanten-ID"},
)
fileName: str = Field(
description="Original file name",
@ -116,16 +116,16 @@ class ContentChunk(PowerOnModel):
)
fileId: str = Field(
description="FK to the source file",
json_schema_extra={"label": "Datei-ID", "fk_target": {"db": "poweron_management", "table": "FileItem"}},
json_schema_extra={"label": "Datei-ID"},
)
userId: str = Field(
description="Owner user ID",
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "User"}},
json_schema_extra={"label": "Benutzer-ID"},
)
featureInstanceId: str = Field(
default="",
description="Feature instance scope",
json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
json_schema_extra={"label": "Feature-Instanz-ID"},
)
contentType: str = Field(
description="Content type: text, image, videostream, audiostream, other",
@ -214,16 +214,16 @@ class WorkflowMemory(PowerOnModel):
)
workflowId: str = Field(
description="FK to the workflow",
json_schema_extra={"label": "Workflow-ID", "fk_target": {"db": "poweron_chat", "table": "ChatWorkflow"}},
json_schema_extra={"label": "Workflow-ID"},
)
userId: str = Field(
description="Owner user ID",
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "User"}},
json_schema_extra={"label": "Benutzer-ID"},
)
featureInstanceId: str = Field(
default="",
description="Feature instance scope",
json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
json_schema_extra={"label": "Feature-Instanz-ID"},
)
key: str = Field(
description="Key identifier (e.g. 'entity:companyName')",

View file

@ -34,7 +34,6 @@ class UserMandate(PowerOnModel):
"frontend_fk_source": "/api/users/",
"frontend_fk_display_field": "username",
"fk_model": "User",
"fk_target": {"db": "poweron_app", "table": "User"},
},
)
mandateId: str = Field(
@ -47,7 +46,6 @@ class UserMandate(PowerOnModel):
"frontend_fk_source": "/api/mandates/",
"frontend_fk_display_field": "label",
"fk_model": "Mandate",
"fk_target": {"db": "poweron_app", "table": "Mandate"},
},
)
enabled: bool = Field(
@ -70,27 +68,11 @@ class FeatureAccess(PowerOnModel):
)
userId: str = Field(
description="FK → User.id (CASCADE DELETE)",
json_schema_extra={
"label": "Benutzer",
"frontend_type": "select",
"frontend_readonly": False,
"frontend_required": True,
"frontend_fk_source": "/api/users/",
"frontend_fk_display_field": "username",
"fk_target": {"db": "poweron_app", "table": "User"},
},
json_schema_extra={"label": "Benutzer", "frontend_type": "select", "frontend_readonly": False, "frontend_required": True, "frontend_fk_source": "/api/users/", "frontend_fk_display_field": "username"}
)
featureInstanceId: str = Field(
description="FK → FeatureInstance.id (CASCADE DELETE)",
json_schema_extra={
"label": "Feature-Instanz",
"frontend_type": "select",
"frontend_readonly": False,
"frontend_required": True,
"frontend_fk_source": "/api/features/instances",
"frontend_fk_display_field": "label",
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
},
json_schema_extra={"label": "Feature-Instanz", "frontend_type": "select", "frontend_readonly": False, "frontend_required": True, "frontend_fk_source": "/api/features/instances", "frontend_fk_display_field": "label"}
)
enabled: bool = Field(
default=True,
@ -112,25 +94,11 @@ class UserMandateRole(PowerOnModel):
)
userMandateId: str = Field(
description="FK → UserMandate.id (CASCADE DELETE)",
json_schema_extra={
"label": "Benutzer-Mandant",
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": True,
"fk_target": {"db": "poweron_app", "table": "UserMandate"},
},
json_schema_extra={"label": "Benutzer-Mandant", "frontend_type": "text", "frontend_readonly": True, "frontend_required": True}
)
roleId: str = Field(
description="FK → Role.id (CASCADE DELETE)",
json_schema_extra={
"label": "Rolle",
"frontend_type": "select",
"frontend_readonly": False,
"frontend_required": True,
"frontend_fk_source": "/api/rbac/roles",
"frontend_fk_display_field": "roleLabel",
"fk_target": {"db": "poweron_app", "table": "Role"},
},
json_schema_extra={"label": "Rolle", "frontend_type": "select", "frontend_readonly": False, "frontend_required": True, "frontend_fk_source": "/api/rbac/roles", "frontend_fk_display_field": "roleLabel"}
)
@ -147,23 +115,9 @@ class FeatureAccessRole(PowerOnModel):
)
featureAccessId: str = Field(
description="FK → FeatureAccess.id (CASCADE DELETE)",
json_schema_extra={
"label": "Feature-Zugang",
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": True,
"fk_target": {"db": "poweron_app", "table": "FeatureAccess"},
},
json_schema_extra={"label": "Feature-Zugang", "frontend_type": "text", "frontend_readonly": True, "frontend_required": True}
)
roleId: str = Field(
description="FK → Role.id (CASCADE DELETE)",
json_schema_extra={
"label": "Rolle",
"frontend_type": "select",
"frontend_readonly": False,
"frontend_required": True,
"frontend_fk_source": "/api/rbac/roles",
"frontend_fk_display_field": "roleLabel",
"fk_target": {"db": "poweron_app", "table": "Role"},
},
json_schema_extra={"label": "Rolle", "frontend_type": "select", "frontend_readonly": False, "frontend_required": True, "frontend_fk_source": "/api/rbac/roles", "frontend_fk_display_field": "roleLabel"}
)

View file

@ -64,7 +64,6 @@ class MessagingSubscription(PowerOnModel):
"frontend_readonly": True,
"frontend_required": False,
"label": "Mandanten-ID",
"fk_target": {"db": "poweron_app", "table": "Mandate"},
},
)
featureInstanceId: str = Field(
@ -74,7 +73,6 @@ class MessagingSubscription(PowerOnModel):
"frontend_readonly": True,
"frontend_required": False,
"label": "Feature-Instanz-ID",
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
},
)
description: Optional[str] = Field(
@ -131,7 +129,6 @@ class MessagingSubscriptionRegistration(BaseModel):
"frontend_readonly": True,
"frontend_required": False,
"label": "Mandanten-ID",
"fk_target": {"db": "poweron_app", "table": "Mandate"},
},
)
featureInstanceId: str = Field(
@ -141,7 +138,6 @@ class MessagingSubscriptionRegistration(BaseModel):
"frontend_readonly": True,
"frontend_required": False,
"label": "Feature-Instanz-ID",
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
},
)
subscriptionId: str = Field(
@ -160,7 +156,6 @@ class MessagingSubscriptionRegistration(BaseModel):
"frontend_readonly": True,
"frontend_required": False,
"label": "Benutzer-ID",
"fk_target": {"db": "poweron_app", "table": "User"},
},
)
channel: MessagingChannel = Field(
@ -249,7 +244,6 @@ class MessagingDelivery(BaseModel):
"frontend_readonly": True,
"frontend_required": False,
"label": "Benutzer-ID",
"fk_target": {"db": "poweron_app", "table": "User"},
},
)
channel: MessagingChannel = Field(

View file

@ -60,13 +60,7 @@ class UserNotification(PowerOnModel):
)
userId: str = Field(
description="Target user ID for this notification",
json_schema_extra={
"label": "Benutzer",
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": True,
"fk_target": {"db": "poweron_app", "table": "User"},
},
json_schema_extra={"label": "Benutzer", "frontend_type": "text", "frontend_readonly": True, "frontend_required": True}
)
type: NotificationType = Field(

View file

@ -57,30 +57,12 @@ class Role(PowerOnModel):
mandateId: Optional[str] = Field(
default=None,
description="FK → Mandate.id (CASCADE DELETE). Null = Global/Template role.",
json_schema_extra={
"label": "Mandant",
"frontend_type": "select",
"frontend_readonly": True,
"frontend_visible": True,
"frontend_required": False,
"frontend_fk_source": "/api/mandates/",
"frontend_fk_display_field": "label",
"fk_target": {"db": "poweron_app", "table": "Mandate"},
},
json_schema_extra={"label": "Mandant", "frontend_type": "select", "frontend_readonly": True, "frontend_visible": True, "frontend_required": False, "frontend_fk_source": "/api/mandates/", "frontend_fk_display_field": "label"}
)
featureInstanceId: Optional[str] = Field(
default=None,
description="FK → FeatureInstance.id (CASCADE DELETE). Null = Mandate-level or Global role.",
json_schema_extra={
"label": "Feature-Instanz",
"frontend_type": "select",
"frontend_readonly": True,
"frontend_visible": True,
"frontend_required": False,
"frontend_fk_source": "/api/features/instances",
"frontend_fk_display_field": "label",
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
},
json_schema_extra={"label": "Feature-Instanz", "frontend_type": "select", "frontend_readonly": True, "frontend_visible": True, "frontend_required": False, "frontend_fk_source": "/api/features/instances", "frontend_fk_display_field": "label"}
)
featureCode: Optional[str] = Field(
default=None,
@ -110,15 +92,7 @@ class AccessRule(PowerOnModel):
)
roleId: str = Field(
description="FK → Role.id (CASCADE DELETE!)",
json_schema_extra={
"label": "Rolle",
"frontend_type": "select",
"frontend_readonly": True,
"frontend_required": True,
"frontend_fk_source": "/api/rbac/roles",
"frontend_fk_display_field": "roleLabel",
"fk_target": {"db": "poweron_app", "table": "Role"},
},
json_schema_extra={"label": "Rolle", "frontend_type": "select", "frontend_readonly": True, "frontend_required": True, "frontend_fk_source": "/api/rbac/roles", "frontend_fk_display_field": "roleLabel"}
)
context: AccessRuleContext = Field(
description="Context type: DATA (database), UI (interface), RESOURCE (system resources). IMMUTABLE!",

View file

@ -47,7 +47,7 @@ class Token(PowerOnModel):
)
userId: str = Field(
...,
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "User"}},
json_schema_extra={"label": "Benutzer-ID"},
)
authority: AuthAuthority = Field(
...,
@ -56,7 +56,7 @@ class Token(PowerOnModel):
connectionId: Optional[str] = Field(
None,
description="ID of the connection this token belongs to",
json_schema_extra={"label": "Verbindungs-ID", "fk_target": {"db": "poweron_app", "table": "UserConnection"}},
json_schema_extra={"label": "Verbindungs-ID"},
)
tokenPurpose: Optional[TokenPurpose] = Field(
default=None,
@ -92,7 +92,7 @@ class Token(PowerOnModel):
revokedBy: Optional[str] = Field(
None,
description="User ID who revoked the token (admin/self)",
json_schema_extra={"label": "Widerrufen von", "fk_target": {"db": "poweron_app", "table": "User"}},
json_schema_extra={"label": "Widerrufen von"},
)
reason: Optional[str] = Field(
None,
@ -134,13 +134,7 @@ class AuthEvent(PowerOnModel):
)
userId: str = Field(
description="ID of the user this event belongs to",
json_schema_extra={
"label": "Benutzer-ID",
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": True,
"fk_target": {"db": "poweron_app", "table": "User"},
},
json_schema_extra={"label": "Benutzer-ID", "frontend_type": "text", "frontend_readonly": True, "frontend_required": True},
)
eventType: str = Field(
description="Type of authentication event (e.g., 'login', 'logout', 'token_refresh')",

View file

@ -207,7 +207,7 @@ class MandateSubscription(PowerOnModel):
mandateId: str = Field(
...,
description="Foreign key to Mandate",
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate"}},
json_schema_extra={"label": "Mandanten-ID"},
)
planKey: str = Field(
...,

View file

@ -6,15 +6,7 @@ UAM models: User, Mandate, UserConnection.
Multi-Tenant Design:
- User gehört NICHT direkt zu einem Mandanten
- Zugehörigkeit wird über UserMandate gesteuert (siehe datamodelMembership.py)
- Zwei orthogonale Plattform-Autoritäts-Flags:
* isSysAdmin Infrastruktur-Operator (Logs, Tokens, DB-Health,
i18n-Master, Registry). RBAC-Engine-Bypass.
KEIN Cross-Mandate-Governance.
* isPlatformAdmin Cross-Mandate-Governance (User-/Mandate-/RBAC-/
Feature-Verwaltung über alle Mandanten).
KEIN RBAC-Bypass.
Beide einzeln vergebbar, einzeln auditierbar.
Siehe wiki/c-work/4-done/2026-04-sysadmin-authority-split.md
- isSysAdmin ist globales Admin-Flag für System-Zugriff (KEIN Daten-Zugriff!)
"""
import uuid
@ -23,7 +15,6 @@ from enum import Enum
from pydantic import BaseModel, Field, EmailStr, field_validator, computed_field
from modules.datamodels.datamodelBase import PowerOnModel
from modules.shared.i18nRegistry import i18nModel, normalizePrimaryLanguageTag
from modules.shared.mandateNameUtils import MANDATE_NAME_MAX_LEN, MANDATE_NAME_MIN_LEN
from modules.shared.timeUtils import getUtcTimestamp
@ -75,11 +66,6 @@ class Mandate(PowerOnModel):
"""
Mandate (Mandant/Tenant) model.
Ein Mandant ist ein isolierter Bereich für Daten und Berechtigungen.
Semantik:
- ``name`` (Kurzzeichen): plattformweit eindeutiger, stabiler technischer Code (Slug),
Audit-/Referenz-Identifier. Nur Kleinbuchstaben, Ziffern und ``-`` (Länge 232).
- ``label`` (Voller Name): Anzeigename im UI, frei änderbar unabhängig vom Slug.
"""
id: str = Field(
default_factory=lambda: str(uuid.uuid4()),
@ -87,26 +73,13 @@ class Mandate(PowerOnModel):
json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_visible": False, "frontend_required": False, "label": "ID"},
)
name: str = Field(
description="Unique stable mandate code (slug); lowercase, digits, hyphen segments only.",
min_length=MANDATE_NAME_MIN_LEN,
max_length=MANDATE_NAME_MAX_LEN,
pattern=r"^[a-z0-9]+(-[a-z0-9]+)*$",
json_schema_extra={
"frontend_type": "slug",
"frontend_readonly": False,
"frontend_required": True,
"label": "Kurzzeichen",
},
description="Name of the mandate",
json_schema_extra={"frontend_type": "text", "frontend_readonly": False, "frontend_required": True, "label": "Name"},
)
label: str = Field(
description="Human-readable mandate name shown in the UI (Voller Name).",
min_length=1,
json_schema_extra={
"frontend_type": "text",
"frontend_readonly": False,
"frontend_required": True,
"label": "Voller Name",
},
label: Optional[str] = Field(
default=None,
description="Display label of the mandate",
json_schema_extra={"frontend_type": "text", "frontend_readonly": False, "frontend_required": False, "label": "Label"},
)
enabled: bool = Field(
default=True,
@ -132,30 +105,6 @@ class Mandate(PowerOnModel):
return False
return v
@field_validator("name", mode="before")
@classmethod
def _stripName(cls, v):
if v is None:
return ""
if isinstance(v, str):
return v.strip()
return v
@field_validator("label", mode="before")
@classmethod
def _coerceLabel(cls, v):
if v is None:
return ""
return v
@field_validator("label")
@classmethod
def _validateMandateLabel(cls, v: str) -> str:
s = v.strip()
if len(s) < 1:
raise ValueError("Mandate Voller Name (label) must not be empty.")
return s
@i18nModel("Benutzerverbindung")
class UserConnection(PowerOnModel):
id: str = Field(
@ -165,13 +114,7 @@ class UserConnection(PowerOnModel):
)
userId: str = Field(
description="ID of the user this connection belongs to",
json_schema_extra={
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": False,
"label": "Benutzer-ID",
"fk_target": {"db": "poweron_app", "table": "User"},
},
json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False, "label": "Benutzer-ID"},
)
authority: AuthAuthority = Field(
description="Authentication authority",
@ -248,6 +191,7 @@ class UserConnection(PowerOnModel):
json_schema_extra={"frontend_type": "list", "frontend_readonly": True, "frontend_required": False, "label": "Gewährte Berechtigungen"},
)
@computed_field
@computed_field
@property
def connectionReference(self) -> str:
@ -275,11 +219,8 @@ class User(PowerOnModel):
Multi-Tenant Design:
- User gehört NICHT direkt zu einem Mandanten
- Zugehörigkeit wird über UserMandate gesteuert (siehe datamodelMembership.py)
- Rollen werden über UserMandateRole gesteuert (mandanten-scoped)
- Plattform-Autorität via zwei orthogonalen Flags:
* isSysAdmin Infrastruktur (Bypass der RBAC-Engine, KEIN
Cross-Mandate-Governance)
* isPlatformAdmin Cross-Mandate-Governance (KEIN RBAC-Bypass)
- Rollen werden über UserMandateRole gesteuert
- isSysAdmin = System-Zugriff, KEIN Daten-Zugriff
"""
id: str = Field(
default_factory=lambda: str(uuid.uuid4()),
@ -337,15 +278,10 @@ class User(PowerOnModel):
isSysAdmin: bool = Field(
default=False,
description=(
"Infrastructure/System Operator flag. Erlaubt RBAC-Engine-Bypass "
"und Zugriff auf Infrastruktur-Operationen (Logs, Tokens, DB-Health, "
"i18n-Master, Registry). Gibt KEIN Cross-Mandate-Governance-Recht "
"(dafür ist isPlatformAdmin zuständig)."
),
description="Global SysAdmin flag. SysAdmin = System-Zugriff, KEIN Daten-Zugriff!",
json_schema_extra={"frontend_type": "checkbox", "frontend_readonly": False, "frontend_required": False, "label": "System-Admin"},
)
@field_validator('isSysAdmin', mode='before')
@classmethod
def _coerceIsSysAdmin(cls, v):
@ -353,25 +289,6 @@ class User(PowerOnModel):
if v is None:
return False
return v
isPlatformAdmin: bool = Field(
default=False,
description=(
"Platform/Cross-Mandate Governance flag. Erlaubt mandanten-übergreifende "
"Verwaltungsoperationen (User-/Mandate-/RBAC-/Feature-Registry). "
"KEIN RBAC-Engine-Bypass und KEIN impliziter Zugriff auf Mandanten-Daten."
),
json_schema_extra={"frontend_type": "checkbox", "frontend_readonly": False, "frontend_required": False, "label": "Plattform-Admin"},
)
@field_validator('isPlatformAdmin', mode='before')
@classmethod
def _coerceIsPlatformAdmin(cls, v):
"""Konvertiert None zu False (für bestehende DB-Einträge ohne isPlatformAdmin Feld)."""
if v is None:
return False
return v
authenticationAuthority: AuthAuthority = Field(
default=AuthAuthority.LOCAL,
@ -452,14 +369,11 @@ class UserVoicePreferences(PowerOnModel):
description="Primary key",
json_schema_extra={"label": "ID"},
)
userId: str = Field(
description="User ID",
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "User"}},
)
userId: str = Field(description="User ID", json_schema_extra={"label": "Benutzer-ID"})
mandateId: Optional[str] = Field(
default=None,
description="Mandate scope (None = global for user)",
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate"}},
json_schema_extra={"label": "Mandanten-ID"},
)
sttLanguage: str = Field(
default="de-DE",

View file

@ -1,316 +0,0 @@
# Copyright (c) 2025 Patrick Motsch
# All rights reserved.
"""Unified Document Model (UDM) — hierarchical document tree and ContentPart bridge."""
from __future__ import annotations
import uuid
from typing import Any, Dict, List, Literal, Optional, Tuple, Union
from pydantic import BaseModel, Field
from modules.datamodels.datamodelExtraction import ContentExtracted, ContentPart
class UdmMetadata(BaseModel):
    """Descriptive metadata attached to documents, structural nodes and blocks."""
    title: Optional[str] = None
    author: Optional[str] = None
    createdAt: Optional[str] = None   # timestamp string; format not enforced here — presumably ISO-8601, confirm with producers
    modifiedAt: Optional[str] = None  # timestamp string; same caveat as createdAt
    sourcePath: str = ""              # origin path of the content ("" when unknown)
    tags: List[str] = Field(default_factory=list)
    custom: Dict[str, Any] = Field(default_factory=dict)  # free-form extension bag for extractor-specific keys
class UdmBoundingBox(BaseModel):
    """Rectangular region occupied by a content block on its page/slide."""
    x: float = 0.0
    y: float = 0.0
    width: float = 0.0
    height: float = 0.0
    unit: Literal["px", "pt", "mm"] = "pt"  # measurement unit for all four coordinates
class UdmPosition(BaseModel):
    """Ordering/location of a block within its structural parent."""
    index: int = 0               # 0-based order among sibling blocks
    page: Optional[int] = None   # 1-based page number (see _contentPartToBlock, which adds 1)
    row: Optional[int] = None
    col: Optional[int] = None
    bbox: Optional[UdmBoundingBox] = None
class UdmContentBlock(BaseModel):
    """Leaf node of the UDM tree: one piece of extracted content."""
    id: str
    contentType: Literal["text", "image", "table", "code", "media", "link", "formula"]
    raw: str = ""                    # inline payload; cleared by the structure/references output modes
    fileRef: Optional[str] = None    # external file reference that can stand in for `raw`
    mimeType: Optional[str] = None
    language: Optional[str] = None   # NOTE(review): presumably language of code/text payload — confirm with callers
    attributes: Dict[str, Any] = Field(default_factory=dict)
    position: UdmPosition = Field(default_factory=lambda: UdmPosition(index=0))
    metadata: UdmMetadata = Field(default_factory=UdmMetadata)
class UdmStructuralNode(BaseModel):
    """Intermediate node grouping content blocks: a page, section, slide or sheet."""
    id: str
    role: Literal["page", "section", "slide", "sheet"]
    index: int                 # structural index (page/slide number) or group order for sections
    label: Optional[str] = None
    metadata: UdmMetadata = Field(default_factory=UdmMetadata)
    children: List[UdmContentBlock] = Field(default_factory=list)
class UdmDocument(BaseModel):
    """Root of a single document's UDM tree (children are structural nodes)."""
    id: str
    role: Literal["document"] = "document"
    sourceType: Literal["pdf", "docx", "pptx", "xlsx", "html", "binary", "unknown"] = "unknown"
    sourcePath: str = ""
    metadata: UdmMetadata = Field(default_factory=UdmMetadata)
    children: List[UdmStructuralNode] = Field(default_factory=list)
class UdmArchive(BaseModel):
    """Container for documents (or nested archives) extracted from an archive file."""
    id: str
    role: Literal["archive"] = "archive"
    sourceType: Literal["zip", "tar", "gz", "unknown"] = "unknown"
    sourcePath: str = ""
    metadata: UdmMetadata = Field(default_factory=UdmMetadata)
    children: List[Union[UdmArchive, UdmDocument]] = Field(default_factory=list)
def _newId() -> str:
return str(uuid.uuid4())
def _mapTypeGroupToContentType(typeGroup: str) -> Literal["text", "image", "table", "code", "media", "link", "formula"]:
if typeGroup == "image":
return "image"
if typeGroup == "table":
return "table"
if typeGroup in ("code",):
return "code"
if typeGroup in ("binary", "audiostream", "videostream"):
return "media"
if typeGroup in ("structure", "text", "container"):
return "text"
return "text"
def _contentPartToBlock(part: ContentPart, blockIndex: int) -> UdmContentBlock:
    """Convert one flat ContentPart into a UdmContentBlock.

    Page and slide hints are read from part.metadata first, then from its
    nested "contextRef" dict. The 0-based page index is converted to a
    1-based page number for UdmPosition.
    """
    meta = part.metadata or {}
    ctx = meta.get("contextRef") or {}
    if not isinstance(ctx, dict):
        ctx = {}  # defensive: contextRef may hold any JSON value
    page = meta.get("pageIndex")
    if page is None:
        page = ctx.get("pageIndex")
    slide = meta.get("slide_number")
    if slide is None:
        slide = ctx.get("slideIndex")
    pos = UdmPosition(
        index=blockIndex,
        page=int(page) + 1 if isinstance(page, int) else None,  # 0-based -> 1-based
    )
    extraAttr: Dict[str, Any] = {}
    if isinstance(slide, int):
        extraAttr["slideIndex"] = slide
    return UdmContentBlock(
        id=part.id,
        contentType=_mapTypeGroupToContentType(part.typeGroup),
        raw=part.data or "",
        mimeType=part.mimeType or None,
        attributes={
            "typeGroup": part.typeGroup,
            "label": part.label,
            "parentId": part.parentId,
            **({"contextRef": ctx} if ctx else {}),
            **extraAttr,
        },
        position=pos,
        metadata=UdmMetadata(
            sourcePath=meta.get("containerPath", "") or "",
            # keep all original metadata except the already-promoted contextRef
            custom={k: v for k, v in meta.items() if k not in ("contextRef",)},
        ),
    )
def _groupKeyForPart(part: ContentPart) -> Tuple[str, int, str]:
    """Return (role, structural_index, label) for grouping parts into structural nodes.

    Precedence: page index, then slide number (1-based, from metadata), then
    slide index (0-based, from contextRef), then sheet name, then section id.
    Containers without any hint map to a "root" section, everything else to
    "body". Name-keyed groups (sheet/section) use a hash as pseudo-index.
    """
    meta = part.metadata or {}
    ctx = meta.get("contextRef") or {}
    if not isinstance(ctx, dict):
        ctx = {}
    if "pageIndex" in meta or "pageIndex" in ctx:
        pi = meta.get("pageIndex", ctx.get("pageIndex", 0))
        try:
            idx = int(pi)
        except (TypeError, ValueError):
            idx = 0
        return ("page", idx, f"page_{idx + 1}")
    if meta.get("slide_number") is not None:
        try:
            idx = int(meta["slide_number"]) - 1  # slide_number is 1-based
        except (TypeError, ValueError):
            idx = 0
        return ("slide", max(0, idx), f"slide_{idx + 1}")
    if ctx.get("slideIndex") is not None:
        try:
            idx = int(ctx.get("slideIndex", 0))  # slideIndex is 0-based
        except (TypeError, ValueError):
            idx = 0
        return ("slide", max(0, idx), f"slide_{idx + 1}")
    if meta.get("sheet") or ctx.get("sheetName"):
        name = str(meta.get("sheet") or ctx.get("sheetName") or "sheet")
        # NOTE(review): hash() is randomized per process (PYTHONHASHSEED),
        # so sheet/section ordering is not stable across runs — confirm intended.
        return ("sheet", abs(hash(name)) % (10**9), name)
    if ctx.get("sectionId") or meta.get("sectionId"):
        sid = str(ctx.get("sectionId") or meta.get("sectionId") or "section")
        return ("section", abs(hash(sid)) % (10**9), sid)
    if part.typeGroup == "container":
        return ("section", 0, "root")
    return ("section", 0, "body")
_VALID_DOC_SOURCES = frozenset({"pdf", "docx", "pptx", "xlsx", "html", "binary", "unknown"})
def _contentPartsToUdm(extracted: ContentExtracted, sourceType: str, sourcePath: str) -> UdmDocument:
    """Convert flat ContentPart list into a UdmDocument using structural heuristics.

    Root containers are skipped; remaining parts are grouped via
    _groupKeyForPart into page/slide/sheet/section nodes, sorted by
    (role, index, label), and converted to blocks via _contentPartToBlock.
    Returns an empty document when there are no parts.
    """
    parts = list(extracted.parts or [])
    # Coerce unexpected source types to "unknown" so the Literal stays valid.
    st: Literal["pdf", "docx", "pptx", "xlsx", "html", "binary", "unknown"] = (
        sourceType if sourceType in _VALID_DOC_SOURCES else "unknown"  # type: ignore[assignment]
    )
    doc = UdmDocument(
        id=extracted.id or _newId(),
        sourceType=st,
        sourcePath=sourcePath,
        metadata=UdmMetadata(sourcePath=sourcePath),
    )
    if not parts:
        return doc
    # Top-level containers are structural wrappers, not content — drop them.
    skipIds = set()
    rootIds = set()
    for p in parts:
        if p.typeGroup == "container" and p.parentId is None:
            rootIds.add(p.id)
            skipIds.add(p.id)
    contentParts = [p for p in parts if p.id not in skipIds and p.typeGroup != "container"]
    if not contentParts:
        # Fallback: if everything was a container, keep non-root containers.
        for p in parts:
            if p.id not in skipIds:
                contentParts.append(p)
    if not contentParts:
        return doc
    # Group parts into structural buckets and emit nodes in a stable order.
    groups: Dict[Tuple[str, int, str], List[ContentPart]] = {}
    for p in contentParts:
        key = _groupKeyForPart(p)
        groups.setdefault(key, []).append(p)
    sortedKeys = sorted(groups.keys(), key=lambda k: (k[0], k[1], k[2]))
    for gi, key in enumerate(sortedKeys):
        role, structIdx, label = key
        plist = groups[key]
        node = UdmStructuralNode(
            id=_newId(),
            role=role if role in ("page", "section", "slide", "sheet") else "section",
            index=gi if role == "section" else structIdx,  # sections are renumbered sequentially
            label=label,
            metadata=UdmMetadata(sourcePath=sourcePath),
        )
        for bi, part in enumerate(plist):
            node.children.append(_contentPartToBlock(part, bi))
        doc.children.append(node)
    return doc
def _udmToContentParts(document: UdmDocument) -> ContentExtracted:
    """Flatten UdmDocument back to ContentExtracted for backward compatibility.

    Emits a synthetic root container part, then one ContentPart per block,
    parented to the root. Structural role/index are preserved in metadata
    under "structuralRole"/"structuralIndex".
    """
    rootId = _newId()
    parts: List[ContentPart] = [
        ContentPart(
            id=rootId,
            parentId=None,
            label=document.sourceType or "document",
            typeGroup="container",
            mimeType="application/octet-stream",
            data="",
            metadata={"udmRoot": True, "sourcePath": document.sourcePath},
        )
    ]
    for sn in document.children:
        for block in sn.children:
            # Start from the block's custom metadata; don't overwrite existing keys.
            meta = dict(block.metadata.custom) if block.metadata else {}
            meta.setdefault("structuralRole", sn.role)
            meta.setdefault("structuralIndex", sn.index)
            parts.append(
                ContentPart(
                    id=block.id,
                    parentId=rootId,
                    label=block.attributes.get("label", sn.label or ""),
                    typeGroup=str(block.attributes.get("typeGroup", "text")),
                    mimeType=block.mimeType or "text/plain",
                    data=block.raw,
                    metadata=meta,
                )
            )
    return ContentExtracted(id=document.id, parts=parts)
def _stripUdmRaw(udm: UdmDocument) -> UdmDocument:
    """Return a deep copy of *udm* with every content block's inline payload
    blanked, leaving a structure-only preview of the document tree."""
    copied = udm.model_copy(deep=True)
    allBlocks = (blk for node in copied.children for blk in node.children)
    for blk in allBlocks:
        blk.raw = ""
    return copied
def _stripUdmForReferences(udm: UdmDocument) -> UdmDocument:
    """Return a deep copy of *udm* with inline payloads cleared and `fileRef`
    promoted from `attributes["fileRef"]` or `metadata.custom["fileRef"]`
    (non-empty strings only) when a block does not already carry one."""
    copied = udm.model_copy(deep=True)
    for node in copied.children:
        for blk in node.children:
            blk.raw = ""
            if blk.fileRef:
                continue  # existing reference wins
            candidate = blk.attributes.get("fileRef")
            if not candidate and blk.metadata and blk.metadata.custom:
                candidate = blk.metadata.custom.get("fileRef")
            if isinstance(candidate, str) and candidate:
                blk.fileRef = candidate
    return copied
def _applyUdmOutputDetail(udm: UdmDocument, detail: str) -> UdmDocument:
    """Apply the requested output detail level to *udm*.

    "structure" strips all inline payloads, "references" replaces payloads
    with file references where available, any other value returns the
    document unchanged.
    """
    transforms = {
        "structure": _stripUdmRaw,
        "references": _stripUdmForReferences,
    }
    transform = transforms.get(detail)
    return transform(udm) if transform else udm
def _mimeToUdmSourceType(mimeType: str, fileName: str) -> Literal["pdf", "docx", "pptx", "xlsx", "html", "binary", "unknown"]:
m = (mimeType or "").lower()
fn = (fileName or "").lower()
if m == "application/pdf" or fn.endswith(".pdf"):
return "pdf"
if "wordprocessingml" in m or fn.endswith(".docx"):
return "docx"
if "presentationml" in m or fn.endswith((".pptx", ".ppt")):
return "pptx"
if "spreadsheetml" in m or fn.endswith((".xlsx", ".xlsm")):
return "xlsx"
if m == "text/html" or fn.endswith((".html", ".htm")):
return "html"
if m == "application/octet-stream" or not m:
return "binary"
return "unknown"

View file

@ -22,13 +22,7 @@ class Prompt(PowerOnModel):
mandateId: str = Field(
default="",
description="ID of the mandate this prompt belongs to",
json_schema_extra={
"label": "Mandanten-ID",
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": False,
"fk_target": {"db": "poweron_app", "table": "Mandate"},
},
json_schema_extra={"label": "Mandanten-ID", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False},
)
isSystem: bool = Field(
default=False,

View file

@ -77,7 +77,7 @@ class InvestorDemo2026(_BaseDemoConfig):
mandateIdAlpina = self._ensureMandate(db, _MANDATE_ALPINA, summary)
userId = self._ensureUser(db, summary)
self._ensurePlatformAdminFlag(db, userId, summary)
self._ensureRootMandateSysAdminRole(db, userId, summary)
if mandateIdHappy:
self._ensureMembership(db, userId, mandateIdHappy, _MANDATE_HAPPYLIFE["label"], summary)
@ -195,24 +195,47 @@ class InvestorDemo2026(_BaseDemoConfig):
summary["created"].append(f"User {_USER['fullName']}")
return uid
def _ensurePlatformAdminFlag(self, db, userId: str, summary: Dict):
"""Ensure the demo user has isPlatformAdmin=True for cross-mandate governance.
Without this, the admin UI menus would be hidden."""
from modules.datamodels.datamodelUam import UserInDB
def _ensureRootMandateSysAdminRole(self, db, userId: str, summary: Dict):
"""Ensure the demo user is member of the root mandate with the sysadmin role.
Without this, hasSysAdminRole returns False and admin menus are hidden."""
from modules.datamodels.datamodelUam import Mandate
from modules.datamodels.datamodelMembership import UserMandate, UserMandateRole
from modules.datamodels.datamodelRbac import Role
existing = db.getRecord(UserInDB, userId)
if not existing:
summary["errors"].append(f"Demo user {userId} not found — cannot set isPlatformAdmin")
rootMandates = db.getRecordset(Mandate, recordFilter={"name": "root", "isSystem": True})
if not rootMandates:
summary["errors"].append("Root mandate not found — cannot assign sysadmin role")
return
currentFlag = bool(existing.get("isPlatformAdmin", False)) if isinstance(existing, dict) else bool(getattr(existing, "isPlatformAdmin", False))
if currentFlag:
summary["skipped"].append("isPlatformAdmin already set")
rootMandateId = rootMandates[0].get("id")
existing = db.getRecordset(UserMandate, recordFilter={"userId": userId, "mandateId": rootMandateId})
if existing:
userMandateId = existing[0].get("id")
else:
um = UserMandate(userId=userId, mandateId=rootMandateId, enabled=True)
created = db.recordCreate(UserMandate, um)
userMandateId = created.get("id")
summary["created"].append("Membership -> root mandate")
logger.info(f"Created root mandate membership for {_USER['username']}")
sysadminRoles = db.getRecordset(Role, recordFilter={"mandateId": rootMandateId, "roleLabel": "sysadmin"})
if not sysadminRoles:
summary["errors"].append("sysadmin role not found in root mandate")
return
db.recordModify(UserInDB, userId, {"isPlatformAdmin": True})
summary["created"].append("isPlatformAdmin flag")
logger.info(f"Set isPlatformAdmin=True for {_USER['username']}")
sysadminRoleId = sysadminRoles[0].get("id")
existingRole = db.getRecordset(UserMandateRole, recordFilter={
"userMandateId": userMandateId,
"roleId": sysadminRoleId,
})
if not existingRole:
umr = UserMandateRole(userMandateId=userMandateId, roleId=sysadminRoleId)
db.recordCreate(UserMandateRole, umr)
summary["created"].append("SysAdmin role in root mandate")
logger.info(f"Assigned sysadmin role in root mandate for {_USER['username']}")
else:
summary["skipped"].append("SysAdmin role in root mandate exists")
def _ensureMembership(self, db, userId: str, mandateId: str, mandateLabel: str, summary: Dict):
from modules.datamodels.datamodelMembership import UserMandate, UserMandateRole

View file

@ -21,12 +21,8 @@ from modules.datamodels.datamodelUam import AccessLevel
from modules.datamodels.datamodelChat import UserInputRequest
from modules.datamodels.datamodelBase import PowerOnModel
from modules.shared.dbRegistry import registerDatabase
from modules.shared.timeUtils import getUtcTimestamp, parseTimestamp
chatbotDatabase = "poweron_chatbot"
registerDatabase(chatbotDatabase)
# =============================================================================
# Chatbot-specific Pydantic models for poweron_chatbot (per-instance isolation)
# =============================================================================
@ -396,7 +392,7 @@ class ChatObjects:
try:
# Get configuration values with defaults
dbHost = APP_CONFIG.get("DB_HOST", "_no_config_default_data")
dbDatabase = chatbotDatabase
dbDatabase = "poweron_chatbot"
dbUser = APP_CONFIG.get("DB_USER")
dbPassword = APP_CONFIG.get("DB_PASSWORD_SECRET")
dbPort = int(APP_CONFIG.get("DB_PORT", 5432))

View file

@ -116,18 +116,11 @@ TEMPLATE_ROLES = [
def getFeatureDefinition() -> Dict[str, Any]:
"""Return the feature definition for registration.
The chatbot feature is currently soft-disabled via ``enabled=False``: its
catalog objects, template roles and routes stay loaded so already-running
instances keep working, but it is filtered out of the Store and the
Admin Feature-Instances "Neue Instanz" selection list.
"""
"""Return the feature definition for registration."""
return {
"code": FEATURE_CODE,
"label": FEATURE_LABEL,
"icon": FEATURE_ICON,
"enabled": False,
}

View file

@ -102,7 +102,7 @@ def _validateInstanceAccess(instanceId: str, context: RequestContext) -> str:
)
# Verify user has access to this instance
if not context.isPlatformAdmin:
if not context.hasSysAdminRole:
# Check if user has FeatureAccess for this instance
featureAccesses = rootInterface.getFeatureAccessesForUser(str(context.user.id))
hasAccess = any(

View file

@ -11,7 +11,6 @@ from typing import Dict, Any, List, Optional
from modules.datamodels.datamodelUam import User
from modules.connectors.connectorDbPostgre import DatabaseConnector
from modules.shared.dbRegistry import registerDatabase
from modules.shared.timeUtils import getIsoTimestamp
from modules.shared.configuration import APP_CONFIG
from modules.shared.i18nRegistry import resolveText, t
@ -27,9 +26,6 @@ from .datamodelCommcoach import (
logger = logging.getLogger(__name__)
commcoachDatabase = "poweron_commcoach"
registerDatabase(commcoachDatabase)
_interfaces = {}
@ -55,7 +51,7 @@ class CommcoachObjects:
self.userId = str(currentUser.id) if currentUser else "system"
dbHost = APP_CONFIG.get("DB_HOST", "_no_config_default_data")
dbDatabase = commcoachDatabase
dbDatabase = "poweron_commcoach"
dbUser = APP_CONFIG.get("DB_USER")
dbPassword = APP_CONFIG.get("DB_PASSWORD_SECRET")
dbPort = int(APP_CONFIG.get("DB_PORT", 5432))

View file

@ -150,7 +150,7 @@ async def checkAndAwardBadges(interface, userId: str, mandateId: str, instanceId
except Exception:
allContexts = []
completedTasks = interface.getCompletedTaskCount(userId, instanceId) if hasattr(interface, 'getCompletedTaskCount') else 0
completedTasks = interface.getCompletedTaskCount(userId) if hasattr(interface, 'getCompletedTaskCount') else 0
if completedTasks >= 10:
badgesToCheck.append(("task_completer", True))

View file

@ -101,51 +101,6 @@ BUILTIN_PERSONAS: List[Dict[str, Any]] = [
"gender": "m",
"category": "builtin",
},
# --- Immobilien / Liegenschaftsverwaltung (PWG-Kontext) ---
{
"key": "tenant_payment_arrears_m",
"label": "Mieter mit Zahlungsrückstand",
"description": "René Bachmann, Mieter einer 3.5-Zimmer-Wohnung. Seit drei Monaten im Mietrückstand, hat zwei Mahnungen "
"erhalten und ist genervt vom Druck. Fühlt sich ungerecht behandelt, verweist auf persönliche Schwierigkeiten "
"(Jobverlust, Scheidung). Reagiert defensiv und gereizt auf Forderungen. Braucht empathisches Gegenüber, "
"das gleichzeitig klar die Zahlungspflicht kommuniziert. Kann sich auf eine Ratenzahlung einlassen, "
"wenn er sich respektiert fühlt und einen konkreten Plan sieht.",
"gender": "m",
"category": "builtin",
},
{
"key": "tenant_utility_costs_f",
"label": "Mieterin mit Nebenkostenfragen",
"description": "Fatima El-Amin, Mieterin seit vier Jahren. Hat die jährliche Nebenkostenabrechnung erhalten und versteht "
"mehrere Positionen nicht (Hauswartung, Allgemeinstrom, Verwaltungskosten). Emotional aufgebracht, weil die "
"Nachzahlung unerwartet hoch ist. Vermutet Fehler oder unfaire Verteilung. Spricht schnell und unterbricht. "
"Braucht geduldige, verständliche Erklärungen ohne Fachjargon. Beruhigt sich, wenn man Positionen einzeln "
"durchgeht und auf die Rechtsgrundlage (Mietvertrag, Nebenkosten-Verordnung) verweist.",
"gender": "f",
"category": "builtin",
},
{
"key": "new_tenant_move_in_m",
"label": "Neuer Mieter (Einzug)",
"description": "Luca Steiner, zieht nächste Woche in seine erste eigene Wohnung ein. Aufgeregt aber unsicher — hat viele "
"Fragen zu Wohnungsübergabe, Schlüsselabholung, Hausordnung, Kautionseinzahlung und Anmeldung bei Werken "
"(Strom, Internet). Höflich und kooperativ, braucht aber klare, schrittweise Informationen. Fragt mehrfach "
"nach, wenn etwas unklar ist. Reagiert sehr positiv auf eine willkommene, strukturierte Begleitung.",
"gender": "m",
"category": "builtin",
},
{
"key": "difficult_neighbor_noise_m",
"label": "Nachbar mit Lärmbeschwerde",
"description": "Kurt Zürcher, langjähriger Mieter im Erdgeschoss. Beschwert sich massiv über Lärm aus der Wohnung darüber "
"(Musik abends, Kindergetrampel, Waschmaschine nach 22 Uhr). Hat bereits ein Lärmprotokoll geführt und "
"droht mit Mietminderung und Anwalt. Spricht laut, ist aufgebracht und fühlt sich von der Verwaltung "
"nicht ernst genommen. Erwartet sofortige Massnahmen. Kann deeskaliert werden, wenn man sein Anliegen "
"ernst nimmt, konkrete nächste Schritte aufzeigt (Gespräch mit Nachbar, schriftliche Verwarnung) und "
"auf die Hausordnung sowie seine Rechte und Pflichten verweist.",
"gender": "m",
"category": "builtin",
},
]

View file

@ -71,7 +71,6 @@ class AutoWorkflow(PowerOnModel):
"frontend_fk_source": "/api/mandates/",
"frontend_fk_display_field": "label",
"fk_model": "Mandate",
"fk_target": {"db": "poweron_app", "table": "Mandate"},
},
)
featureInstanceId: str = Field(
@ -84,7 +83,6 @@ class AutoWorkflow(PowerOnModel):
"frontend_fk_source": "/api/features/instances",
"frontend_fk_display_field": "label",
"fk_model": "FeatureInstance",
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
},
)
label: str = Field(
@ -109,13 +107,7 @@ class AutoWorkflow(PowerOnModel):
templateSourceId: Optional[str] = Field(
default=None,
description="ID of the template this workflow was created from",
json_schema_extra={
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": False,
"label": "Vorlagen-Quelle",
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoWorkflow"},
},
json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False, "label": "Vorlagen-Quelle"},
)
templateScope: Optional[str] = Field(
default=None,
@ -130,13 +122,7 @@ class AutoWorkflow(PowerOnModel):
currentVersionId: Optional[str] = Field(
default=None,
description="ID of the currently published AutoVersion",
json_schema_extra={
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": False,
"label": "Aktuelle Version",
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoVersion"},
},
json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False, "label": "Aktuelle Version"},
)
active: bool = Field(
default=True,
@ -179,13 +165,7 @@ class AutoVersion(PowerOnModel):
)
workflowId: str = Field(
description="FK -> AutoWorkflow",
json_schema_extra={
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": True,
"label": "Workflow-ID",
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoWorkflow"},
},
json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": True, "label": "Workflow-ID"},
)
versionNumber: int = Field(
default=1,
@ -215,13 +195,7 @@ class AutoVersion(PowerOnModel):
publishedBy: Optional[str] = Field(
default=None,
description="User ID who published this version",
json_schema_extra={
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": False,
"label": "Veröffentlicht von",
"fk_target": {"db": "poweron_app", "table": "User"},
},
json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False, "label": "Veröffentlicht von"},
)
@ -238,13 +212,7 @@ class AutoRun(PowerOnModel):
)
workflowId: str = Field(
description="Workflow ID",
json_schema_extra={
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": True,
"label": "Workflow-ID",
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoWorkflow"},
},
json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": True, "label": "Workflow-ID"},
)
label: Optional[str] = Field(
default=None,
@ -262,30 +230,17 @@ class AutoRun(PowerOnModel):
"frontend_fk_source": "/api/mandates/",
"frontend_fk_display_field": "label",
"fk_model": "Mandate",
"fk_target": {"db": "poweron_app", "table": "Mandate"},
},
)
ownerId: Optional[str] = Field(
default=None,
description="User ID who triggered this run",
json_schema_extra={
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": False,
"label": "Auslöser",
"fk_target": {"db": "poweron_app", "table": "User"},
},
json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False, "label": "Auslöser"},
)
versionId: Optional[str] = Field(
default=None,
description="AutoVersion ID used for this run",
json_schema_extra={
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": False,
"label": "Versions-ID",
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoVersion"},
},
json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False, "label": "Versions-ID"},
)
status: str = Field(
default=AutoRunStatus.RUNNING.value,
@ -352,13 +307,7 @@ class AutoStepLog(PowerOnModel):
)
runId: str = Field(
description="FK -> AutoRun",
json_schema_extra={
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": True,
"label": "Lauf-ID",
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoRun"},
},
json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": True, "label": "Lauf-ID"},
)
nodeId: str = Field(
description="Node ID in the graph",
@ -428,23 +377,11 @@ class AutoTask(PowerOnModel):
)
runId: str = Field(
description="FK -> AutoRun",
json_schema_extra={
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": True,
"label": "Lauf-ID",
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoRun"},
},
json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": True, "label": "Lauf-ID"},
)
workflowId: str = Field(
description="Workflow ID",
json_schema_extra={
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": True,
"label": "Workflow-ID",
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoWorkflow"},
},
json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": True, "label": "Workflow-ID"},
)
nodeId: str = Field(
description="Node ID in the graph",
@ -462,13 +399,7 @@ class AutoTask(PowerOnModel):
assigneeId: Optional[str] = Field(
default=None,
description="User ID assigned to complete the task",
json_schema_extra={
"frontend_type": "text",
"frontend_readonly": False,
"frontend_required": False,
"label": "Zugewiesen an",
"fk_target": {"db": "poweron_app", "table": "User"},
},
json_schema_extra={"frontend_type": "text", "frontend_readonly": False, "frontend_required": False, "label": "Zugewiesen an"},
)
status: str = Field(
default=AutoTaskStatus.PENDING.value,

View file

@ -38,12 +38,10 @@ from modules.features.graphicalEditor.datamodelFeatureGraphicalEditor import (
from modules.features.graphicalEditor.entryPoints import normalize_invocations_list
from modules.connectors.connectorDbPostgre import DatabaseConnector
from modules.shared.configuration import APP_CONFIG
from modules.shared.dbRegistry import registerDatabase
logger = logging.getLogger(__name__)
graphicalEditorDatabase = "poweron_graphicaleditor"
registerDatabase(graphicalEditorDatabase)
_GREENFIELD_DB = "poweron_graphicaleditor"
_CALLBACK_WORKFLOW_CHANGED = "graphicalEditor.workflow.changed"
@ -70,7 +68,7 @@ def getAllWorkflowsForScheduling() -> List[Dict[str, Any]]:
Used by the scheduler to register cron jobs. Does not filter by mandate/instance.
"""
dbHost = APP_CONFIG.get("DB_HOST", "localhost")
dbDatabase = graphicalEditorDatabase
dbDatabase = _GREENFIELD_DB
dbUser = APP_CONFIG.get("DB_USER")
dbPassword = APP_CONFIG.get("DB_PASSWORD_SECRET") or APP_CONFIG.get("DB_PASSWORD")
dbPort = int(APP_CONFIG.get("DB_PORT", 5432))
@ -157,7 +155,7 @@ class GraphicalEditorObjects:
def _init_db(self):
"""Initialize database connection to poweron_graphicaleditor (Greenfield)."""
dbHost = APP_CONFIG.get("DB_HOST", "localhost")
dbDatabase = graphicalEditorDatabase
dbDatabase = _GREENFIELD_DB
dbUser = APP_CONFIG.get("DB_USER")
dbPassword = APP_CONFIG.get("DB_PASSWORD_SECRET") or APP_CONFIG.get("DB_PASSWORD")
dbPort = int(APP_CONFIG.get("DB_PORT", 5432))
@ -176,11 +174,12 @@ class GraphicalEditorObjects:
# -------------------------------------------------------------------------
def getWorkflows(self, active: Optional[bool] = None) -> List[Dict[str, Any]]:
"""Get all workflows for this mandate (cross-instance)."""
"""Get all workflows for this mandate and feature instance."""
if not self.db._ensureTableExists(Automation2Workflow):
return []
rf: Dict[str, Any] = {
"mandateId": self.mandateId,
"featureInstanceId": self.featureInstanceId,
}
if active is not None:
rf["active"] = active
@ -194,7 +193,7 @@ class GraphicalEditorObjects:
return rows
def getWorkflow(self, workflowId: str) -> Optional[Dict[str, Any]]:
"""Get a single workflow by ID (mandate-scoped, cross-instance)."""
"""Get a single workflow by ID."""
if not self.db._ensureTableExists(Automation2Workflow):
return None
records = self.db.getRecordset(
@ -202,6 +201,7 @@ class GraphicalEditorObjects:
recordFilter={
"id": workflowId,
"mandateId": self.mandateId,
"featureInstanceId": self.featureInstanceId,
},
)
if not records:

View file

@ -11,7 +11,6 @@ from .clickup import CLICKUP_NODES
from .file import FILE_NODES
from .trustee import TRUSTEE_NODES
from .data import DATA_NODES
from .context import CONTEXT_NODES
STATIC_NODE_TYPES = (
TRIGGER_NODES
@ -24,5 +23,4 @@ STATIC_NODE_TYPES = (
+ FILE_NODES
+ TRUSTEE_NODES
+ DATA_NODES
+ CONTEXT_NODES
)

View file

@ -26,7 +26,7 @@ AI_NODES = [
"outputs": 1,
"inputPorts": {0: {"accepts": ["DocumentList", "AiResult", "TextResult", "Transit"]}},
"outputPorts": {0: {"schema": "AiResult"}},
"meta": {"icon": "mdi-robot", "color": "#9C27B0", "usesAi": True},
"meta": {"icon": "mdi-robot", "color": "#9C27B0"},
"_method": "ai",
"_action": "process",
},
@ -43,7 +43,7 @@ AI_NODES = [
"outputs": 1,
"inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "AiResult"}},
"meta": {"icon": "mdi-magnify", "color": "#9C27B0", "usesAi": True},
"meta": {"icon": "mdi-magnify", "color": "#9C27B0"},
"_method": "ai",
"_action": "webResearch",
},
@ -61,7 +61,7 @@ AI_NODES = [
"outputs": 1,
"inputPorts": {0: {"accepts": ["DocumentList", "Transit"]}},
"outputPorts": {0: {"schema": "AiResult"}},
"meta": {"icon": "mdi-file-document-outline", "color": "#9C27B0", "usesAi": True},
"meta": {"icon": "mdi-file-document-outline", "color": "#9C27B0"},
"_method": "ai",
"_action": "summarizeDocument",
},
@ -79,7 +79,7 @@ AI_NODES = [
"outputs": 1,
"inputPorts": {0: {"accepts": ["DocumentList", "Transit"]}},
"outputPorts": {0: {"schema": "AiResult"}},
"meta": {"icon": "mdi-translate", "color": "#9C27B0", "usesAi": True},
"meta": {"icon": "mdi-translate", "color": "#9C27B0"},
"_method": "ai",
"_action": "translateDocument",
},
@ -97,7 +97,7 @@ AI_NODES = [
"outputs": 1,
"inputPorts": {0: {"accepts": ["DocumentList", "Transit"]}},
"outputPorts": {0: {"schema": "DocumentList"}},
"meta": {"icon": "mdi-file-convert", "color": "#9C27B0", "usesAi": True},
"meta": {"icon": "mdi-file-convert", "color": "#9C27B0"},
"_method": "ai",
"_action": "convertDocument",
},
@ -114,7 +114,7 @@ AI_NODES = [
"outputs": 1,
"inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "DocumentList"}},
"meta": {"icon": "mdi-file-plus", "color": "#9C27B0", "usesAi": True},
"meta": {"icon": "mdi-file-plus", "color": "#9C27B0"},
"_method": "ai",
"_action": "generateDocument",
},
@ -134,28 +134,8 @@ AI_NODES = [
"outputs": 1,
"inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "AiResult"}},
"meta": {"icon": "mdi-code-tags", "color": "#9C27B0", "usesAi": True},
"meta": {"icon": "mdi-code-tags", "color": "#9C27B0"},
"_method": "ai",
"_action": "generateCode",
},
{
"id": "ai.consolidate",
"category": "ai",
"label": t("KI-Konsolidierung"),
"description": t("Gesammelte Ergebnisse mit KI zusammenfassen, klassifizieren oder semantisch zusammenführen"),
"parameters": [
{"name": "mode", "type": "string", "required": False, "frontendType": "select",
"frontendOptions": {"options": ["summarize", "classify", "semanticMerge"]},
"description": t("Konsolidierungsmodus"), "default": "summarize"},
{"name": "prompt", "type": "string", "required": False, "frontendType": "textarea",
"description": t("Optionaler Prompt für die Konsolidierung"), "default": ""},
],
"inputs": 1,
"outputs": 1,
"inputPorts": {0: {"accepts": ["AggregateResult", "Transit"]}},
"outputPorts": {0: {"schema": "ConsolidateResult"}},
"meta": {"icon": "mdi-table-merge-cells", "color": "#9C27B0", "usesAi": True},
"_method": "ai",
"_action": "consolidate",
},
]

View file

@ -33,7 +33,7 @@ CLICKUP_NODES = [
"outputs": 1,
"inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "TaskList"}},
"meta": {"icon": "mdi-magnify", "color": "#7B68EE", "usesAi": False},
"meta": {"icon": "mdi-magnify", "color": "#7B68EE"},
"_method": "clickup",
"_action": "searchTasks",
},
@ -57,7 +57,7 @@ CLICKUP_NODES = [
"outputs": 1,
"inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "TaskList"}},
"meta": {"icon": "mdi-format-list-bulleted", "color": "#7B68EE", "usesAi": False},
"meta": {"icon": "mdi-format-list-bulleted", "color": "#7B68EE"},
"_method": "clickup",
"_action": "listTasks",
},
@ -78,7 +78,7 @@ CLICKUP_NODES = [
"outputs": 1,
"inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "TaskResult"}},
"meta": {"icon": "mdi-file-document-outline", "color": "#7B68EE", "usesAi": False},
"meta": {"icon": "mdi-file-document-outline", "color": "#7B68EE"},
"_method": "clickup",
"_action": "getTask",
},
@ -123,7 +123,7 @@ CLICKUP_NODES = [
"outputs": 1,
"inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "TaskResult"}},
"meta": {"icon": "mdi-plus-circle-outline", "color": "#7B68EE", "usesAi": False},
"meta": {"icon": "mdi-plus-circle-outline", "color": "#7B68EE"},
"_method": "clickup",
"_action": "createTask",
},
@ -148,7 +148,7 @@ CLICKUP_NODES = [
"outputs": 1,
"inputPorts": {0: {"accepts": ["TaskResult", "Transit"]}},
"outputPorts": {0: {"schema": "TaskResult"}},
"meta": {"icon": "mdi-pencil-outline", "color": "#7B68EE", "usesAi": False},
"meta": {"icon": "mdi-pencil-outline", "color": "#7B68EE"},
"_method": "clickup",
"_action": "updateTask",
},
@ -171,7 +171,7 @@ CLICKUP_NODES = [
"outputs": 1,
"inputPorts": {0: {"accepts": ["DocumentList", "Transit"]}},
"outputPorts": {0: {"schema": "ActionResult"}},
"meta": {"icon": "mdi-attachment", "color": "#7B68EE", "usesAi": False},
"meta": {"icon": "mdi-attachment", "color": "#7B68EE"},
"_method": "clickup",
"_action": "uploadAttachment",
},

View file

@ -1,30 +0,0 @@
# Copyright (c) 2025 Patrick Motsch
# Context node definitions — structural extraction without AI.
from modules.shared.i18nRegistry import t
CONTEXT_NODES = [
{
"id": "context.extractContent",
"category": "context",
"label": t("Inhalt extrahieren"),
"description": t("Dokumentstruktur extrahieren ohne KI (Seiten, Abschnitte, Bilder, Tabellen)"),
"parameters": [
{"name": "outputDetail", "type": "string", "required": False, "frontendType": "select",
"frontendOptions": {"options": ["full", "structure", "references"]},
"description": t("Detailgrad: full = alles, structure = Skelett, references = Dateireferenzen"),
"default": "full"},
{"name": "includeImages", "type": "boolean", "required": False, "frontendType": "checkbox",
"description": t("Bilder extrahieren"), "default": True},
{"name": "includeTables", "type": "boolean", "required": False, "frontendType": "checkbox",
"description": t("Tabellen extrahieren"), "default": True},
],
"inputs": 1,
"outputs": 1,
"inputPorts": {0: {"accepts": ["DocumentList", "Transit"]}},
"outputPorts": {0: {"schema": "UdmDocument"}},
"meta": {"icon": "mdi-file-tree-outline", "color": "#00897B", "usesAi": False},
"_method": "context",
"_action": "extractContent",
},
]

View file

@ -19,7 +19,7 @@ DATA_NODES = [
"inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "AggregateResult"}},
"executor": "data",
"meta": {"icon": "mdi-playlist-plus", "color": "#607D8B", "usesAi": False},
"meta": {"icon": "mdi-playlist-plus", "color": "#607D8B"},
},
{
"id": "data.transform",
@ -35,7 +35,7 @@ DATA_NODES = [
"inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "ActionResult", "dynamic": True, "deriveFrom": "mappings"}},
"executor": "data",
"meta": {"icon": "mdi-swap-horizontal-bold", "color": "#607D8B", "usesAi": False},
"meta": {"icon": "mdi-swap-horizontal-bold", "color": "#607D8B"},
},
{
"id": "data.filter",
@ -45,34 +45,12 @@ DATA_NODES = [
"parameters": [
{"name": "condition", "type": "string", "required": True, "frontendType": "filterExpression",
"description": t("Filterbedingung")},
{"name": "udmContentType", "type": "string", "required": False, "frontendType": "select",
"frontendOptions": {"options": ["", "text", "image", "table", "code", "media", "link", "formula"]},
"description": t("UDM-ContentType-Filter (optional, leer = kein UDM-Filter)"), "default": ""},
],
"inputs": 1,
"outputs": 1,
"inputPorts": {0: {"accepts": ["AggregateResult", "FileList", "TaskList", "EmailList", "DocumentList", "UdmDocument", "UdmNodeList"]}},
"inputPorts": {0: {"accepts": ["AggregateResult", "FileList", "TaskList", "EmailList", "DocumentList"]}},
"outputPorts": {0: {"schema": "Transit"}},
"executor": "data",
"meta": {"icon": "mdi-filter-outline", "color": "#607D8B", "usesAi": False},
},
{
"id": "data.consolidate",
"category": "data",
"label": t("Konsolidieren"),
"description": t("Gesammelte Ergebnisse deterministisch zusammenführen (Tabelle, CSV, Merge)"),
"parameters": [
{"name": "mode", "type": "string", "required": False, "frontendType": "select",
"frontendOptions": {"options": ["table", "concat", "merge", "csvJoin"]},
"description": t("Konsolidierungsmodus"), "default": "table"},
{"name": "separator", "type": "string", "required": False, "frontendType": "text",
"description": t("Trennzeichen (für concat/csvJoin)"), "default": "\n"},
],
"inputs": 1,
"outputs": 1,
"inputPorts": {0: {"accepts": ["AggregateResult", "Transit"]}},
"outputPorts": {0: {"schema": "ConsolidateResult"}},
"executor": "data",
"meta": {"icon": "mdi-table-merge-cells", "color": "#607D8B", "usesAi": False},
"meta": {"icon": "mdi-filter-outline", "color": "#607D8B"},
},
]

View file

@ -29,7 +29,7 @@ EMAIL_NODES = [
"outputs": 1,
"inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "EmailList"}},
"meta": {"icon": "mdi-email-check", "color": "#1976D2", "usesAi": False},
"meta": {"icon": "mdi-email-check", "color": "#1976D2"},
"_method": "outlook",
"_action": "readEmails",
},
@ -64,7 +64,7 @@ EMAIL_NODES = [
"outputs": 1,
"inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "EmailList"}},
"meta": {"icon": "mdi-email-search", "color": "#1976D2", "usesAi": False},
"meta": {"icon": "mdi-email-search", "color": "#1976D2"},
"_method": "outlook",
"_action": "searchEmails",
},
@ -87,7 +87,7 @@ EMAIL_NODES = [
"outputs": 1,
"inputPorts": {0: {"accepts": ["EmailDraft", "AiResult", "Transit"]}},
"outputPorts": {0: {"schema": "ActionResult"}},
"meta": {"icon": "mdi-email-edit", "color": "#1976D2", "usesAi": False},
"meta": {"icon": "mdi-email-edit", "color": "#1976D2"},
"_method": "outlook",
"_action": "composeAndDraftEmailWithContext",
},

View file

@ -30,7 +30,7 @@ FILE_NODES = [
"outputs": 1,
"inputPorts": {0: {"accepts": ["AiResult", "TextResult", "Transit"]}},
"outputPorts": {0: {"schema": "DocumentList"}},
"meta": {"icon": "mdi-file-plus-outline", "color": "#2196F3", "usesAi": False},
"meta": {"icon": "mdi-file-plus-outline", "color": "#2196F3"},
"_method": "file",
"_action": "create",
},

View file

@ -24,7 +24,7 @@ FLOW_NODES = [
"inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "Transit"}, 1: {"schema": "Transit"}},
"executor": "flow",
"meta": {"icon": "mdi-source-branch", "color": "#FF9800", "usesAi": False},
"meta": {"icon": "mdi-source-branch", "color": "#FF9800"},
},
{
"id": "flow.switch",
@ -52,13 +52,13 @@ FLOW_NODES = [
"inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "Transit"}},
"executor": "flow",
"meta": {"icon": "mdi-swap-horizontal", "color": "#FF9800", "usesAi": False},
"meta": {"icon": "mdi-swap-horizontal", "color": "#FF9800"},
},
{
"id": "flow.loop",
"category": "flow",
"label": t("Schleife / Für Jedes"),
"description": t("Über Array-Elemente oder UDM-Strukturebenen iterieren"),
"description": t("Über Array-Elemente iterieren"),
"parameters": [
{
"name": "items",
@ -67,37 +67,19 @@ FLOW_NODES = [
"frontendType": "text",
"description": t("Pfad zum Array"),
},
{
"name": "level",
"type": "string",
"required": False,
"frontendType": "select",
"frontendOptions": {"options": ["auto", "documents", "structuralNodes", "contentBlocks"]},
"description": t("UDM-Iterationsebene"),
"default": "auto",
},
{
"name": "concurrency",
"type": "number",
"required": False,
"frontendType": "number",
"frontendOptions": {"min": 1, "max": 20},
"description": t("Parallele Iterationen (1 = sequentiell)"),
"default": 1,
},
],
"inputs": 1,
"outputs": 1,
"inputPorts": {0: {"accepts": ["Transit", "UdmDocument"]}},
"inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "LoopItem"}},
"executor": "flow",
"meta": {"icon": "mdi-repeat", "color": "#FF9800", "usesAi": False},
"meta": {"icon": "mdi-repeat", "color": "#FF9800"},
},
{
"id": "flow.merge",
"category": "flow",
"label": t("Zusammenführen"),
"description": t("Mehrere Zweige zusammenführen (2-5 Eingänge)"),
"description": t("Mehrere Zweige zusammenführen"),
"parameters": [
{
"name": "mode",
@ -108,21 +90,12 @@ FLOW_NODES = [
"description": t("Zusammenführungsmodus"),
"default": "first",
},
{
"name": "inputCount",
"type": "number",
"required": False,
"frontendType": "number",
"frontendOptions": {"min": 2, "max": 5},
"description": t("Anzahl Eingänge"),
"default": 2,
},
],
"inputs": 2,
"outputs": 1,
"inputPorts": {0: {"accepts": ["Transit"]}, 1: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "MergeResult"}},
"executor": "flow",
"meta": {"icon": "mdi-call-merge", "color": "#FF9800", "usesAi": False},
"meta": {"icon": "mdi-call-merge", "color": "#FF9800"},
},
]

View file

@ -24,7 +24,7 @@ INPUT_NODES = [
"inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "FormPayload", "dynamic": True, "deriveFrom": "fields"}},
"executor": "input",
"meta": {"icon": "mdi-form-textbox", "color": "#9C27B0", "usesAi": False},
"meta": {"icon": "mdi-form-textbox", "color": "#9C27B0"},
},
{
"id": "input.approval",
@ -45,7 +45,7 @@ INPUT_NODES = [
"inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "BoolResult"}},
"executor": "input",
"meta": {"icon": "mdi-check-decagram", "color": "#4CAF50", "usesAi": False},
"meta": {"icon": "mdi-check-decagram", "color": "#4CAF50"},
},
{
"id": "input.upload",
@ -68,7 +68,7 @@ INPUT_NODES = [
"inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "DocumentList"}},
"executor": "input",
"meta": {"icon": "mdi-upload", "color": "#2196F3", "usesAi": False},
"meta": {"icon": "mdi-upload", "color": "#2196F3"},
},
{
"id": "input.comment",
@ -86,7 +86,7 @@ INPUT_NODES = [
"inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "TextResult"}},
"executor": "input",
"meta": {"icon": "mdi-comment-text", "color": "#FF9800", "usesAi": False},
"meta": {"icon": "mdi-comment-text", "color": "#FF9800"},
},
{
"id": "input.review",
@ -105,7 +105,7 @@ INPUT_NODES = [
"inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "BoolResult"}},
"executor": "input",
"meta": {"icon": "mdi-magnify-scan", "color": "#673AB7", "usesAi": False},
"meta": {"icon": "mdi-magnify-scan", "color": "#673AB7"},
},
{
"id": "input.selection",
@ -123,7 +123,7 @@ INPUT_NODES = [
"inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "TextResult"}},
"executor": "input",
"meta": {"icon": "mdi-format-list-checks", "color": "#009688", "usesAi": False},
"meta": {"icon": "mdi-format-list-checks", "color": "#009688"},
},
{
"id": "input.confirmation",
@ -143,6 +143,6 @@ INPUT_NODES = [
"inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "BoolResult"}},
"executor": "input",
"meta": {"icon": "mdi-checkbox-marked-circle", "color": "#8BC34A", "usesAi": False},
"meta": {"icon": "mdi-checkbox-marked-circle", "color": "#8BC34A"},
},
]

View file

@ -23,7 +23,7 @@ SHAREPOINT_NODES = [
"outputs": 1,
"inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "FileList"}},
"meta": {"icon": "mdi-file-search", "color": "#0078D4", "usesAi": False},
"meta": {"icon": "mdi-file-search", "color": "#0078D4"},
"_method": "sharepoint",
"_action": "findDocumentPath",
},
@ -43,7 +43,7 @@ SHAREPOINT_NODES = [
"outputs": 1,
"inputPorts": {0: {"accepts": ["FileList", "Transit"]}},
"outputPorts": {0: {"schema": "DocumentList"}},
"meta": {"icon": "mdi-file-document", "color": "#0078D4", "usesAi": False},
"meta": {"icon": "mdi-file-document", "color": "#0078D4"},
"_method": "sharepoint",
"_action": "readDocuments",
},
@ -63,7 +63,7 @@ SHAREPOINT_NODES = [
"outputs": 1,
"inputPorts": {0: {"accepts": ["DocumentList", "Transit"]}},
"outputPorts": {0: {"schema": "ActionResult"}},
"meta": {"icon": "mdi-upload", "color": "#0078D4", "usesAi": False},
"meta": {"icon": "mdi-upload", "color": "#0078D4"},
"_method": "sharepoint",
"_action": "uploadFile",
},
@ -83,7 +83,7 @@ SHAREPOINT_NODES = [
"outputs": 1,
"inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "FileList"}},
"meta": {"icon": "mdi-folder-open", "color": "#0078D4", "usesAi": False},
"meta": {"icon": "mdi-folder-open", "color": "#0078D4"},
"_method": "sharepoint",
"_action": "listDocuments",
},
@ -103,7 +103,7 @@ SHAREPOINT_NODES = [
"outputs": 1,
"inputPorts": {0: {"accepts": ["FileList", "Transit"]}},
"outputPorts": {0: {"schema": "DocumentList"}},
"meta": {"icon": "mdi-download", "color": "#0078D4", "usesAi": False},
"meta": {"icon": "mdi-download", "color": "#0078D4"},
"_method": "sharepoint",
"_action": "downloadFileByPath",
},
@ -126,7 +126,7 @@ SHAREPOINT_NODES = [
"outputs": 1,
"inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "ActionResult"}},
"meta": {"icon": "mdi-content-copy", "color": "#0078D4", "usesAi": False},
"meta": {"icon": "mdi-content-copy", "color": "#0078D4"},
"_method": "sharepoint",
"_action": "copyFile",
},

View file

@ -15,7 +15,7 @@ TRIGGER_NODES = [
"inputPorts": {},
"outputPorts": {0: {"schema": "ActionResult"}},
"executor": "trigger",
"meta": {"icon": "mdi-play", "color": "#4CAF50", "usesAi": False},
"meta": {"icon": "mdi-play", "color": "#4CAF50"},
},
{
"id": "trigger.form",
@ -36,7 +36,7 @@ TRIGGER_NODES = [
"inputPorts": {},
"outputPorts": {0: {"schema": "FormPayload", "dynamic": True, "deriveFrom": "formFields"}},
"executor": "trigger",
"meta": {"icon": "mdi-form-select", "color": "#9C27B0", "usesAi": False},
"meta": {"icon": "mdi-form-select", "color": "#9C27B0"},
},
{
"id": "trigger.schedule",
@ -57,6 +57,6 @@ TRIGGER_NODES = [
"inputPorts": {},
"outputPorts": {0: {"schema": "ActionResult"}},
"executor": "trigger",
"meta": {"icon": "mdi-clock", "color": "#2196F3", "usesAi": False},
"meta": {"icon": "mdi-clock", "color": "#2196F3"},
},
]

View file

@ -23,7 +23,7 @@ TRUSTEE_NODES = [
"outputs": 1,
"inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "ActionResult"}},
"meta": {"icon": "mdi-database-refresh", "color": "#4CAF50", "usesAi": False},
"meta": {"icon": "mdi-database-refresh", "color": "#4CAF50"},
"_method": "trustee",
"_action": "refreshAccountingData",
},
@ -47,7 +47,7 @@ TRUSTEE_NODES = [
"outputs": 1,
"inputPorts": {0: {"accepts": ["DocumentList", "Transit"]}},
"outputPorts": {0: {"schema": "DocumentList"}},
"meta": {"icon": "mdi-file-document-scan", "color": "#4CAF50", "usesAi": True},
"meta": {"icon": "mdi-file-document-scan", "color": "#4CAF50"},
"_method": "trustee",
"_action": "extractFromFiles",
},
@ -66,7 +66,7 @@ TRUSTEE_NODES = [
"outputs": 1,
"inputPorts": {0: {"accepts": ["DocumentList", "Transit"]}},
"outputPorts": {0: {"schema": "ActionResult"}},
"meta": {"icon": "mdi-file-document-check", "color": "#4CAF50", "usesAi": False},
"meta": {"icon": "mdi-file-document-check", "color": "#4CAF50"},
"_method": "trustee",
"_action": "processDocuments",
},
@ -85,7 +85,7 @@ TRUSTEE_NODES = [
"outputs": 1,
"inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "ActionResult"}},
"meta": {"icon": "mdi-calculator", "color": "#4CAF50", "usesAi": False},
"meta": {"icon": "mdi-calculator", "color": "#4CAF50"},
"_method": "trustee",
"_action": "syncToAccounting",
},

View file

@ -88,7 +88,6 @@ def getNodeTypesForApi(
{"id": "input", "label": "Eingabe/Mensch"},
{"id": "flow", "label": "Ablauf"},
{"id": "data", "label": "Daten"},
{"id": "context", "label": "Kontext"},
{"id": "ai", "label": "KI"},
{"id": "file", "label": "Datei"},
{"id": "email", "label": "E-Mail"},

View file

@ -152,21 +152,6 @@ PORT_TYPE_CATALOG: Dict[str, PortSchema] = {
description="Ergebnisdaten"),
]),
"Transit": PortSchema(name="Transit", fields=[]),
"UdmDocument": PortSchema(name="UdmDocument", fields=[
PortField(name="id", type="str", description="Dokument-ID"),
PortField(name="sourceType", type="str", description="Quellformat (pdf, docx, …)"),
PortField(name="sourcePath", type="str", description="Quellpfad"),
PortField(name="children", type="List[Any]", description="StructuralNodes"),
]),
"UdmNodeList": PortSchema(name="UdmNodeList", fields=[
PortField(name="nodes", type="List[Any]", description="UDM StructuralNodes oder ContentBlocks"),
PortField(name="count", type="int", description="Anzahl"),
]),
"ConsolidateResult": PortSchema(name="ConsolidateResult", fields=[
PortField(name="result", type="Any", description="Konsolidiertes Ergebnis"),
PortField(name="mode", type="str", description="Konsolidierungsmodus"),
PortField(name="count", type="int", description="Anzahl verarbeiteter Elemente"),
]),
}
@ -427,36 +412,6 @@ def _extractMergeResult(upstream: Dict[str, Any]) -> Dict[str, Any]:
}
def _extractUdmDocument(upstream: Dict[str, Any]) -> Dict[str, Any]:
"""Extract UdmDocument fields from upstream output."""
if upstream.get("children") is not None and upstream.get("sourceType"):
return upstream
udm = upstream.get("udm")
if isinstance(udm, dict) and udm.get("children") is not None:
return udm
return {}
def _extractUdmNodeList(upstream: Dict[str, Any]) -> Dict[str, Any]:
"""Extract UdmNodeList fields from upstream output."""
nodes = upstream.get("nodes")
if isinstance(nodes, list):
return {"nodes": nodes, "count": len(nodes)}
children = upstream.get("children")
if isinstance(children, list):
return {"nodes": children, "count": len(children)}
return {}
def _extractConsolidateResult(upstream: Dict[str, Any]) -> Dict[str, Any]:
"""Extract ConsolidateResult fields from upstream output."""
result = {}
for key in ("result", "mode", "count"):
if key in upstream:
result[key] = upstream[key]
return result
INPUT_EXTRACTORS: Dict[str, Callable] = {
"EmailDraft": _extractEmailDraft,
"DocumentList": _extractDocuments,
@ -470,9 +425,6 @@ INPUT_EXTRACTORS: Dict[str, Callable] = {
"TaskResult": _extractTaskResult,
"AggregateResult": _extractAggregateResult,
"MergeResult": _extractMergeResult,
"UdmDocument": _extractUdmDocument,
"UdmNodeList": _extractUdmNodeList,
"ConsolidateResult": _extractConsolidateResult,
}

View file

@ -470,63 +470,16 @@ def share_template(
# -------------------------------------------------------------------------
def _editorChatQueueId(workflowId: str) -> str:
"""Deterministic SSE queue id for the editor chat (one active stream per workflow).
Mirrors the workspace pattern (``workspace-{workflowId}``) so stop/cancel can
target the running task by workflowId without needing per-request handles.
"""
return f"ge-chat-{workflowId}"
def _getEditorChatInterface(context: RequestContext, mandateId: str, instanceId: str):
"""Build the ChatObjects interface used to persist editor-chat messages."""
from modules.interfaces import interfaceDbChat
return interfaceDbChat.getInterface(
context.user,
mandateId=mandateId,
featureInstanceId=instanceId,
)
def _editorConversationHistoryFromPersisted(chatInterface, chatWorkflowId: str) -> List[Dict[str, Any]]:
"""Load persisted ChatMessages for the editor chat and shape them as the
agent expects (``[{role, message}]``). Skips empty / system messages.
"""
try:
msgs = chatInterface.getMessages(chatWorkflowId) or []
except Exception as e:
logger.warning("Editor chat: could not load persisted history for %s: %s", chatWorkflowId, e)
return []
history: List[Dict[str, Any]] = []
for m in msgs:
role = (getattr(m, "role", None) or (m.get("role") if isinstance(m, dict) else None) or "").strip()
text = (getattr(m, "message", None) or (m.get("message") if isinstance(m, dict) else None) or "").strip()
if not role or not text:
continue
if role not in ("user", "assistant", "system"):
continue
history.append({"role": role, "message": text})
return history
@router.post("/{instanceId}/{workflowId}/chat/stream")
@limiter.limit("30/minute")
async def post_editor_chat(
request: Request,
instanceId: str = Path(..., description="Feature instance ID"),
workflowId: str = Path(..., description="Workflow ID"),
body: dict = Body(..., description="{ message, userLanguage? }"),
body: dict = Body(..., description="{ message, conversationHistory?, userLanguage? }"),
context: RequestContext = Depends(getRequestContext),
):
"""AI chat endpoint for the editor with SSE streaming. Uses workflow tools to mutate the graph.
Persistence: the chat is stored in the standard ``ChatWorkflow`` table linked
to this Automation2Workflow via ``ChatWorkflow.linkedWorkflowId``. The user
message is persisted before the agent starts; the assistant message after.
Conversation history is loaded server-side from this linked ChatWorkflow
the client does not need to maintain it.
"""
"""AI chat endpoint for the editor with SSE streaming. Uses workflow tools to mutate the graph."""
mandateId = _validateInstanceAccess(instanceId, context)
message = body.get("message", "")
if not message:
@ -538,35 +491,14 @@ async def post_editor_chat(
raise HTTPException(status_code=404, detail=routeApiMsg("Workflow not found"))
userLanguage = body.get("userLanguage", "de")
conversationHistory = body.get("conversationHistory") or []
fileIds = body.get("fileIds") or []
dataSourceIds = body.get("dataSourceIds") or []
featureDataSourceIds = body.get("featureDataSourceIds") or []
chatInterface = _getEditorChatInterface(context, mandateId, instanceId)
wfLabel = wf.get("label") if isinstance(wf, dict) else getattr(wf, "label", None)
chatWorkflow = chatInterface.getOrCreateLinkedWorkflow(
featureInstanceId=instanceId,
linkedWorkflowId=workflowId,
name=wfLabel or f"Editor Chat ({workflowId})",
)
chatWorkflowId = chatWorkflow.id if hasattr(chatWorkflow, "id") else chatWorkflow.get("id")
conversationHistory = _editorConversationHistoryFromPersisted(chatInterface, chatWorkflowId)
try:
chatInterface.createMessage({
"workflowId": chatWorkflowId,
"role": "user",
"message": message,
"status": "first" if not conversationHistory else "step",
})
except Exception as e:
logger.error("Editor chat: failed to persist user message: %s", e)
from modules.serviceCenter.core.serviceStreaming import get_event_manager
sseEventManager = get_event_manager()
queueId = _editorChatQueueId(workflowId)
await sseEventManager.cancel_agent(queueId)
queueId = f"ge-chat-{workflowId}-{id(request)}"
sseEventManager.create_queue(queueId)
agentTask = asyncio.ensure_future(
@ -583,8 +515,6 @@ async def post_editor_chat(
fileIds=fileIds,
dataSourceIds=dataSourceIds,
featureDataSourceIds=featureDataSourceIds,
chatInterface=chatInterface,
chatWorkflowId=chatWorkflowId,
)
)
sseEventManager.register_agent_task(queueId, agentTask)
@ -619,80 +549,6 @@ async def post_editor_chat(
)
@router.get("/{instanceId}/{workflowId}/chat/messages")
@limiter.limit("120/minute")
def get_editor_chat_messages(
request: Request,
instanceId: str = Path(..., description="Feature instance ID"),
workflowId: str = Path(..., description="Workflow ID (Automation2Workflow)"),
context: RequestContext = Depends(getRequestContext),
):
"""Return persisted editor-chat messages for an Automation2Workflow.
The chat is stored in ``ChatWorkflow`` with ``linkedWorkflowId == workflowId``;
if no chat has been started yet for this workflow we return an empty list (we
do NOT eagerly create one the row is created on the first POST /chat/stream).
"""
mandateId = _validateInstanceAccess(instanceId, context)
chatInterface = _getEditorChatInterface(context, mandateId, instanceId)
chatWorkflow = chatInterface.getWorkflowByLink(
featureInstanceId=instanceId,
linkedWorkflowId=workflowId,
)
if not chatWorkflow:
return JSONResponse({
"chatWorkflowId": None,
"messages": [],
})
chatWorkflowId = chatWorkflow.id if hasattr(chatWorkflow, "id") else chatWorkflow.get("id")
rawMessages = chatInterface.getMessages(chatWorkflowId) or []
items: List[Dict[str, Any]] = []
for m in rawMessages:
getter = (lambda key, default=None: getattr(m, key, default)) if not isinstance(m, dict) else (lambda key, default=None: m.get(key, default))
role = (getter("role") or "").strip()
content = (getter("message") or "").strip()
if not role or not content:
continue
items.append({
"id": getter("id"),
"role": role,
"content": content,
"timestamp": getter("publishedAt") or 0,
"sequenceNr": getter("sequenceNr") or 0,
})
items.sort(key=lambda x: (float(x.get("timestamp") or 0), int(x.get("sequenceNr") or 0)))
return JSONResponse({
"chatWorkflowId": chatWorkflowId,
"messages": items,
})
@router.post("/{instanceId}/{workflowId}/chat/stop")
@limiter.limit("120/minute")
async def post_editor_chat_stop(
request: Request,
instanceId: str = Path(..., description="Feature instance ID"),
workflowId: str = Path(..., description="Workflow ID"),
context: RequestContext = Depends(getRequestContext),
):
"""Stop a running editor-chat agent for the given workflow."""
_validateInstanceAccess(instanceId, context)
from modules.serviceCenter.core.serviceStreaming import get_event_manager
sseEventManager = get_event_manager()
queueId = _editorChatQueueId(workflowId)
cancelled = await sseEventManager.cancel_agent(queueId)
await sseEventManager.emit_event(queueId, "stopped", {
"type": "stopped",
"workflowId": workflowId,
})
logger.info("Editor chat stop requested for workflow %s, cancelled=%s", workflowId, cancelled)
return JSONResponse({"status": "stopped", "workflowId": workflowId, "cancelled": cancelled})
async def _runEditorAgent(
workflowId: str,
queueId: str,
@ -706,41 +562,12 @@ async def _runEditorAgent(
fileIds: List[str] = None,
dataSourceIds: List[str] = None,
featureDataSourceIds: List[str] = None,
chatInterface=None,
chatWorkflowId: Optional[str] = None,
):
"""Run the serviceAgent loop with workflow toolbox and forward events to the SSE queue.
Persists the assistant response to ``ChatMessage`` (linked via ``chatWorkflowId``)
on FINAL/ERROR. On cancellation any partial accumulated text is still saved so
the editor chat history reflects what the user actually saw on screen.
"""
assistantPersisted = False
def _persistAssistant(text: str) -> None:
nonlocal assistantPersisted
if assistantPersisted or not chatInterface or not chatWorkflowId:
return
cleaned = (text or "").strip()
if not cleaned:
return
try:
chatInterface.createMessage({
"workflowId": chatWorkflowId,
"role": "assistant",
"message": cleaned,
"status": "last",
})
assistantPersisted = True
except Exception as msgErr:
logger.error("Editor chat: failed to persist assistant message: %s", msgErr)
"""Run the serviceAgent loop with workflow toolbox and forward events to the SSE queue."""
try:
from modules.serviceCenter import getService
from modules.serviceCenter.context import ServiceCenterContext
from modules.serviceCenter.services.serviceAgent.datamodelAgent import (
AgentEventTypeEnum, AgentConfig,
)
from modules.serviceCenter.services.serviceAgent.datamodelAgent import AgentEventTypeEnum
ctx = ServiceCenterContext(
user=user,
@ -752,41 +579,11 @@ async def _runEditorAgent(
agentService = getService("agent", ctx)
systemPrompt = (
"You are a workflow EDITOR assistant for the GraphicalEditor. "
"Your ONLY job is to BUILD or MODIFY the workflow graph (nodes + connections) "
"for the user — you must NEVER execute the workflow or any of its actions. "
"Even when the user says 'create a workflow that sends an email', you build the "
"graph (e.g. add an email node, connect it) — you do NOT actually send an email. "
"\n\nGraph-mutating tools: readWorkflowGraph, listAvailableNodeTypes, "
"describeNodeType, addNode, removeNode, connectNodes, setNodeParameter, "
"autoLayoutWorkflow, validateGraph. "
"Connection discovery (for parameters of frontendType='userConnection'): listConnections."
"\n\nMandatory build sequence:"
"\n1. readWorkflowGraph — understand current state."
"\n2. listAvailableNodeTypes — find candidate node ids."
"\n3. For EACH node type you plan to add: call describeNodeType(nodeType=...) "
"to learn its requiredParameters, allowedValues and ports. Never skip this "
"step — guessing parameters leaves the user with empty config cards."
"\n4. If any required parameter has frontendType='userConnection' (e.g. "
"email.checkEmail.connectionReference), call listConnections and pick the "
"connectionId that matches the user's intent (or ask the user if none clearly fits)."
"\n5. addNode with parameters={...} containing AT LEAST every requiredParameter "
"filled with a sensible value (use the user's request, the parameter "
"description, sane defaults, or — for required user-connection fields — "
"an actual connectionId). Do NOT pass position; the layout step handles it."
"\n6. connectNodes — wire the nodes consistent with port schemas from describeNodeType."
"\n7. autoLayoutWorkflow — call exactly once as the LAST graph-mutating step so the "
"canvas shows a readable top-down layout instead of overlapping boxes."
"\n8. validateGraph — sanity check, then answer the user."
"\n\nIf a required parameter cannot be filled from the user's request and has "
"no safe default, ask the user once for that specific value (e.g. recipient "
"address, target language, prompt text) instead of leaving the field blank. "
"Respond concisely in the user's language and list what you changed in the graph."
)
editorConfig = AgentConfig(
toolSet="core",
excludeActionTools=True,
"You are a workflow editor assistant. The user describes changes to a workflow graph. "
"Use the available workflow tools (readWorkflowGraph, addNode, removeNode, connectNodes, "
"setNodeParameter, listAvailableNodeTypes, validateGraph) to modify the graph. "
"Always read the current graph first before making changes. "
"Respond concisely and confirm what you changed."
)
enrichedPrompt = prompt
@ -808,7 +605,6 @@ async def _runEditorAgent(
async for event in agentService.runAgent(
prompt=enrichedPrompt,
fileIds=fileIds or [],
config=editorConfig,
workflowId=workflowId,
userLanguage=userLanguage,
conversationHistory=conversationHistory or [],
@ -835,13 +631,8 @@ async def _runEditorAgent(
await sseEventManager.emit_event(queueId, sseEvent["type"], sseEvent)
if event.type in (AgentEventTypeEnum.FINAL, AgentEventTypeEnum.ERROR):
_persistAssistant(event.content or accumulatedText)
break
# Fallback: any streamed content not yet stored (cancellation path, no FINAL).
if not assistantPersisted and accumulatedText.strip():
_persistAssistant(accumulatedText)
await sseEventManager.emit_event(queueId, "complete", {
"type": "complete",
"workflowId": workflowId,
@ -849,12 +640,6 @@ async def _runEditorAgent(
except asyncio.CancelledError:
logger.info("Editor chat agent task cancelled for workflow %s", workflowId)
# Save whatever the user already saw before cancelling so the next reload
# shows the same partial answer (matches workspace behaviour).
try:
_persistAssistant(accumulatedText if "accumulatedText" in locals() else "")
except Exception:
pass
await sseEventManager.emit_event(queueId, "stopped", {
"type": "stopped",
"workflowId": workflowId,

View file

@ -27,33 +27,15 @@ class DataNeutraliserConfig(PowerOnModel):
)
mandateId: str = Field(
description="ID of the mandate this configuration belongs to",
json_schema_extra={
"label": "Mandanten-ID",
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": True,
"fk_target": {"db": "poweron_app", "table": "Mandate"},
},
json_schema_extra={"label": "Mandanten-ID", "frontend_type": "text", "frontend_readonly": True, "frontend_required": True},
)
featureInstanceId: str = Field(
description="ID of the feature instance this configuration belongs to",
json_schema_extra={
"label": "Feature-Instanz-ID",
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": True,
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
},
json_schema_extra={"label": "Feature-Instanz-ID", "frontend_type": "text", "frontend_readonly": True, "frontend_required": True},
)
userId: str = Field(
description="ID of the user who created this configuration",
json_schema_extra={
"label": "Benutzer-ID",
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": True,
"fk_target": {"db": "poweron_app", "table": "User"},
},
json_schema_extra={"label": "Benutzer-ID", "frontend_type": "text", "frontend_readonly": True, "frontend_required": True},
)
enabled: bool = Field(
default=True,
@ -102,33 +84,15 @@ class DataNeutralizerAttributes(BaseModel):
)
mandateId: str = Field(
description="ID of the mandate this attribute belongs to",
json_schema_extra={
"label": "Mandanten-ID",
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": True,
"fk_target": {"db": "poweron_app", "table": "Mandate"},
},
json_schema_extra={"label": "Mandanten-ID", "frontend_type": "text", "frontend_readonly": True, "frontend_required": True},
)
featureInstanceId: str = Field(
description="ID of the feature instance this attribute belongs to",
json_schema_extra={
"label": "Feature-Instanz-ID",
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": True,
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
},
json_schema_extra={"label": "Feature-Instanz-ID", "frontend_type": "text", "frontend_readonly": True, "frontend_required": True},
)
userId: str = Field(
description="ID of the user who created this attribute",
json_schema_extra={
"label": "Benutzer-ID",
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": True,
"fk_target": {"db": "poweron_app", "table": "User"},
},
json_schema_extra={"label": "Benutzer-ID", "frontend_type": "text", "frontend_readonly": True, "frontend_required": True},
)
originalText: str = Field(
description="Original text that was neutralized",
@ -137,13 +101,7 @@ class DataNeutralizerAttributes(BaseModel):
fileId: Optional[str] = Field(
default=None,
description="ID of the file this attribute belongs to",
json_schema_extra={
"label": "Datei-ID",
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": False,
"fk_target": {"db": "poweron_management", "table": "FileItem"},
},
json_schema_extra={"label": "Datei-ID", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False},
)
patternType: str = Field(
description="Type of pattern that matched (email, phone, name, etc.)",
@ -160,16 +118,16 @@ class DataNeutralizationSnapshot(BaseModel):
)
mandateId: str = Field(
description="Mandate scope",
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate"}},
json_schema_extra={"label": "Mandanten-ID"},
)
featureInstanceId: str = Field(
default="",
description="Feature instance scope",
json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
json_schema_extra={"label": "Feature-Instanz-ID"},
)
userId: str = Field(
description="User who triggered neutralization",
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "User"}},
json_schema_extra={"label": "Benutzer-ID"},
)
sourceLabel: str = Field(
description="Human label, e.g. 'Prompt', 'Kontext', 'Nachricht 3'",

View file

@ -14,7 +14,6 @@ from modules.features.neutralization.datamodelFeatureNeutralizer import (
DataNeutralizationSnapshot,
)
from modules.connectors.connectorDbPostgre import DatabaseConnector
from modules.shared.dbRegistry import registerDatabase
from modules.interfaces.interfaceRbac import getRecordsetWithRBAC
from modules.shared.configuration import APP_CONFIG
from modules.shared.timeUtils import getUtcTimestamp
@ -22,9 +21,6 @@ from modules.datamodels.datamodelUam import User
logger = logging.getLogger(__name__)
neutralizationDatabase = "poweron_neutralization"
registerDatabase(neutralizationDatabase)
# Singleton cache for interface instances
_neutralizerInterfaces = {}
@ -58,7 +54,7 @@ class InterfaceFeatureNeutralizer:
try:
# Use same database config pattern as other feature interfaces
dbHost = APP_CONFIG.get("DB_HOST", "localhost")
dbDatabase = neutralizationDatabase
dbDatabase = "poweron_neutralization"
dbUser = APP_CONFIG.get("DB_USER", "postgres")
dbPassword = APP_CONFIG.get("DB_PASSWORD_SECRET")
dbPort = int(APP_CONFIG.get("DB_PORT", 5432))

View file

@ -284,12 +284,9 @@ class Kanton(PowerOnModel):
id_land: Optional[str] = Field(
None,
description="Land ID (Foreign Key) - eindeutiger Link zum Land, in welchem Land der Kanton liegt",
json_schema_extra={
"frontend_type": "text",
"frontend_readonly": False,
"frontend_required": False,
"fk_target": {"db": "poweron_realestate", "table": "Land"},
},
frontend_type="text",
frontend_readonly=False,
frontend_required=False,
)
abk: Optional[str] = Field(
None,
@ -344,12 +341,9 @@ class Gemeinde(BaseModel):
id_kanton: Optional[str] = Field(
None,
description="Kanton ID (Foreign Key) - eindeutiger Link zum Kanton, in welchem Kanton die Gemeinde liegt",
json_schema_extra={
"frontend_type": "text",
"frontend_readonly": False,
"frontend_required": False,
"fk_target": {"db": "poweron_realestate", "table": "Kanton"},
},
frontend_type="text",
frontend_readonly=False,
frontend_required=False,
)
plz: Optional[str] = Field(
None,
@ -393,23 +387,17 @@ class Parzelle(PowerOnModel):
)
mandateId: str = Field(
description="ID of the mandate",
json_schema_extra={
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": False,
"label": "Mandats-ID",
"fk_target": {"db": "poweron_app", "table": "Mandate"},
},
frontend_type="text",
frontend_readonly=True,
frontend_required=False,
label="Mandats-ID",
)
featureInstanceId: str = Field(
description="ID of the feature instance",
json_schema_extra={
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": False,
"label": "Feature-Instanz-ID",
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
},
frontend_type="text",
frontend_readonly=True,
frontend_required=False,
label="Feature-Instanz-ID",
)
# Grunddaten
@ -468,12 +456,9 @@ class Parzelle(PowerOnModel):
kontextGemeinde: Optional[str] = Field(
None,
description="Municipality ID (Foreign Key)",
json_schema_extra={
"frontend_type": "text",
"frontend_readonly": False,
"frontend_required": False,
"fk_target": {"db": "poweron_realestate", "table": "Gemeinde"},
},
frontend_type="text",
frontend_readonly=False,
frontend_required=False,
)
# Bebauungsparameter
@ -633,23 +618,17 @@ class Projekt(PowerOnModel):
)
mandateId: str = Field(
description="ID of the mandate",
json_schema_extra={
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": False,
"label": "Mandats-ID",
"fk_target": {"db": "poweron_app", "table": "Mandate"},
},
frontend_type="text",
frontend_readonly=True,
frontend_required=False,
label="Mandats-ID",
)
featureInstanceId: str = Field(
description="ID of the feature instance",
json_schema_extra={
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": False,
"label": "Feature-Instanz-ID",
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
},
frontend_type="text",
frontend_readonly=True,
frontend_required=False,
label="Feature-Instanz-ID",
)
label: str = Field(
description="Project designation",

View file

@ -21,7 +21,6 @@ from .datamodelFeatureRealEstate import (
from modules.datamodels.datamodelUam import User
from modules.connectors.connectorDbPostgre import DatabaseConnector
from modules.shared.configuration import APP_CONFIG
from modules.shared.dbRegistry import registerDatabase
from modules.security.rbac import RbacClass
from modules.datamodels.datamodelRbac import AccessRuleContext
from modules.datamodels.datamodelUam import AccessLevel
@ -30,9 +29,6 @@ from modules.datamodels.datamodelPagination import PaginationParams, PaginatedRe
logger = logging.getLogger(__name__)
realEstateDatabase = "poweron_realestate"
registerDatabase(realEstateDatabase)
# Singleton factory for Real Estate interfaces
_realEstateInterfaces = {}
@ -75,7 +71,7 @@ class RealEstateObjects:
try:
# Get database configuration from environment
dbHost = APP_CONFIG.get("DB_HOST", "_no_config_default_data")
dbDatabase = realEstateDatabase
dbDatabase = "poweron_realestate"
dbUser = APP_CONFIG.get("DB_USER")
dbPassword = APP_CONFIG.get("DB_PASSWORD_SECRET")
dbPort = int(APP_CONFIG.get("DB_PORT", 5432))

View file

@ -116,7 +116,7 @@ def _validateInstanceAccess(instanceId: str, context: RequestContext) -> str:
status_code=400,
detail=f"Instance '{instanceId}' is not a realestate instance"
)
if not context.isPlatformAdmin:
if not context.hasSysAdminRole:
featureAccesses = rootInterface.getFeatureAccessesForUser(str(context.user.id))
hasAccess = any(
str(fa.featureInstanceId) == instanceId and fa.enabled

View file

@ -11,7 +11,6 @@ from typing import Dict, Any, List, Optional
from modules.datamodels.datamodelUam import User
from modules.connectors.connectorDbPostgre import DatabaseConnector
from modules.shared.configuration import APP_CONFIG
from modules.shared.dbRegistry import registerDatabase
from .datamodelTeamsbot import (
TeamsbotSession,
@ -25,9 +24,6 @@ from .datamodelTeamsbot import (
logger = logging.getLogger(__name__)
teamsbotDatabase = "poweron_teamsbot"
registerDatabase(teamsbotDatabase)
# Singleton factory
_interfaces = {}
@ -54,7 +50,7 @@ class TeamsbotObjects:
self.userId = str(currentUser.id) if currentUser else "system"
dbHost = APP_CONFIG.get("DB_HOST", "_no_config_default_data")
dbDatabase = teamsbotDatabase
dbDatabase = "poweron_teamsbot"
dbUser = APP_CONFIG.get("DB_USER")
dbPassword = APP_CONFIG.get("DB_PASSWORD_SECRET")
dbPort = int(APP_CONFIG.get("DB_PORT", 5432))

View file

@ -138,7 +138,7 @@ def _validateInstanceAccess(instanceId: str, context: RequestContext) -> str:
def _validateSessionOwnership(session: dict, context: RequestContext) -> None:
"""Raise 404 if the user does not own this session (sysAdmin bypasses)."""
if context.isPlatformAdmin:
if context.hasSysAdminRole:
return
if session.get("startedByUserId") != str(context.user.id):
raise HTTPException(status_code=404, detail=f"Session '{session.get('id')}' not found")
@ -319,7 +319,7 @@ async def listSessions(
"""List sessions for a feature instance (filtered to own sessions unless sysAdmin)."""
_validateInstanceAccess(instanceId, context)
interface = _getInterface(context, instanceId)
userId = None if context.isPlatformAdmin else str(context.user.id)
userId = None if context.hasSysAdminRole else str(context.user.id)
sessions = interface.getSessions(instanceId, includeEnded=includeEnded, userId=userId)
return {"sessions": sessions}

View file

@ -46,7 +46,6 @@ class TrusteeOrganisation(PowerOnModel):
description="Mandate ID (system-level organisation)",
json_schema_extra={
"label": "Mandat",
"fk_target": {"db": "poweron_app", "table": "Mandate"},
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": False
@ -57,7 +56,6 @@ class TrusteeOrganisation(PowerOnModel):
description="Feature Instance ID for instance-level isolation",
json_schema_extra={
"label": "Feature-Instanz",
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": False
@ -92,7 +90,6 @@ class TrusteeRole(PowerOnModel):
description="Mandate ID",
json_schema_extra={
"label": "Mandat",
"fk_target": {"db": "poweron_app", "table": "Mandate"},
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": False
@ -103,7 +100,6 @@ class TrusteeRole(PowerOnModel):
description="Feature Instance ID for instance-level isolation",
json_schema_extra={
"label": "Feature-Instanz",
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": False
@ -131,8 +127,7 @@ class TrusteeAccess(PowerOnModel):
"frontend_type": "select",
"frontend_readonly": False,
"frontend_required": True,
"frontend_options": "/api/trustee/{instanceId}/organisations/options",
"fk_target": {"db": "poweron_trustee", "table": "TrusteeOrganisation"},
"frontend_options": "/api/trustee/{instanceId}/organisations/options"
}
)
roleId: str = Field(
@ -142,8 +137,7 @@ class TrusteeAccess(PowerOnModel):
"frontend_type": "select",
"frontend_readonly": False,
"frontend_required": True,
"frontend_options": "/api/trustee/{instanceId}/roles/options",
"fk_target": {"db": "poweron_trustee", "table": "TrusteeRole"},
"frontend_options": "/api/trustee/{instanceId}/roles/options"
}
)
userId: str = Field(
@ -153,8 +147,7 @@ class TrusteeAccess(PowerOnModel):
"frontend_type": "select",
"frontend_readonly": False,
"frontend_required": True,
"frontend_options": "/api/users/options",
"fk_target": {"db": "poweron_app", "table": "User"},
"frontend_options": "/api/users/options"
}
)
contractId: Optional[str] = Field(
@ -166,8 +159,7 @@ class TrusteeAccess(PowerOnModel):
"frontend_readonly": False,
"frontend_required": False,
"frontend_options": "/api/trustee/{instanceId}/contracts/options",
"frontend_depends_on": "organisationId",
"fk_target": {"db": "poweron_trustee", "table": "TrusteeContract"},
"frontend_depends_on": "organisationId"
}
)
mandateId: Optional[str] = Field(
@ -175,7 +167,6 @@ class TrusteeAccess(PowerOnModel):
description="Mandate ID",
json_schema_extra={
"label": "Mandat",
"fk_target": {"db": "poweron_app", "table": "Mandate"},
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": False
@ -186,7 +177,6 @@ class TrusteeAccess(PowerOnModel):
description="Feature Instance ID for instance-level isolation",
json_schema_extra={
"label": "Feature-Instanz",
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": False
@ -214,8 +204,7 @@ class TrusteeContract(PowerOnModel):
"frontend_type": "select",
"frontend_readonly": False, # Editable at creation, then readonly
"frontend_required": True,
"frontend_options": "/api/trustee/{instanceId}/organisations/options",
"fk_target": {"db": "poweron_trustee", "table": "TrusteeOrganisation"},
"frontend_options": "/api/trustee/{instanceId}/organisations/options"
}
)
label: str = Field(
@ -242,7 +231,6 @@ class TrusteeContract(PowerOnModel):
description="Mandate ID",
json_schema_extra={
"label": "Mandat",
"fk_target": {"db": "poweron_app", "table": "Mandate"},
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": False
@ -253,7 +241,6 @@ class TrusteeContract(PowerOnModel):
description="Feature Instance ID for instance-level isolation",
json_schema_extra={
"label": "Feature-Instanz",
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": False
@ -310,8 +297,7 @@ class TrusteeDocument(PowerOnModel):
"label": "Datei-Referenz",
"frontend_type": "file_reference",
"frontend_readonly": False,
"frontend_required": False,
"fk_target": {"db": "poweron_management", "table": "FileItem"},
"frontend_required": False
}
)
documentName: str = Field(
@ -359,7 +345,6 @@ class TrusteeDocument(PowerOnModel):
description="Mandate ID (auto-set from context)",
json_schema_extra={
"label": "Mandat",
"fk_target": {"db": "poweron_app", "table": "Mandate"},
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": False,
@ -371,7 +356,6 @@ class TrusteeDocument(PowerOnModel):
description="Feature Instance ID for instance-level isolation (auto-set from context)",
json_schema_extra={
"label": "Feature-Instanz",
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": False,
@ -438,8 +422,7 @@ class TrusteePosition(PowerOnModel):
"frontend_type": "select",
"frontend_readonly": False,
"frontend_required": False,
"frontend_options": "/api/trustee/{instanceId}/documents/options",
"fk_target": {"db": "poweron_trustee", "table": "TrusteeDocument"},
"frontend_options": "/api/trustee/{instanceId}/documents/options"
}
)
bankDocumentId: Optional[str] = Field(
@ -450,8 +433,7 @@ class TrusteePosition(PowerOnModel):
"frontend_type": "select",
"frontend_readonly": False,
"frontend_required": False,
"frontend_options": "/api/trustee/{instanceId}/documents/options",
"fk_target": {"db": "poweron_trustee", "table": "TrusteeDocument"},
"frontend_options": "/api/trustee/{instanceId}/documents/options"
}
)
valuta: Optional[str] = Field(
@ -695,7 +677,6 @@ class TrusteePosition(PowerOnModel):
description="Mandate ID (auto-set from context)",
json_schema_extra={
"label": "Mandat",
"fk_target": {"db": "poweron_app", "table": "Mandate"},
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": False,
@ -707,7 +688,6 @@ class TrusteePosition(PowerOnModel):
description="Feature Instance ID for instance-level isolation (auto-set from context)",
json_schema_extra={
"label": "Feature-Instanz",
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": False,
@ -738,8 +718,8 @@ class TrusteeDataAccount(PowerOnModel):
accountGroup: Optional[str] = Field(default=None, description="Account group/category", json_schema_extra={"label": "Gruppe"})
currency: str = Field(default="CHF", description="Account currency", json_schema_extra={"label": "Währung"})
isActive: bool = Field(default=True, json_schema_extra={"label": "Aktiv"})
mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat", "fk_target": {"db": "poweron_app", "table": "Mandate"}})
featureInstanceId: Optional[str] = Field(default=None, json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}})
mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat"})
featureInstanceId: Optional[str] = Field(default=None, json_schema_extra={"label": "Feature-Instanz"})
@i18nModel("Buchung (Sync)")
class TrusteeDataJournalEntry(PowerOnModel):
@ -751,14 +731,14 @@ class TrusteeDataJournalEntry(PowerOnModel):
description: str = Field(default="", description="Booking text", json_schema_extra={"label": "Beschreibung"})
currency: str = Field(default="CHF", json_schema_extra={"label": "Währung"})
totalAmount: float = Field(default=0.0, description="Total amount of entry", json_schema_extra={"label": "Betrag"})
mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat", "fk_target": {"db": "poweron_app", "table": "Mandate"}})
featureInstanceId: Optional[str] = Field(default=None, json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}})
mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat"})
featureInstanceId: Optional[str] = Field(default=None, json_schema_extra={"label": "Feature-Instanz"})
@i18nModel("Buchungszeile (Sync)")
class TrusteeDataJournalLine(PowerOnModel):
"""Journal entry line (debit/credit) synced from external accounting system."""
id: str = Field(default_factory=lambda: str(uuid.uuid4()), json_schema_extra={"label": "ID"})
journalEntryId: str = Field(description="FK → TrusteeDataJournalEntry.id", json_schema_extra={"label": "Buchung", "fk_target": {"db": "poweron_trustee", "table": "TrusteeDataJournalEntry"}})
journalEntryId: str = Field(description="FK → TrusteeDataJournalEntry.id", json_schema_extra={"label": "Buchung"})
accountNumber: str = Field(description="Account number", json_schema_extra={"label": "Konto"})
debitAmount: float = Field(default=0.0, json_schema_extra={"label": "Soll"})
creditAmount: float = Field(default=0.0, json_schema_extra={"label": "Haben"})
@ -766,8 +746,8 @@ class TrusteeDataJournalLine(PowerOnModel):
taxCode: Optional[str] = Field(default=None, json_schema_extra={"label": "Steuercode"})
costCenter: Optional[str] = Field(default=None, json_schema_extra={"label": "Kostenstelle"})
description: str = Field(default="", json_schema_extra={"label": "Beschreibung"})
mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat", "fk_target": {"db": "poweron_app", "table": "Mandate"}})
featureInstanceId: Optional[str] = Field(default=None, json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}})
mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat"})
featureInstanceId: Optional[str] = Field(default=None, json_schema_extra={"label": "Feature-Instanz"})
@i18nModel("Kontakt (Sync)")
class TrusteeDataContact(PowerOnModel):
@ -784,8 +764,8 @@ class TrusteeDataContact(PowerOnModel):
email: Optional[str] = Field(default=None, json_schema_extra={"label": "E-Mail"})
phone: Optional[str] = Field(default=None, json_schema_extra={"label": "Telefon"})
vatNumber: Optional[str] = Field(default=None, json_schema_extra={"label": "MWST-Nr."})
mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat", "fk_target": {"db": "poweron_app", "table": "Mandate"}})
featureInstanceId: Optional[str] = Field(default=None, json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}})
mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat"})
featureInstanceId: Optional[str] = Field(default=None, json_schema_extra={"label": "Feature-Instanz"})
@i18nModel("Kontosaldo (Sync)")
class TrusteeDataAccountBalance(PowerOnModel):
@ -799,8 +779,8 @@ class TrusteeDataAccountBalance(PowerOnModel):
creditTotal: float = Field(default=0.0, json_schema_extra={"label": "Haben-Umsatz"})
closingBalance: float = Field(default=0.0, json_schema_extra={"label": "Schlusssaldo"})
currency: str = Field(default="CHF", json_schema_extra={"label": "Währung"})
mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat", "fk_target": {"db": "poweron_app", "table": "Mandate"}})
featureInstanceId: Optional[str] = Field(default=None, json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}})
mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat"})
featureInstanceId: Optional[str] = Field(default=None, json_schema_extra={"label": "Feature-Instanz"})
@i18nModel("Buchhaltungs-Konfiguration")
class TrusteeAccountingConfig(PowerOnModel):
@ -810,7 +790,7 @@ class TrusteeAccountingConfig(PowerOnModel):
Credentials are stored encrypted (decrypted at runtime by the AccountingBridge).
"""
id: str = Field(default_factory=lambda: str(uuid.uuid4()), json_schema_extra={"label": "ID"})
featureInstanceId: str = Field(description="FK -> FeatureInstance.id (1:1)", json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}})
featureInstanceId: str = Field(description="FK -> FeatureInstance.id (1:1)", json_schema_extra={"label": "Feature-Instanz"})
connectorType: str = Field(description="Connector type key, e.g. 'rma', 'bexio', 'abacus'", json_schema_extra={"label": "System"})
displayLabel: str = Field(default="", description="User-visible label for this integration", json_schema_extra={"label": "Bezeichnung"})
encryptedConfig: str = Field(default="", description="Encrypted JSON blob with connector credentials", json_schema_extra={"label": "Verschlüsselte Konfiguration"})
@ -820,7 +800,7 @@ class TrusteeAccountingConfig(PowerOnModel):
lastSyncErrorMessage: Optional[str] = Field(default=None, description="Error message when lastSyncStatus is error", json_schema_extra={"label": "Fehlermeldung"})
cachedChartOfAccounts: Optional[str] = Field(default=None, description="JSON-serialised chart of accounts cache (list of {accountNumber, label, accountType})", json_schema_extra={"label": "Cached Kontoplan"})
chartCachedAt: Optional[float] = Field(default=None, description="Timestamp when cachedChartOfAccounts was last refreshed", json_schema_extra={"label": "Kontoplan-Cache-Zeitpunkt"})
mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat", "fk_target": {"db": "poweron_app", "table": "Mandate"}})
mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat"})
@i18nModel("Buchhaltungs-Synchronisation")
class TrusteeAccountingSync(PowerOnModel):
@ -829,11 +809,8 @@ class TrusteeAccountingSync(PowerOnModel):
Used for duplicate prevention, audit trail, and retry logic.
"""
id: str = Field(default_factory=lambda: str(uuid.uuid4()), json_schema_extra={"label": "ID"})
positionId: str = Field(
description="FK -> TrusteePosition.id",
json_schema_extra={"label": "Position", "fk_target": {"db": "poweron_trustee", "table": "TrusteePosition"}},
)
featureInstanceId: str = Field(description="FK -> FeatureInstance.id", json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}})
positionId: str = Field(description="FK -> TrusteePosition.id", json_schema_extra={"label": "Position"})
featureInstanceId: str = Field(description="FK -> FeatureInstance.id", json_schema_extra={"label": "Feature-Instanz"})
connectorType: str = Field(description="Connector type at time of sync", json_schema_extra={"label": "System"})
externalId: Optional[str] = Field(default=None, description="ID assigned by the external system", json_schema_extra={"label": "Externe ID"})
externalReference: Optional[str] = Field(default=None, description="Reference in the external system", json_schema_extra={"label": "Externe Referenz"})
@ -842,5 +819,5 @@ class TrusteeAccountingSync(PowerOnModel):
syncedAt: Optional[float] = Field(default=None, description="Timestamp of successful sync", json_schema_extra={"label": "Synchronisiert am"})
errorMessage: Optional[str] = Field(default=None, json_schema_extra={"label": "Fehler"})
bookingPayload: Optional[dict] = Field(default=None, description="Payload sent to the external system (audit)", json_schema_extra={"label": "Buchungs-Payload"})
mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat", "fk_target": {"db": "poweron_app", "table": "Mandate"}})
mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat"})

View file

@ -14,7 +14,6 @@ from pydantic import ValidationError
from modules.connectors.connectorDbPostgre import DatabaseConnector
from modules.shared.configuration import APP_CONFIG
from modules.shared.dbRegistry import registerDatabase
from modules.interfaces.interfaceRbac import getRecordsetWithRBAC, getRecordsetPaginatedWithRBAC, getDistinctColumnValuesWithRBAC
from modules.security.rbac import RbacClass
from modules.datamodels.datamodelUam import User, AccessLevel
@ -31,9 +30,6 @@ from modules.datamodels.datamodelPagination import PaginationParams, PaginatedRe
logger = logging.getLogger(__name__)
trusteeDatabase = "poweron_trustee"
registerDatabase(trusteeDatabase)
# Singleton factory for TrusteeObjects instances per context
_trusteeInterfaces = {}
@ -280,7 +276,7 @@ class TrusteeObjects:
"""Initializes the database connection directly."""
try:
dbHost = APP_CONFIG.get("DB_HOST", "_no_config_default_data")
dbDatabase = trusteeDatabase
dbDatabase = "poweron_trustee"
dbUser = APP_CONFIG.get("DB_USER")
dbPassword = APP_CONFIG.get("DB_PASSWORD_SECRET")
dbPort = int(APP_CONFIG.get("DB_PORT", 5432))

View file

@ -104,7 +104,7 @@ def _validateInstanceAccess(instanceId: str, context: RequestContext) -> str:
)
# Verify user has access to this instance
if not context.isPlatformAdmin:
if not context.hasSysAdminRole:
# Check if user has FeatureAccess for this instance
featureAccesses = rootInterface.getFeatureAccessesForUser(str(context.user.id))
hasAccess = any(
@ -138,7 +138,7 @@ def getQuickActions(
from .mainTrustee import QUICK_ACTIONS, QUICK_ACTION_CATEGORIES
userRoleLabels: set = set()
if context.isPlatformAdmin:
if context.hasSysAdminRole:
userRoleLabels.add("trustee-admin")
else:
rootInterface = getRootInterface()
@ -156,9 +156,9 @@ def getQuickActions(
filteredActions = []
for action in QUICK_ACTIONS:
required = set(action.get("requiredRoles", []))
if not userRoleLabels and not context.isPlatformAdmin:
if not userRoleLabels and not context.hasSysAdminRole:
continue
if context.isPlatformAdmin or required.intersection(userRoleLabels):
if context.hasSysAdminRole or required.intersection(userRoleLabels):
resolved = {
"id": action["id"],
"label": resolveText(action.get("label", {})),
@ -1563,13 +1563,7 @@ async def sync_positions_to_accounting(
raise HTTPException(status_code=400, detail=routeApiMsg("positionIds required"))
results = await bridge.pushBatchToAccounting(instanceId, positionIds)
skipped = [r for r in results if not r.success and r.errorMessage and "already synced" in r.errorMessage]
failed = [r for r in results if not r.success and r not in skipped]
if skipped:
logger.info(
"Accounting sync: %s position(s) already synced, skipped",
len(skipped),
)
failed = [r for r in results if not r.success]
if failed:
logger.warning(
"Accounting sync had %s failure(s): %s",
@ -1579,8 +1573,7 @@ async def sync_positions_to_accounting(
return {
"total": len(results),
"success": sum(1 for r in results if r.success),
"skipped": len(skipped),
"errors": len(failed),
"errors": sum(1 for r in results if not r.success),
"results": [r.model_dump() for r in results],
}
@ -1811,7 +1804,7 @@ def _validateInstanceAdmin(instanceId: str, context: RequestContext) -> str:
mandateId = _validateInstanceAccess(instanceId, context)
# SysAdmin role always has access
if context.isPlatformAdmin:
if context.hasSysAdminRole:
return mandateId
# Check for instance-roles.manage resource permission via AccessRules

View file

@ -19,33 +19,15 @@ class WorkspaceUserSettings(PowerOnModel):
)
userId: str = Field(
description="User ID",
json_schema_extra={
"label": "Benutzer-ID",
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": True,
"fk_target": {"db": "poweron_app", "table": "User"},
},
json_schema_extra={"label": "Benutzer-ID", "frontend_type": "text", "frontend_readonly": True, "frontend_required": True},
)
mandateId: str = Field(
description="Mandate ID",
json_schema_extra={
"label": "Mandanten-ID",
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": True,
"fk_target": {"db": "poweron_app", "table": "Mandate"},
},
json_schema_extra={"label": "Mandanten-ID", "frontend_type": "text", "frontend_readonly": True, "frontend_required": True},
)
featureInstanceId: str = Field(
description="Feature Instance ID",
json_schema_extra={
"label": "Feature-Instanz-ID",
"frontend_type": "text",
"frontend_readonly": True,
"frontend_required": True,
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
},
json_schema_extra={"label": "Feature-Instanz-ID", "frontend_type": "text", "frontend_readonly": True, "frontend_required": True},
)
maxAgentRounds: Optional[int] = Field(
default=None,

View file

@ -9,7 +9,6 @@ import logging
from typing import Dict, Any, Optional
from modules.connectors.connectorDbPostgre import DatabaseConnector
from modules.shared.dbRegistry import registerDatabase
from modules.datamodels.datamodelUam import User
from modules.features.workspace.datamodelFeatureWorkspace import WorkspaceUserSettings
from modules.interfaces.interfaceRbac import getRecordsetWithRBAC
@ -18,9 +17,6 @@ from modules.shared.configuration import APP_CONFIG
logger = logging.getLogger(__name__)
workspaceDatabase = "poweron_workspace"
registerDatabase(workspaceDatabase)
_workspaceInterfaces: Dict[str, "WorkspaceObjects"] = {}
@ -43,7 +39,7 @@ class WorkspaceObjects:
def _initializeDatabase(self):
dbHost = APP_CONFIG.get("DB_HOST", "_no_config_default_data")
dbDatabase = workspaceDatabase
dbDatabase = "poweron_workspace"
dbUser = APP_CONFIG.get("DB_USER")
dbPassword = APP_CONFIG.get("DB_PASSWORD_SECRET")
dbPort = int(APP_CONFIG.get("DB_PORT", 5432))

View file

@ -1464,18 +1464,18 @@ async def listFeatureConnectionTables(
tables = []
for obj in accessible:
meta = obj.get("meta", {})
if meta.get("wildcard"):
continue
node = {
"objectKey": obj.get("objectKey", ""),
"tableName": meta.get("table", ""),
"label": resolveText(obj.get("label", "")),
"fields": meta.get("fields", []),
"isParent": bool(meta.get("isParent", False)),
"parentTable": meta.get("parentTable") or None,
"parentKey": meta.get("parentKey") or None,
"displayFields": meta.get("displayFields", []),
}
if meta.get("isParent"):
node["isParent"] = True
node["displayFields"] = meta.get("displayFields", [])
if meta.get("parentTable"):
node["parentTable"] = meta["parentTable"]
node["parentKey"] = meta.get("parentKey", "")
tables.append(node)
return JSONResponse({"tables": tables})

View file

@ -7,7 +7,7 @@ Contains all bootstrap logic including mandate, users, and RBAC rules.
Multi-Tenant Design:
- Rollen werden mit Kontext erstellt (mandateId=None für globale Template-Rollen)
- AccessRules referenzieren roleId (FK), nicht roleLabel
- Admin-User bekommt isSysAdmin=True UND isPlatformAdmin=True (statt einer Rolle)
- Admin-User bekommt isSysAdmin=True statt roleLabels
"""
import logging
@ -61,7 +61,6 @@ def initBootstrap(db: DatabaseConnector) -> None:
# Migrate existing mandate records: description -> label
_migrateMandateDescriptionToLabel(db)
_migrateMandateNameLabelSlugRules(db)
# Clean up duplicate roles and fix corrupted templates FIRST
_deduplicateRoles(db)
@ -76,14 +75,12 @@ def initBootstrap(db: DatabaseConnector) -> None:
# This also serves as migration for existing mandates that don't have instance roles yet
_ensureAllMandatesHaveSystemRoles(db)
# Migration: eliminate the legacy ``sysadmin`` role in root mandate
# (replaced by ``User.isPlatformAdmin`` flag — see
# wiki/c-work/4-done/2026-04-sysadmin-authority-split.md).
# Idempotent: noop after first successful run.
# Initialize sysadmin role in root mandate (NOT a template, mandate-specific)
# Hybrid model: isSysAdmin flag → system ops, sysadmin role → admin ops via RBAC
if mandateId:
_migrateAndDropSysAdminRole(db, mandateId)
# Ensure UI rules for navigation items (admin/user/viewer roles)
_initSysAdminRole(db, mandateId)
# Ensure UI rules for sysadmin role (created after initRbacRules, needs second pass)
_ensureUiContextRules(db)
# Initialize admin user
@ -162,12 +159,11 @@ def _bootstrapSystemTemplates(db: DatabaseConnector) -> None:
"""
try:
from modules.features.graphicalEditor.datamodelFeatureGraphicalEditor import AutoWorkflow
from modules.features.graphicalEditor.interfaceFeatureGraphicalEditor import graphicalEditorDatabase
import uuid
greenfieldDb = DatabaseConnector(
dbHost=APP_CONFIG.get("DB_HOST", "localhost"),
dbDatabase=graphicalEditorDatabase,
dbDatabase="poweron_graphicaleditor",
dbUser=APP_CONFIG.get("DB_USER"),
dbPassword=APP_CONFIG.get("DB_PASSWORD_SECRET") or APP_CONFIG.get("DB_PASSWORD"),
)
@ -423,101 +419,32 @@ def _migrateMandateDescriptionToLabel(db: DatabaseConnector) -> None:
logger.debug("No mandate description->label migration needed")
def _migrateMandateNameLabelSlugRules(db: DatabaseConnector) -> None:
"""
Migration: normalize Mandate.name to the slug rules ([a-z0-9-], length 2..32, single
hyphen segments) and ensure Mandate.label is non-empty.
Rules (see wiki/c-work/1-plan/2026-04-mandate-name-label-logic.md):
1. If ``label`` is empty/None set ``label := name`` (or "Mandate" when both empty).
2. If ``name`` is not a valid slug, or collides with an earlier mandate in stable id
order, allocate a unique slug from the (now non-empty) ``label`` using
``slugifyMandateName`` + ``allocateUniqueMandateSlug``.
Idempotent: a second run is a no-op because all valid names stay valid and stay unique.
Each rename and label fill-in is logged for audit.
"""
from modules.shared.mandateNameUtils import (
allocateUniqueMandateSlug,
isValidMandateName,
slugifyMandateName,
)
allRows = db.getRecordset(Mandate)
if not allRows:
return
sortedRows = sorted(allRows, key=lambda r: str(r.get("id", "")))
used: set[str] = set()
labelFills = 0
nameRenames: list[tuple[str, str, str]] = []
for rec in sortedRows:
mid = rec.get("id")
if not mid:
continue
name = (rec.get("name") or "").strip()
labelRaw = rec.get("label")
label = (labelRaw or "").strip() if labelRaw is not None else ""
if not label:
label = name if name else "Mandate"
db.recordModify(Mandate, mid, {"label": label})
labelFills += 1
logger.info(f"Mandate {mid}: filled empty label with '{label}'")
nameFits = isValidMandateName(name)
nameCollides = name in used
if nameFits and not nameCollides:
used.add(name)
continue
base = slugifyMandateName(label) or "mn"
newName = allocateUniqueMandateSlug(base, used)
used.add(newName)
if newName != name:
db.recordModify(Mandate, mid, {"name": newName})
nameRenames.append((str(mid), name, newName))
logger.info(f"Mandate {mid}: renamed name '{name}' -> '{newName}'")
if labelFills or nameRenames:
logger.info(
"Mandate name/label slug migration: %d label fill-in(s), %d name rename(s)",
labelFills, len(nameRenames),
)
else:
logger.debug("No mandate name/label slug migration needed")
def initAdminUser(db: DatabaseConnector, mandateId: Optional[str]) -> Optional[str]:
"""
Creates the Admin user if it doesn't exist.
Admin user gets BOTH platform flags:
- isSysAdmin=True (Infrastructure: logs/tokens/DB-health)
- isPlatformAdmin=True (Cross-Mandate-Governance: user/mandate/RBAC mgmt)
Admin user gets isSysAdmin=True for system-level access.
Role assignment is done via UserMandate + UserMandateRole in assignInitialUserMemberships().
Args:
db: Database connector instance
mandateId: Root mandate ID (for membership assignment, not on User)
Returns:
User ID if created or found, None otherwise
"""
existingUsers = db.getRecordset(UserInDB, recordFilter={"username": "admin"})
if existingUsers:
userId = existingUsers[0].get("id")
updates: Dict[str, bool] = {}
if not existingUsers[0].get("isSysAdmin", False):
updates["isSysAdmin"] = True
if not existingUsers[0].get("isPlatformAdmin", False):
updates["isPlatformAdmin"] = True
if updates:
logger.info(f"Updating admin user {userId} platform flags: {updates}")
db.recordModify(UserInDB, userId, updates)
existingIsSysAdmin = existingUsers[0].get("isSysAdmin", False)
# Ensure admin user has isSysAdmin=True
if not existingIsSysAdmin:
logger.info(f"Updating admin user {userId} to set isSysAdmin=True")
db.recordModify(UserInDB, userId, {"isSysAdmin": True})
logger.info(f"Admin user already exists with ID {userId}")
return userId
logger.info("Creating Admin user")
adminUser = UserInDB(
username="admin",
@ -526,7 +453,6 @@ def initAdminUser(db: DatabaseConnector, mandateId: Optional[str]) -> Optional[s
enabled=True,
language="en",
isSysAdmin=True,
isPlatformAdmin=True,
authenticationAuthority=AuthAuthority.LOCAL,
hashedPassword=_getPasswordHash(APP_CONFIG.get("APP_INIT_PASS_ADMIN_SECRET")),
)
@ -539,30 +465,22 @@ def initAdminUser(db: DatabaseConnector, mandateId: Optional[str]) -> Optional[s
def initEventUser(db: DatabaseConnector, mandateId: Optional[str]) -> Optional[str]:
"""
Creates the Event user if it doesn't exist.
Event user gets isSysAdmin=True for infrastructure-level operations
(system events, internal callbacks). It does NOT need cross-mandate
governance, so isPlatformAdmin is left False.
Event user gets isSysAdmin=True for system operations.
Role assignment is done via UserMandate + UserMandateRole in assignInitialUserMemberships().
Args:
db: Database connector instance
mandateId: Root mandate ID (for membership assignment, not on User)
Returns:
User ID if created or found, None otherwise
"""
existingUsers = db.getRecordset(UserInDB, recordFilter={"username": "event"})
if existingUsers:
userId = existingUsers[0].get("id")
# Defensive: revoke any historic platform-admin grant on the event user
if existingUsers[0].get("isPlatformAdmin", False):
logger.warning(
f"Event user {userId} had isPlatformAdmin=True; "
f"revoking (event user is infrastructure-only)"
)
db.recordModify(UserInDB, userId, {"isPlatformAdmin": False})
logger.info(f"Event user already exists with ID {userId}")
return userId
logger.info("Creating Event user")
eventUser = UserInDB(
username="event",
@ -571,7 +489,6 @@ def initEventUser(db: DatabaseConnector, mandateId: Optional[str]) -> Optional[s
enabled=True,
language="en",
isSysAdmin=True,
isPlatformAdmin=False,
authenticationAuthority=AuthAuthority.LOCAL,
hashedPassword=_getPasswordHash(APP_CONFIG.get("APP_INIT_PASS_EVENT_SECRET")),
)
@ -585,19 +502,20 @@ def initRoles(db: DatabaseConnector) -> None:
"""
Initialize standard roles if they don't exist.
Roles are created as GLOBAL (mandateId=None) template roles.
NOTE: There is no platform-level "sysadmin" role any more platform
authority lives on the User record via ``isSysAdmin`` and
``isPlatformAdmin``. These template roles (admin/user/viewer) are
purely for mandate/feature-level access control.
NOTE: The "sysadmin" role is NOT a template - it's created separately in
_initSysAdminRole() as a root-mandate-specific role (isSystemRole=False).
These template roles (admin/user/viewer) are for mandate/feature-level access control.
Args:
db: Database connector instance
"""
logger.info("Initializing roles")
global _roleIdCache
_roleIdCache = {}
# Standard template roles for mandate/feature-level access
# NOTE: "sysadmin" role is created separately in _initSysAdminRole (root mandate only)
standardRoles = [
Role(
roleLabel="admin",
@ -815,99 +733,145 @@ def copySystemRolesToMandate(db: DatabaseConnector, mandateId: str) -> int:
return copiedCount
def _migrateAndDropSysAdminRole(db: DatabaseConnector, mandateId: str) -> None:
def _initSysAdminRole(db: DatabaseConnector, mandateId: str) -> Optional[str]:
"""
One-shot migration: eliminate the legacy ``sysadmin`` role in the root mandate.
Authority semantics moved to two orthogonal flags on User:
- ``isSysAdmin`` Infrastructure-Operator (RBAC bypass)
- ``isPlatformAdmin`` Cross-Mandate-Governance (no bypass)
Migration steps (idempotent):
1. Find sysadmin role(s) in root mandate. If none exist done.
2. For every UserMandateRole row referencing such a role: set
``user.isPlatformAdmin = True`` (preserves cross-mandate authority).
3. Delete those UserMandateRole rows.
4. Delete AccessRules attached to the sysadmin role.
5. Delete the sysadmin Role record.
Initialize the sysadmin role in the root mandate.
The sysadmin role is a mandate-specific role (NOT a system template) that provides
full administrative access via RBAC. It only exists in the root mandate and is
NOT copied to other mandates (isSystemRole=False).
Hybrid model:
- User.isSysAdmin flag true system operations (Category A: tokens, logs, databases)
- sysadmin role admin operations via RBAC (Categories B/C/D/E)
Args:
db: Database connector instance
mandateId: Root mandate ID
Returns:
Sysadmin role ID or None
"""
sysadminRoles = db.getRecordset(
# Check if sysadmin role already exists in root mandate
existingRoles = db.getRecordset(
Role,
recordFilter={"roleLabel": "sysadmin", "mandateId": mandateId, "featureInstanceId": None},
recordFilter={"roleLabel": "sysadmin", "mandateId": mandateId, "featureInstanceId": None}
)
if not sysadminRoles:
logger.debug("Sysadmin role migration: no legacy sysadmin role present, nothing to do")
if existingRoles:
sysadminRoleId = existingRoles[0].get("id")
logger.info(f"Sysadmin role already exists in root mandate with ID {sysadminRoleId}")
# Ensure AccessRules exist (migration safety)
_ensureSysAdminAccessRules(db, sysadminRoleId)
return sysadminRoleId
# Create sysadmin role in root mandate
logger.info("Creating sysadmin role in root mandate")
sysadminRole = Role(
roleLabel="sysadmin",
description=coerce_text_multilingual("System-Administrator - Vollständiger administrativer Zugriff über alle Mandanten"),
mandateId=mandateId,
featureInstanceId=None,
featureCode=None,
isSystemRole=False # NOT a template → NOT copied to other mandates
)
createdRole = db.recordCreate(Role, sysadminRole)
sysadminRoleId = createdRole.get("id")
logger.info(f"Created sysadmin role with ID {sysadminRoleId}")
# Create AccessRules for sysadmin role
_createSysAdminAccessRules(db, sysadminRoleId)
return sysadminRoleId
def _createSysAdminAccessRules(db: DatabaseConnector, sysadminRoleId: str) -> None:
"""
Create AccessRules for the sysadmin role.
DATA + RESOURCE: generic item=None (full access).
UI: NO generic rule here explicit ui.admin.* rules are created by
_ensureUiContextRules() (same logic as admin role).
Args:
db: Database connector instance
sysadminRoleId: Sysadmin role ID
"""
rules = [
# DATA: Full access to all data tables (generic rule, item=None)
AccessRule(
roleId=sysadminRoleId,
context=AccessRuleContext.DATA,
item=None,
view=True,
read=AccessLevel.ALL,
create=AccessLevel.ALL,
update=AccessLevel.ALL,
delete=AccessLevel.ALL,
),
# RESOURCE: Access to all system resources (generic rule, item=None)
AccessRule(
roleId=sysadminRoleId,
context=AccessRuleContext.RESOURCE,
item=None,
view=True,
read=None,
create=None,
update=None,
delete=None,
),
]
for rule in rules:
db.recordCreate(AccessRule, rule)
logger.info(f"Created {len(rules)} AccessRules for sysadmin role (UI rules via _ensureUiContextRules)")
def _ensureSysAdminAccessRules(db: DatabaseConnector, sysadminRoleId: str) -> None:
"""
Ensure AccessRules exist for the sysadmin role (migration safety).
Creates missing rules without duplicating existing ones.
Args:
db: Database connector instance
sysadminRoleId: Sysadmin role ID
"""
existingRules = db.getRecordset(AccessRule, recordFilter={"roleId": sysadminRoleId})
if not existingRules:
logger.info("No AccessRules found for sysadmin role, creating them")
_createSysAdminAccessRules(db, sysadminRoleId)
return
sysadminRoleIds = [str(r.get("id")) for r in sysadminRoles if r.get("id")]
logger.warning(
f"Sysadmin role migration: found {len(sysadminRoleIds)} legacy sysadmin role(s) "
f"in root mandate, migrating to isPlatformAdmin flag"
)
# 1) Promote every holder to isPlatformAdmin=True
promoted = 0
for sysadminRoleId in sysadminRoleIds:
umRoleRows = db.getRecordset(
UserMandateRole, recordFilter={"roleId": sysadminRoleId}
)
userMandateIds = [str(r.get("userMandateId")) for r in umRoleRows if r.get("userMandateId")]
if not userMandateIds:
continue
# Resolve userIds via UserMandate
userIds = set()
for umId in userMandateIds:
ums = db.getRecordset(UserMandate, recordFilter={"id": umId})
for um in ums:
uid = um.get("userId") if isinstance(um, dict) else getattr(um, "userId", None)
if uid:
userIds.add(str(uid))
for userId in userIds:
users = db.getRecordset(UserInDB, recordFilter={"id": userId})
if not users:
continue
current = users[0].get("isPlatformAdmin", False)
if not current:
db.recordModify(UserInDB, userId, {"isPlatformAdmin": True})
promoted += 1
logger.warning(
f"Sysadmin role migration: granted isPlatformAdmin=True to user {userId}"
)
# 2) Delete UserMandateRole rows
for umRow in umRoleRows:
rowId = umRow.get("id") if isinstance(umRow, dict) else getattr(umRow, "id", None)
if rowId:
try:
db.recordDelete(UserMandateRole, str(rowId))
except Exception as e:
logger.error(f"Sysadmin role migration: failed to drop UserMandateRole {rowId}: {e}")
# 3) Delete AccessRules
accessRules = db.getRecordset(AccessRule, recordFilter={"roleId": sysadminRoleId})
for ar in accessRules:
arId = ar.get("id") if isinstance(ar, dict) else getattr(ar, "id", None)
if arId:
try:
db.recordDelete(AccessRule, str(arId))
except Exception as e:
logger.error(f"Sysadmin role migration: failed to drop AccessRule {arId}: {e}")
# 4) Delete the Role
try:
db.recordDelete(Role, sysadminRoleId)
except Exception as e:
logger.error(f"Sysadmin role migration: failed to drop Role {sysadminRoleId}: {e}")
logger.warning(
f"Sysadmin role migration: completed; promoted {promoted} user(s) to isPlatformAdmin"
)
# Check for DATA and RESOURCE contexts (UI is handled by _ensureUiContextRules)
existingContexts = {r.get("context") for r in existingRules}
missingRules = []
if AccessRuleContext.DATA.value not in existingContexts:
missingRules.append(AccessRule(
roleId=sysadminRoleId,
context=AccessRuleContext.DATA,
item=None,
view=True,
read=AccessLevel.ALL,
create=AccessLevel.ALL,
update=AccessLevel.ALL,
delete=AccessLevel.ALL,
))
if AccessRuleContext.RESOURCE.value not in existingContexts:
missingRules.append(AccessRule(
roleId=sysadminRoleId,
context=AccessRuleContext.RESOURCE,
item=None,
view=True,
read=None, create=None, update=None, delete=None,
))
if missingRules:
for rule in missingRules:
db.recordCreate(AccessRule, rule)
logger.info(f"Created {len(missingRules)} missing AccessRules for sysadmin role")
def _getRoleId(db: DatabaseConnector, roleLabel: str) -> Optional[str]:
@ -975,9 +939,8 @@ def _createDefaultRoleRules(db: DatabaseConnector) -> None:
Create default role rules for generic access (item = null).
Uses roleId instead of roleLabel.
NOTE: There is no sysadmin role any more platform/infra authority is
governed by the ``isSysAdmin`` / ``isPlatformAdmin`` flags on the User
record. These default rules cover admin/user/viewer template roles.
NOTE: Sysadmin role rules are created separately in _initSysAdminRole().
These default rules cover admin/user/viewer template roles.
Args:
db: Database connector instance
@ -1027,16 +990,15 @@ def _createTableSpecificRules(db: DatabaseConnector) -> None:
These rules override generic rules for specific tables.
Uses roleId instead of roleLabel.
NOTE: There is no sysadmin role any more platform/infra authority is
governed by the ``isSysAdmin`` / ``isPlatformAdmin`` flags on the User
record. These table-specific rules cover admin/user/viewer template roles.
NOTE: Sysadmin role rules are created separately in _initSysAdminRole().
These table-specific rules cover admin/user/viewer template roles.
Args:
db: Database connector instance
"""
tableRules = []
# Get role IDs for template roles (platform authority lives on User flags)
# Get role IDs for template roles (sysadmin is a separate mandate-level role)
adminId = _getRoleId(db, "admin")
userId = _getRoleId(db, "user")
viewerId = _getRoleId(db, "viewer")
@ -1507,7 +1469,8 @@ def _ensureUiContextRules(db: DatabaseConnector) -> None:
mandateAdminRoleIds = []
mandateUserRoleIds = []
mandateViewerRoleIds = []
sysadminRoleIds = []
mandateRoles = db.getRecordset(
Role,
recordFilter={"isSystemRole": False, "featureInstanceId": None}
@ -1523,12 +1486,12 @@ def _ensureUiContextRules(db: DatabaseConnector) -> None:
mandateUserRoleIds.append(roleId)
elif label == "viewer":
mandateViewerRoleIds.append(roleId)
# All role IDs per level (template + mandate-instance).
# Admin-only navigation items are governed by these admin roles plus the
# ``isPlatformAdmin`` flag (checked in routes via requirePlatformAdmin),
# NOT by a dedicated platform-level role.
allAdminRoleIds = ([adminId] if adminId else []) + mandateAdminRoleIds
elif label == "sysadmin":
sysadminRoleIds.append(roleId)
# All role IDs per level (template + mandate-instance)
# sysadmin gets ALL UI rules (admin-only + public) — same logic, explicit rules
allAdminRoleIds = ([adminId] if adminId else []) + mandateAdminRoleIds + sysadminRoleIds
allUserRoleIds = ([userId] if userId else []) + mandateUserRoleIds
allViewerRoleIds = ([viewerId] if viewerId else []) + mandateViewerRoleIds
@ -1546,16 +1509,12 @@ def _ensureUiContextRules(db: DatabaseConnector) -> None:
if roleId and item:
existingCombinations.add((roleId, item))
# Check each navigation item and add missing rules (including subgroup items)
# Check each navigation item and add missing rules
missingRules = []
for section in NAVIGATION_SECTIONS:
isAdminSection = section.get("adminOnly", False)
allItems = list(section.get("items", []))
for subgroup in section.get("subgroups", []):
allItems.extend(subgroup.get("items", []))
for item in allItems:
for item in section.get("items", []):
objectKey = item.get("objectKey")
if not objectKey:
continue
@ -1896,7 +1855,7 @@ def _createStoreResourceRules(db: DatabaseConnector) -> None:
Store resources control which roles can activate features via the Store.
- admin/user: view=True (can see and activate store features)
- viewer: no store access
- isSysAdmin flag bypasses RBAC (rbac.py:getUserPermissions)
- sysadmin: covered by generic RESOURCE rule (item=None, view=True)
Args:
db: Database connector instance
@ -1905,7 +1864,6 @@ def _createStoreResourceRules(db: DatabaseConnector) -> None:
"resource.store.teamsbot",
"resource.store.workspace",
"resource.store.commcoach",
"resource.store.trustee",
]
storeRules = []
@ -2034,11 +1992,9 @@ def assignInitialUserMemberships(
Assign initial memberships to admin and event users via UserMandate + UserMandateRole.
This is the NEW multi-tenant way of assigning roles.
Initial users get the "admin" role in the root mandate. Platform-level
authority (cross-mandate governance + infrastructure ops) is conveyed via
the ``isSysAdmin`` / ``isPlatformAdmin`` flags on the User record itself
(see ``initAdminUser`` / ``initEventUser``).
Hybrid model: Initial users get BOTH the isSysAdmin flag (for system ops)
AND the "admin" + "sysadmin" roles in the root mandate (for RBAC-based admin ops).
Args:
db: Database connector instance
mandateId: Root mandate ID
@ -2056,7 +2012,13 @@ def assignInitialUserMemberships(
if not adminRoleId:
logger.warning(f"No mandate-level role found for mandate {mandateId}, skipping membership assignment")
return
# Find sysadmin role in root mandate (created by _initSysAdminRole)
sysadminRole = next((r for r in mandateRoles if r.get("roleLabel") == "sysadmin"), None)
sysadminRoleId = sysadminRole.get("id") if sysadminRole else None
if not sysadminRoleId:
logger.warning("Sysadmin role not found in root mandate - run _initSysAdminRole first")
for userId, userName in [(adminUserId, "admin"), (eventUserId, "event")]:
# Check if UserMandate already exists
existingMemberships = db.getRecordset(
@ -2091,6 +2053,20 @@ def assignInitialUserMemberships(
)
db.recordCreate(UserMandateRole, userMandateRole)
logger.info(f"Assigned admin role to {userName} user in mandate")
# Assign sysadmin role (in addition to admin role)
if sysadminRoleId:
existingSysadminRoles = db.getRecordset(
UserMandateRole,
recordFilter={"userMandateId": userMandateId, "roleId": sysadminRoleId}
)
if not existingSysadminRoles:
sysadminMandateRole = UserMandateRole(
userMandateId=userMandateId,
roleId=sysadminRoleId
)
db.recordCreate(UserMandateRole, sysadminMandateRole)
logger.info(f"Assigned sysadmin role to {userName} user in root mandate")
def _getPasswordHash(password: Optional[str]) -> Optional[str]:

View file

@ -17,7 +17,6 @@ import uuid
from modules.connectors.connectorDbPostgre import DatabaseConnector, _get_cached_connector
from modules.shared.configuration import APP_CONFIG
from modules.shared.dbRegistry import registerDatabase
from modules.shared.timeUtils import getUtcTimestamp, parseTimestamp
from modules.interfaces.interfaceRbac import getRecordsetWithRBAC
from modules.security.rbac import RbacClass
@ -49,9 +48,6 @@ from modules.datamodels.datamodelNotification import UserNotification
logger = logging.getLogger(__name__)
appDatabase = "poweron_app"
registerDatabase(appDatabase)
# Singleton factory for AppObjects instances per context
_gatewayInterfaces = {}
@ -137,7 +133,7 @@ class AppObjects:
try:
# Get configuration values with defaults
dbHost = APP_CONFIG.get("DB_HOST", "_no_config_default_data")
dbDatabase = appDatabase
dbDatabase = "poweron_app"
dbUser = APP_CONFIG.get("DB_USER")
dbPassword = APP_CONFIG.get("DB_PASSWORD_SECRET")
dbPort = int(APP_CONFIG.get("DB_PORT", 5432))
@ -677,7 +673,6 @@ class AppObjects:
externalUsername: str = None,
externalEmail: str = None,
isSysAdmin: bool = False,
isPlatformAdmin: bool = False,
addExternalIdentityConnection: bool = True,
) -> User:
"""
@ -715,7 +710,6 @@ class AppObjects:
language=language,
enabled=enabled,
isSysAdmin=isSysAdmin,
isPlatformAdmin=isPlatformAdmin,
authenticationAuthority=authenticationAuthority,
hashedPassword=self._getPasswordHash(password) if password else None,
)
@ -757,21 +751,15 @@ class AppObjects:
logger.error(f"Unexpected error creating user: {str(e)}")
raise ValueError(f"Failed to create user: {str(e)}")
def updateUser(
self,
userId: str,
updateData: Union[Dict[str, Any], User],
allowAdminFlagChange: bool = False,
) -> User:
def updateUser(self, userId: str, updateData: Union[Dict[str, Any], User], allowSysAdminChange: bool = False) -> User:
"""Update a user's information.
Args:
userId: ID of the user to update
updateData: User data to update (dict or User model)
allowAdminFlagChange: If True, allows changing the privileged platform
flags ``isSysAdmin`` and ``isPlatformAdmin``.
Only set to True when called by a Platform Admin
explicitly changing another user's admin status.
allowSysAdminChange: If True, allows changing isSysAdmin field.
Only set to True when called by a SysAdmin explicitly
changing another user's admin status.
"""
try:
# Get user
@ -779,35 +767,20 @@ class AppObjects:
if not user:
raise ValueError(f"User {userId} not found")
# Convert updateData to dict if it's a User model.
#
# IMPORTANT: When the route layer passes a Pydantic ``User`` instance,
# ``model_dump()`` returns ALL fields — including those the client
# never sent — populated with Pydantic defaults (e.g. ``isSysAdmin=False``).
# That historical pattern caused silent flag flips on inline-toggles.
#
# The PUT route now ships a plain dict carrying ONLY the explicitly
# changed fields, so this branch should rarely fire; however internal
# callers (``disableUser`` / ``enableUser`` / migration scripts) still
# use dict-style partials and must remain partial-safe.
# Convert updateData to dict if it's a User model
if isinstance(updateData, User):
updateDict = updateData.model_dump(exclude_unset=True)
# Fallback for legacy callers that constructed a fully-defaulted
# User: if nothing was marked as explicitly set, treat the dump
# as authoritative but DROP privileged flags unconditionally
# unless allowAdminFlagChange is True.
if not updateDict:
updateDict = updateData.model_dump()
updateDict = updateData.model_dump()
else:
updateDict = updateData.copy() if isinstance(updateData, dict) else dict(updateData)
updateDict = updateData.copy() if isinstance(updateData, dict) else updateData
# Remove id field from updateDict if present - we'll use userId from parameter
updateDict.pop("id", None)
# SECURITY: Protect privileged platform flags from accidental
# overwrite via profile forms or partial payloads from clients
# whose model defaults could pull the value down to False.
protectedFields = ["isSysAdmin", "isPlatformAdmin"]
if not allowAdminFlagChange:
# SECURITY: Protect sensitive fields from being overwritten by profile updates.
# These fields should only be changed explicitly by admins, not through
# profile forms where they might be sent as default values (e.g., isSysAdmin=False).
protectedFields = ["isSysAdmin"]
if not allowSysAdminChange:
for field in protectedFields:
updateDict.pop(field, None)
@ -1479,56 +1452,16 @@ class AppObjects:
return Mandate(**filteredMandates[0])
def _existingMandateNames(self, excludeId: Optional[str] = None) -> List[str]:
"""Return all mandate.name values currently in the DB (optionally excluding one id)."""
out: List[str] = []
for r in self.db.getRecordset(Mandate):
if excludeId and str(r.get("id")) == str(excludeId):
continue
n = r.get("name")
if n:
out.append(n)
return out
def _generateUniqueMandateName(self, label: str, excludeId: Optional[str] = None) -> str:
"""Generate a slug from *label* that is unique across all mandates (Phase 3 helper)."""
from modules.shared.mandateNameUtils import allocateUniqueMandateSlug, slugifyMandateName
base = slugifyMandateName(label or "")
return allocateUniqueMandateSlug(base, self._existingMandateNames(excludeId=excludeId))
def createMandate(self, name: str = None, label: str = None, enabled: bool = True) -> Mandate:
def createMandate(self, name: str, label: str = None, enabled: bool = True) -> Mandate:
"""
Creates a new mandate if user has permission.
Automatically copies system template roles (admin, user, viewer) to the new mandate.
``label`` (Voller Name) is required (non-empty). If ``name`` (Kurzzeichen) is omitted or empty,
a unique slug is generated from the label; otherwise it is validated and uniqueness-checked.
"""
if not self.checkRbacPermission(Mandate, "create"):
raise PermissionError("No permission to create mandates")
from modules.shared.mandateNameUtils import isValidMandateName
effLabel = (label or "").strip() if label is not None else ""
if not effLabel and name:
effLabel = (name or "").strip()
if not effLabel:
raise ValueError("Mandate label (Voller Name) is required")
rawName = (name or "").strip() if name else ""
if not rawName:
rawName = self._generateUniqueMandateName(effLabel)
else:
if not isValidMandateName(rawName):
raise ValueError(
"Mandate Kurzzeichen must be 232 characters: lowercase az, digits, "
"hyphens only (single-hyphen segments)."
)
if rawName in self._existingMandateNames():
raise ValueError(f"Mandate Kurzzeichen '{rawName}' is already in use")
mandateData = Mandate(name=rawName, label=effLabel, enabled=enabled)
# Create mandate data using model
mandateData = Mandate(name=name, label=label, enabled=enabled)
# Create mandate record
createdRecord = self.db.recordCreate(Mandate, mandateData)
@ -1547,31 +1480,24 @@ class AppObjects:
return Mandate(**createdRecord)
def _provisionMandateForUser(self, userId: str, mandateLabel: str, planKey: str) -> Dict[str, Any]:
def _provisionMandateForUser(self, userId: str, mandateName: str, planKey: str) -> Dict[str, Any]:
"""
Atomic provisioning: create Mandate + UserMandate + Subscription + auto-create FeatureInstances.
Internal method bypasses RBAC (used during registration when user has no permissions yet).
``mandateLabel`` is the display name (Voller Name); a unique slug ``name`` (Kurzzeichen) is derived.
"""
from modules.datamodels.datamodelSubscription import MandateSubscription, SubscriptionStatusEnum, BUILTIN_PLANS
from modules.datamodels.datamodelFeatures import FeatureInstance
from modules.interfaces.interfaceBootstrap import copySystemRolesToMandate
from modules.interfaces.interfaceFeatures import getFeatureInterface
from modules.system.registry import loadFeatureMainModules
plan = BUILTIN_PLANS.get(planKey)
if not plan:
raise ValueError(f"Unknown plan: {planKey}")
effLabel = (mandateLabel or "").strip()
if not effLabel:
raise ValueError("mandateLabel (Voller Name) is required for provisioning")
uniqueName = self._generateUniqueMandateName(effLabel)
mandateData = Mandate(
name=uniqueName,
label=effLabel,
name=mandateName,
label=mandateName,
enabled=True,
isSystem=False,
)
@ -1744,17 +1670,7 @@ class AppObjects:
return activated
def updateMandate(self, mandateId: str, updateData: Dict[str, Any]) -> Mandate:
"""
Updates a mandate if user has access.
Field-level rules:
- ``id`` always immutable.
- ``isSystem`` only sysadmin.
- ``name`` (Kurzzeichen) only platform/sysadmin; format and uniqueness are validated.
- ``label`` (Voller Name) must be non-empty if provided.
"""
from modules.shared.mandateNameUtils import isValidMandateName
"""Updates a mandate if user has access."""
try:
# First check if user has permission to modify mandates
if not self.checkRbacPermission(Mandate, "update", mandateId):
@ -1765,33 +1681,11 @@ class AppObjects:
if not mandate:
raise ValueError(f"Mandate {mandateId} not found")
_isSysAdmin = bool(getattr(self.currentUser, "isSysAdmin", False))
_isPlatformAdmin = bool(getattr(self.currentUser, "isPlatformAdmin", False))
_protectedFields = {"id"}
if not _isSysAdmin:
if not getattr(self.currentUser, "isSysAdmin", False):
_protectedFields.add("isSystem")
if not (_isSysAdmin or _isPlatformAdmin):
_protectedFields.add("name")
_sanitizedData = {k: v for k, v in updateData.items() if k not in _protectedFields}
if "name" in _sanitizedData:
newName = (_sanitizedData["name"] or "").strip()
if not isValidMandateName(newName):
raise ValueError(
"Mandate Kurzzeichen must be 232 characters: lowercase az, digits, "
"hyphens only (single-hyphen segments)."
)
if newName != mandate.name and newName in self._existingMandateNames(excludeId=mandateId):
raise ValueError(f"Mandate Kurzzeichen '{newName}' is already in use")
_sanitizedData["name"] = newName
if "label" in _sanitizedData:
newLabel = (_sanitizedData["label"] or "").strip()
if not newLabel:
raise ValueError("Mandate Voller Name (label) must not be empty.")
_sanitizedData["label"] = newLabel
# Update mandate data using model
updatedData = mandate.model_dump()
updatedData.update(_sanitizedData)
@ -2000,12 +1894,11 @@ class AppObjects:
from modules.features.graphicalEditor.datamodelFeatureGraphicalEditor import (
AutoWorkflow, AutoVersion, AutoRun, AutoStepLog, AutoTask,
)
from modules.features.graphicalEditor.interfaceFeatureGraphicalEditor import graphicalEditorDatabase
from modules.connectors.connectorDbPostgre import DatabaseConnector
geDb = DatabaseConnector(
dbHost=APP_CONFIG.get("DB_HOST", "localhost"),
dbDatabase=graphicalEditorDatabase,
dbDatabase="poweron_graphicaleditor",
dbUser=APP_CONFIG.get("DB_USER"),
dbPassword=APP_CONFIG.get("DB_PASSWORD_SECRET") or APP_CONFIG.get("DB_PASSWORD"),
dbPort=int(APP_CONFIG.get("DB_PORT", 5432)),

View file

@ -14,7 +14,6 @@ import uuid
from modules.connectors.connectorDbPostgre import DatabaseConnector
from modules.shared.configuration import APP_CONFIG
from modules.shared.dbRegistry import registerDatabase
from modules.shared.timeUtils import getUtcTimestamp
from modules.datamodels.datamodelUam import User, Mandate
from modules.datamodels.datamodelMembership import UserMandate
@ -110,7 +109,6 @@ _billingInterfaces: Dict[str, "BillingObjects"] = {}
# Database name for billing
BILLING_DATABASE = "poweron_billing"
registerDatabase(BILLING_DATABASE)
def getInterface(currentUser: User, mandateId: str = None) -> "BillingObjects":
@ -1542,40 +1540,16 @@ class BillingObjects:
if not accountIds:
return PaginatedResult(items=[], totalItems=0, totalPages=0)
# Extract free-text search term and run a custom query that covers
# enriched columns (mandateName, userName) and the numeric amount
# column. The generic SQL search only covers TEXT columns of the
# BillingTransaction table, which excludes these fields.
searchTerm: Optional[str] = None
if mappedPagination and mappedPagination.filters:
raw = mappedPagination.filters.get("search")
if isinstance(raw, str) and raw.strip():
searchTerm = raw.strip()
recordFilter: Dict[str, Any] = {"accountId": accountIds}
if userId:
recordFilter["createdByUserId"] = userId
if searchTerm:
searchResult = self._searchTransactionsPaginated(
allAccounts=allAccounts,
accountIds=accountIds,
userId=userId,
searchTerm=searchTerm,
pagination=mappedPagination,
)
pageItems = searchResult["items"]
totalItems = searchResult["totalItems"]
totalPages = searchResult["totalPages"]
else:
recordFilter: Dict[str, Any] = {"accountId": accountIds}
if userId:
recordFilter["createdByUserId"] = userId
result = self.db.getRecordsetPaginated(
BillingTransaction,
pagination=mappedPagination,
recordFilter=recordFilter,
)
pageItems = result.get("items", []) if isinstance(result, dict) else result.items
totalItems = result.get("totalItems", 0) if isinstance(result, dict) else result.totalItems
totalPages = result.get("totalPages", 0) if isinstance(result, dict) else result.totalPages
result = self.db.getRecordsetPaginated(
BillingTransaction,
pagination=mappedPagination,
recordFilter=recordFilter,
)
pageItems = result.get("items", []) if isinstance(result, dict) else result.items
accountMap = {a.get("id"): a for a in allAccounts}
@ -1618,186 +1592,15 @@ class BillingObjects:
row["userName"] = userMap.get(txUserId, txUserId) if txUserId else None
enriched.append(row)
totalItems = result.get("totalItems", 0) if isinstance(result, dict) else result.totalItems
totalPages = result.get("totalPages", 0) if isinstance(result, dict) else result.totalPages
return PaginatedResult(items=enriched, totalItems=totalItems, totalPages=totalPages)
except Exception as e:
logger.error(f"Error in getTransactionsForMandatesPaginated: {e}")
return PaginatedResult(items=[], totalItems=0, totalPages=0)
    def _searchTransactionsPaginated(
        self,
        allAccounts: List[Dict[str, Any]],
        accountIds: List[str],
        userId: Optional[str],
        searchTerm: str,
        pagination: PaginationParams,
    ) -> Dict[str, Any]:
        """
        Custom paginated search for BillingTransaction that also covers the
        enriched columns `mandateName` and `userName` as well as the numeric
        `amount` column. Resolves matching mandate/user IDs via the app DB
        first, then builds a single SQL query with OR-combined conditions.

        Args:
            allAccounts: All billing accounts visible to the caller; used to
                map matched mandate IDs back to account IDs.
            accountIds: Account IDs the caller may see (hard scope filter).
            userId: Optional creator filter ("my transactions" view).
            searchTerm: Free-text term; also tried as a numeric amount.
            pagination: Page/sort/filter parameters; the `search` filter key
                is handled here, all other filter keys are delegated to the
                generic clause builder.

        Returns:
            Dict with keys ``items``, ``totalItems`` and ``totalPages``.
            On SQL failure an empty result dict is returned (error is logged).
        """
        import math
        from modules.connectors.connectorDbPostgre import _get_model_fields, _parseRecordFields
        from modules.datamodels.datamodelUam import UserInDB
        from modules.interfaces.interfaceDbApp import getInterface as getAppInterface
        table = BillingTransaction.__name__
        fields = _get_model_fields(BillingTransaction)
        # ILIKE pattern for case-insensitive substring matching.
        pattern = f"%{searchTerm}%"
        # Resolve matching user / mandate IDs via the app DB (which is separate
        # from the billing DB and hosts UserInDB / Mandate tables).
        matchingUserIds: List[str] = []
        matchingMandateIds: List[str] = []
        try:
            appInterface = getAppInterface(self.currentUser)
            appInterface.db._ensure_connection()
            with appInterface.db.connection.cursor() as cur:
                if appInterface.db._ensureTableExists(UserInDB):
                    cur.execute(
                        'SELECT "id" FROM "UserInDB" WHERE '
                        'COALESCE("username", \'\') ILIKE %s OR '
                        'COALESCE("fullName", \'\') ILIKE %s OR '
                        'COALESCE("email", \'\') ILIKE %s',
                        (pattern, pattern, pattern),
                    )
                    matchingUserIds = [r["id"] for r in cur.fetchall() if r.get("id")]
                if appInterface.db._ensureTableExists(Mandate):
                    cur.execute(
                        'SELECT "id" FROM "Mandate" WHERE '
                        'COALESCE("label", \'\') ILIKE %s OR '
                        'COALESCE("name", \'\') ILIKE %s',
                        (pattern, pattern),
                    )
                    matchingMandateIds = [r["id"] for r in cur.fetchall() if r.get("id")]
        except Exception as e:
            # Best-effort: if the app DB is unavailable the text-column search
            # below still runs; only the enriched-column matching is lost.
            logger.warning(f"_searchTransactionsPaginated: user/mandate resolution failed: {e}")
        # Map matched mandates to the caller-visible accounts that belong to them.
        matchingAccountIds = [
            a.get("id") for a in allAccounts
            if a.get("id") and a.get("mandateId") in set(matchingMandateIds)
        ]
        # Try to interpret the search term as a number for amount matching.
        amountVal: Optional[float] = None
        try:
            amountVal = float(searchTerm.replace(",", "."))
        except Exception:
            amountVal = None
        # Hard scope conditions: account visibility and (optional) creator.
        whereParts: List[str] = ['"accountId" = ANY(%s)']
        whereValues: List[Any] = [accountIds]
        if userId:
            whereParts.append('"createdByUserId" = %s')
            whereValues.append(userId)
        # Apply non-search filters from pagination (reuse existing builder for
        # everything except the `search` key which we handle explicitly).
        import copy
        paginationWithoutSearch = copy.deepcopy(pagination) if pagination else None
        if paginationWithoutSearch and paginationWithoutSearch.filters:
            paginationWithoutSearch.filters = {
                k: v for k, v in paginationWithoutSearch.filters.items() if k != "search"
            }
        # OR-combined search conditions: every TEXT column of the model,
        # resolved user/account IDs, and the amount column (text + numeric).
        orParts: List[str] = []
        orValues: List[Any] = []
        textCols = [c for c, t in fields.items() if t == "TEXT"]
        for col in textCols:
            orParts.append(f'COALESCE("{col}"::TEXT, \'\') ILIKE %s')
            orValues.append(pattern)
        if matchingUserIds:
            orParts.append('"createdByUserId" = ANY(%s)')
            orValues.append(matchingUserIds)
        if matchingAccountIds:
            orParts.append('"accountId" = ANY(%s)')
            orValues.append(matchingAccountIds)
        orParts.append('"amount"::TEXT ILIKE %s')
        orValues.append(pattern)
        if amountVal is not None:
            orParts.append('"amount" = %s')
            orValues.append(amountVal)
        whereParts.append(f"({' OR '.join(orParts)})")
        whereValues.extend(orValues)
        # Apply remaining structured filters via the generic helper by feeding
        # it a dummy pagination that does NOT include LIMIT/OFFSET. We only
        # need the WHERE contribution for the non-search filters here.
        extraWhere = ""
        extraValues: List[Any] = []
        if paginationWithoutSearch and paginationWithoutSearch.filters:
            try:
                fromPagination = copy.deepcopy(paginationWithoutSearch)
                fromPagination.sort = []
                fromPagination.page = 1
                fromPagination.pageSize = 1
                ew, _, _, values, _ = self.db._buildPaginationClauses(
                    BillingTransaction, fromPagination, recordFilter=None
                )
                if ew:
                    # Builder emits a standalone WHERE; splice it as AND terms.
                    extraWhere = ew.replace(" WHERE ", " AND ", 1)
                    extraValues = list(values)
            except Exception as e:
                logger.warning(f"_searchTransactionsPaginated: extra-filter build failed: {e}")
        whereClause = " WHERE " + " AND ".join(whereParts) + extraWhere
        whereValues.extend(extraValues)
        # Build ORDER BY from pagination.sort; only columns that exist on the
        # model are accepted (prevents SQL injection via sort field names).
        validColumns = set(fields.keys())
        orderParts: List[str] = []
        if pagination and pagination.sort:
            for sf in pagination.sort:
                sfField = sf.get("field") if isinstance(sf, dict) else getattr(sf, "field", None)
                sfDir = sf.get("direction", "asc") if isinstance(sf, dict) else getattr(sf, "direction", "asc")
                if sfField and sfField in validColumns:
                    direction = "DESC" if str(sfDir).lower() == "desc" else "ASC"
                    colType = fields.get(sfField, "TEXT")
                    if colType == "BOOLEAN":
                        orderParts.append(f'COALESCE("{sfField}", FALSE) {direction}')
                    else:
                        orderParts.append(f'"{sfField}" {direction} NULLS LAST')
        if not orderParts:
            # Deterministic fallback ordering for stable pagination.
            orderParts.append('"id"')
        orderClause = " ORDER BY " + ", ".join(orderParts)
        pageSize = pagination.pageSize if pagination else 50
        page = pagination.page if pagination else 1
        offset = (page - 1) * pageSize
        limitClause = f" LIMIT {pageSize} OFFSET {offset}"
        try:
            self.db._ensure_connection()
            with self.db.connection.cursor() as cur:
                # COUNT first (same WHERE, no ORDER/LIMIT) for totalItems.
                countSql = f'SELECT COUNT(*) FROM "{table}"{whereClause}'
                cur.execute(countSql, whereValues)
                totalItems = cur.fetchone()["count"]
                dataSql = f'SELECT * FROM "{table}"{whereClause}{orderClause}{limitClause}'
                cur.execute(dataSql, whereValues)
                records = [dict(row) for row in cur.fetchall()]
                for rec in records:
                    _parseRecordFields(rec, fields, f"search table {table}")
            totalPages = math.ceil(totalItems / pageSize) if totalItems > 0 else 0
            return {"items": records, "totalItems": totalItems, "totalPages": totalPages}
        except Exception as e:
            logger.error(f"_searchTransactionsPaginated SQL error: {e}", exc_info=True)
            try:
                # Leave the connection usable for subsequent queries.
                self.db.connection.rollback()
            except Exception:
                pass
            return {"items": [], "totalItems": 0, "totalPages": 0}
def _buildScopeFilter(
self,
mandateIds: Optional[List[str]],

View file

@ -29,7 +29,6 @@ from modules.datamodels.datamodelUam import User
# DYNAMIC PART: Connectors to the Interface
from modules.connectors.connectorDbPostgre import DatabaseConnector
from modules.shared.dbRegistry import registerDatabase
from modules.shared.timeUtils import getUtcTimestamp, parseTimestamp
from modules.datamodels.datamodelPagination import PaginationParams, PaginatedResult
from modules.interfaces.interfaceRbac import getRecordsetWithRBAC
@ -38,9 +37,6 @@ from modules.interfaces.interfaceRbac import getRecordsetWithRBAC
from modules.shared.configuration import APP_CONFIG
logger = logging.getLogger(__name__)
chatDatabase = "poweron_chat"
registerDatabase(chatDatabase)
# Singleton factory for Chat instances
_chatInterfaces = {}
@ -318,7 +314,7 @@ class ChatObjects:
try:
# Get configuration values with defaults
dbHost = APP_CONFIG.get("DB_HOST", "_no_config_default_data")
dbDatabase = chatDatabase
dbDatabase = "poweron_chat"
dbUser = APP_CONFIG.get("DB_USER")
dbPassword = APP_CONFIG.get("DB_PASSWORD_SECRET")
dbPort = int(APP_CONFIG.get("DB_PORT", 5432))
@ -655,27 +651,17 @@ class ChatObjects:
totalPages=totalPages
)
def getLastMessageTimestamp(self, workflowId: str) -> Optional[float]:
"""
Return the latest publishedAt/sysCreatedAt from ChatMessage for a workflow
as UTC seconds (float) matches the timestamp format used across the
rest of the chat data model (lastActivity, startedAt, publishedAt).
"""
def getLastMessageTimestamp(self, workflowId: str) -> Optional[str]:
"""Return the latest publishedAt/sysCreatedAt from ChatMessage for a workflow."""
messages = self._getRecordset(ChatMessage, recordFilter={"workflowId": workflowId})
if not messages:
return None
latest: Optional[float] = None
latest = None
for msg in messages:
raw = msg.get("publishedAt") or msg.get("sysCreatedAt")
if raw is None:
continue
try:
ts = float(raw)
except (TypeError, ValueError):
continue
if latest is None or ts > latest:
ts = msg.get("publishedAt") or msg.get("sysCreatedAt")
if ts and (latest is None or str(ts) > str(latest)):
latest = ts
return latest
return str(latest) if latest else None
def searchWorkflowsByContent(self, query: str, limit: int = 50) -> List[str]:
"""Return workflow IDs whose messages contain the query string (case-insensitive)."""
@ -722,8 +708,6 @@ class ChatObjects:
return ChatWorkflow(
id=workflow["id"],
featureInstanceId=workflow.get("featureInstanceId"),
linkedWorkflowId=workflow.get("linkedWorkflowId"),
status=workflow.get("status", "running"),
name=workflow.get("name"),
currentRound=_toInt(workflow.get("currentRound")),
@ -740,54 +724,6 @@ class ChatObjects:
logger.error(f"getWorkflow: data validation failed for {workflowId}: {e}")
return None
def getWorkflowByLink(
self,
featureInstanceId: str,
linkedWorkflowId: str,
) -> Optional[ChatWorkflow]:
"""Return the ChatWorkflow linked to (featureInstanceId, linkedWorkflowId), if any.
Used by editor-style features (e.g. GraphicalEditor AI editor chat) to
find the persisted chat for a specific external entity (Automation2Workflow).
Falls under the same RBAC as ``getWorkflow``.
"""
if not featureInstanceId or not linkedWorkflowId:
return None
rows = self._getRecordset(
ChatWorkflow,
recordFilter={
"featureInstanceId": featureInstanceId,
"linkedWorkflowId": linkedWorkflowId,
},
) or []
if not rows:
return None
# Return the most recently active one if multiple ever exist (defensive).
rows.sort(key=lambda r: float(r.get("lastActivity") or r.get("startedAt") or 0), reverse=True)
return self.getWorkflow(rows[0]["id"])
def getOrCreateLinkedWorkflow(
self,
featureInstanceId: str,
linkedWorkflowId: str,
name: Optional[str] = None,
) -> ChatWorkflow:
"""Find or create the ChatWorkflow linked to a specific external entity.
Editor-style features call this once at the start of a chat exchange to
guarantee a 1:1 mapping between (featureInstanceId, linkedWorkflowId)
and a persisted ChatWorkflow row.
"""
existing = self.getWorkflowByLink(featureInstanceId, linkedWorkflowId)
if existing:
return existing
return self.createWorkflow({
"featureInstanceId": featureInstanceId,
"linkedWorkflowId": linkedWorkflowId,
"status": "active",
"name": name or "",
})
def createWorkflow(self, workflowData: Dict[str, Any]) -> ChatWorkflow:
"""Creates a new workflow if user has permission."""
if not self.checkRbacPermission(ChatWorkflow, "create"):
@ -835,8 +771,6 @@ class ChatObjects:
# Convert to ChatWorkflow model (empty related data for new workflow)
return ChatWorkflow(
id=created["id"],
featureInstanceId=created.get("featureInstanceId"),
linkedWorkflowId=created.get("linkedWorkflowId"),
status=created.get("status", "running"),
name=created.get("name"),
currentRound=created.get("currentRound", 0) or 0,

View file

@ -12,7 +12,6 @@ from datetime import datetime, timezone, timedelta
from typing import Dict, Any, List, Optional
from modules.connectors.connectorDbPostgre import _get_cached_connector
from modules.shared.dbRegistry import registerDatabase
from modules.datamodels.datamodelKnowledge import FileContentIndex, ContentChunk, RoundMemory, WorkflowMemory
from modules.datamodels.datamodelUam import User
from modules.shared.configuration import APP_CONFIG
@ -20,9 +19,6 @@ from modules.shared.timeUtils import getUtcTimestamp
logger = logging.getLogger(__name__)
knowledgeDatabase = "poweron_knowledge"
registerDatabase(knowledgeDatabase)
_instances: Dict[str, "KnowledgeObjects"] = {}
@ -38,7 +34,7 @@ class KnowledgeObjects:
def _initializeDatabase(self):
dbHost = APP_CONFIG.get("DB_HOST", "_no_config_default_data")
dbDatabase = knowledgeDatabase
dbDatabase = "poweron_knowledge"
dbUser = APP_CONFIG.get("DB_USER")
dbPassword = APP_CONFIG.get("DB_PASSWORD_SECRET")
dbPort = int(APP_CONFIG.get("DB_PORT", 5432))

View file

@ -14,7 +14,6 @@ import mimetypes
from typing import Dict, Any, List, Optional, Union
from modules.connectors.connectorDbPostgre import DatabaseConnector, _get_cached_connector
from modules.shared.dbRegistry import registerDatabase
from modules.interfaces.interfaceRbac import getRecordsetWithRBAC, getRecordsetPaginatedWithRBAC
from modules.security.rbac import RbacClass
from modules.datamodels.datamodelRbac import AccessRuleContext
@ -35,9 +34,6 @@ from modules.datamodels.datamodelPagination import PaginationParams, PaginatedRe
logger = logging.getLogger(__name__)
managementDatabase = "poweron_management"
registerDatabase(managementDatabase)
# Singleton factory for Management instances with AI service per context
_instancesManagement = {}
@ -131,7 +127,7 @@ class ComponentObjects:
try:
# Get configuration values with defaults
dbHost = APP_CONFIG.get("DB_HOST", "_no_config_default_data")
dbDatabase = managementDatabase
dbDatabase = "poweron_management"
dbUser = APP_CONFIG.get("DB_USER")
dbPassword = APP_CONFIG.get("DB_PASSWORD_SECRET")
dbPort = int(APP_CONFIG.get("DB_PORT", 5432))
@ -635,8 +631,12 @@ class ComponentObjects:
# Prompt methods
def _isSysAdmin(self) -> bool:
"""Check if the current user has the isSysAdmin flag (infrastructure operator)."""
return bool(getattr(self.currentUser, 'isSysAdmin', False))
"""Check if the current user has sysadmin role (or isSysAdmin flag as fallback)."""
from modules.auth.authentication import _hasSysAdminRole
userId = getattr(self.currentUser, 'id', None)
if userId and _hasSysAdminRole(str(userId)):
return True
return hasattr(self.currentUser, 'isSysAdmin') and self.currentUser.isSysAdmin
def _enrichPromptsWithPermissions(self, prompts: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
"""Enrich prompts with row-level _permissions based on ownership and isSystem flag.
@ -1087,32 +1087,29 @@ class ComponentObjects:
return newfileName
counter += 1
def createFile(self, name: str, mimeType: str, content: bytes, folderId: Optional[str] = None) -> FileItem:
def createFile(self, name: str, mimeType: str, content: bytes) -> FileItem:
"""Creates a new file entry if user has permission. Computes fileHash and fileSize from content.
Duplicate check: if a file with the same user + fileHash + fileName already exists,
the existing file is returned instead of creating a new one.
Same hash with different name is allowed (intentional copy by user).
Args:
folderId: Optional parent folder ID. None/empty means the root folder.
"""
if not self.checkRbacPermission(FileItem, "create"):
raise PermissionError("No permission to create files")
# Compute file size and hash
fileSize = len(content)
fileHash = hashlib.sha256(content).hexdigest()
# Duplicate check: same user + same hash + same fileName → return existing
existingFile = self.checkForDuplicateFile(fileHash, name)
if existingFile:
logger.info(f"Duplicate file detected in createFile: '{name}' (hash={fileHash[:12]}...) for user {self.userId} — returning existing file {existingFile.id}")
return existingFile
# Ensure fileName is unique
uniqueName = self._generateUniquefileName(name)
mandateId = self.mandateId or ""
featureInstanceId = self.featureInstanceId or ""
@ -1123,11 +1120,6 @@ class ComponentObjects:
else:
scope = "personal"
# Normalize folderId: treat empty string as "no folder" (= root) NULL in DB
normalizedFolderId: Optional[str] = folderId
if isinstance(normalizedFolderId, str) and not normalizedFolderId.strip():
normalizedFolderId = None
fileItem = FileItem(
mandateId=mandateId,
featureInstanceId=featureInstanceId,
@ -1136,7 +1128,7 @@ class ComponentObjects:
mimeType=mimeType,
fileSize=fileSize,
fileHash=fileHash,
folderId=normalizedFolderId,
folderId="",
)
# Store in database
@ -1404,24 +1396,6 @@ class ComponentObjects:
self._validateFolderName(newName, folder.get("parentId"), excludeFolderId=folderId)
return self.db.recordModify(FileFolder, folderId, {"name": newName})
def updateFolder(self, folderId: str, updateData: Dict[str, Any]) -> bool:
"""
Update folder metadata (e.g. ``scope``, ``neutralize``). Owner-only,
same access model as renameFolder/moveFolder. Use ``renameFolder`` for
``name`` changes (uniqueness validation) and ``moveFolder`` for
``parentId`` changes (cycle/uniqueness validation).
"""
if not updateData:
return True
folder = self.getFolder(folderId)
if not folder:
raise FileNotFoundError(f"Folder {folderId} not found")
forbiddenKeys = {"id", "sysCreatedBy", "sysCreatedAt", "sysUpdatedAt"}
cleaned: Dict[str, Any] = {k: v for k, v in updateData.items() if k not in forbiddenKeys}
if "name" in cleaned:
self._validateFolderName(cleaned["name"], folder.get("parentId"), excludeFolderId=folderId)
return self.db.recordModify(FileFolder, folderId, cleaned)
def moveFolder(self, folderId: str, targetParentId: Optional[str] = None) -> bool:
"""Move a folder to a new parent, with circular reference and unique name checks."""
folder = self.getFolder(folderId)
@ -1868,44 +1842,39 @@ class ComponentObjects:
logger.error(f"Error getting file content: {str(e)}")
return None
def saveUploadedFile(self, fileContent: bytes, fileName: str, folderId: Optional[str] = None) -> tuple[FileItem, str]:
"""Saves an uploaded file if user has permission.
Args:
folderId: Optional parent folder ID. None means root folder.
"""
def saveUploadedFile(self, fileContent: bytes, fileName: str) -> tuple[FileItem, str]:
"""Saves an uploaded file if user has permission."""
try:
# Check file creation permission
if not self.checkRbacPermission(FileItem, "create"):
raise PermissionError("No permission to upload files")
logger.debug(f"Starting upload process for file: {fileName} (folderId={folderId!r})")
logger.debug(f"Starting upload process for file: {fileName}")
if not isinstance(fileContent, bytes):
logger.error(f"Invalid fileContent type: {type(fileContent)}")
raise ValueError(f"fileContent must be bytes, got {type(fileContent)}")
# Compute file hash to check for duplicates before any DB writes
fileHash = hashlib.sha256(fileContent).hexdigest()
# Duplicate check: same user + same fileHash + same fileName → return existing file
# Same hash with different name is allowed (intentional copy by user)
existingFile = self.checkForDuplicateFile(fileHash, fileName)
if existingFile:
logger.info(f"Duplicate detected for user {self.userId}: '{fileName}' with hash {fileHash[:12]}... — returning existing file {existingFile.id}")
return existingFile, "exact_duplicate"
# Determine MIME type
mimeType = self.getMimeType(fileName)
# createFile handles its own duplicate check (for calls from other code paths)
# Here we already checked, so this will create a new file
logger.debug(f"Saving file metadata to database for file: {fileName}")
fileItem = self.createFile(
name=fileName,
mimeType=mimeType,
content=fileContent,
folderId=folderId,
content=fileContent
)
# Save binary data

View file

@ -13,7 +13,6 @@ from datetime import datetime, timezone
from modules.connectors.connectorDbPostgre import DatabaseConnector
from modules.shared.configuration import APP_CONFIG
from modules.shared.dbRegistry import registerDatabase
from modules.datamodels.datamodelUam import User
from modules.datamodels.datamodelMembership import UserMandate
from modules.datamodels.datamodelSubscription import (
@ -32,7 +31,6 @@ from modules.datamodels.datamodelSubscription import (
logger = logging.getLogger(__name__)
SUBSCRIPTION_DATABASE = "poweron_billing"
registerDatabase(SUBSCRIPTION_DATABASE)
_subscriptionInterfaces: Dict[str, "SubscriptionObjects"] = {}

View file

@ -393,13 +393,6 @@ def getRecordsetPaginatedWithRBAC(
continue
if key not in validColumns:
continue
if val is None:
# val=None in pagination.filters means "match empty/null"
# (same convention as connectorDbPostgre._buildPaginationClauses).
# Covers both historical empty-string values and true NULLs
# e.g. root-folder files where folderId may be "" or NULL.
whereConditions.append(f'("{key}" IS NULL OR "{key}"::TEXT = \'\')')
continue
if isinstance(val, dict):
op = val.get("operator", "equals")
v = val.get("value", "")
@ -576,13 +569,6 @@ def getDistinctColumnValuesWithRBAC(
continue
if key not in validColumns:
continue
if val is None:
# val=None in pagination.filters means "match empty/null"
# (same convention as connectorDbPostgre._buildPaginationClauses).
# Covers both historical empty-string values and true NULLs
# e.g. root-folder files where folderId may be "" or NULL.
whereConditions.append(f'("{key}" IS NULL OR "{key}"::TEXT = \'\')')
continue
if isinstance(val, dict):
op = val.get("operator", "equals")
v = val.get("value", "")

View file

@ -181,26 +181,21 @@ class VoiceObjects:
"error": str(e)
}
async def translateText(self, text: str,
sourceLanguage: Optional[str] = None,
async def translateText(self, text: str, sourceLanguage: str = "de",
targetLanguage: str = "en") -> Dict[str, Any]:
"""
Translate text using Google Cloud Translation API.
Args:
text: Text to translate
sourceLanguage: Source language ISO code (e.g. 'de', 'en'); pass None
or 'auto' to let Google auto-detect.
targetLanguage: Target language ISO code (e.g. 'en', 'de')
sourceLanguage: Source language code (e.g., 'de', 'en')
targetLanguage: Target language code (e.g., 'en', 'de')
Returns:
Dict containing translated text and metadata
"""
try:
logger.info(
f"🌐 Translation request: '{text}' "
f"({sourceLanguage or 'auto'} -> {targetLanguage})"
)
logger.info(f"🌐 Translation request: '{text}' ({sourceLanguage} -> {targetLanguage})")
if not text.strip():
return {
@ -338,11 +333,36 @@ class VoiceObjects:
"error": str(e)
}
# Voice Information
# Note: Available languages live in the central voice catalog
# (modules.shared.voiceCatalog); voice picks per language stay live from
# Google so users can see all available speakers per locale.
# Language and Voice Information
async def getAvailableLanguages(self) -> Dict[str, Any]:
"""
Get available languages from Google Cloud Text-to-Speech.
Returns:
Dict containing success status and list of available languages
"""
try:
logger.info("🌐 Getting available languages from Google Cloud TTS")
connector = self._getGoogleSpeechConnector()
result = await connector.getAvailableLanguages()
if result["success"]:
logger.info(f"✅ Found {len(result['languages'])} available languages")
else:
logger.warning(f"⚠️ Failed to get languages: {result.get('error', 'Unknown error')}")
return result
except Exception as e:
logger.error(f"❌ Error getting available languages: {e}")
return {
"success": False,
"error": str(e),
"languages": []
}
async def getAvailableVoices(self, languageCode: Optional[str] = None) -> Dict[str, Any]:
"""
Get available voices from Google Cloud Text-to-Speech.

View file

@ -1,102 +0,0 @@
# Copyright (c) 2025 Patrick Motsch
# All rights reserved.
"""
SysAdmin API for database table statistics and FK orphan detection/cleanup.
"""
import logging
from typing import Any, Dict, List, Optional
from fastapi import APIRouter, Depends, HTTPException, Request, status
from pydantic import BaseModel, Field
from modules.auth import limiter
from modules.auth.authentication import requireSysAdmin
from modules.datamodels.datamodelUam import User
from modules.system.databaseHealth import (
_cleanAllOrphans,
_cleanOrphans,
_getTableStats,
_scanOrphans,
)
logger = logging.getLogger(__name__)
router = APIRouter(
prefix="/api/admin/database-health",
tags=["Admin Database Health"],
)
class OrphanCleanRequest(BaseModel):
    """Body for deleting orphans for one FK relationship.

    Identifies a single foreign-key relationship by source database, source
    table (Pydantic model class name) and FK column; the cleanup endpoint
    deletes rows whose FK value no longer resolves to a target row.
    """
    db: str = Field(..., description="Source database name (e.g. poweron_app)")
    table: str = Field(..., description="Source table (Pydantic model class name)")
    column: str = Field(..., description="FK column on the source table")
@router.get("/stats")
@limiter.limit("30/minute")
def getDatabaseTableStats(
request: Request,
db: Optional[str] = None,
currentUser: User = Depends(requireSysAdmin),
) -> Dict[str, Any]:
"""Table statistics from pg_stat_user_tables (optional filter by database name)."""
rows = _getTableStats(dbFilter=db)
return {"stats": rows}
@router.get("/orphans")
@limiter.limit("10/minute")
def getDatabaseOrphans(
request: Request,
db: Optional[str] = None,
currentUser: User = Depends(requireSysAdmin),
) -> Dict[str, Any]:
"""FK orphan scan (optional filter by source database name)."""
rows = _scanOrphans(dbFilter=db)
return {"orphans": rows}
@router.post("/orphans/clean")
@limiter.limit("10/minute")
def postDatabaseOrphansClean(
request: Request,
body: OrphanCleanRequest,
currentUser: User = Depends(requireSysAdmin),
) -> Dict[str, Any]:
"""Delete orphaned rows for a single FK relationship."""
try:
deleted = _cleanOrphans(body.db, body.table, body.column)
except ValueError as e:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=str(e),
) from e
logger.info(
"SysAdmin orphan clean: user=%s db=%s table=%s column=%s deleted=%s",
currentUser.username,
body.db,
body.table,
body.column,
deleted,
)
return {"deleted": deleted}
@router.post("/orphans/clean-all")
@limiter.limit("2/minute")
def postDatabaseOrphansCleanAll(
request: Request,
currentUser: User = Depends(requireSysAdmin),
) -> Dict[str, Any]:
"""Run orphan cleanup for every relationship that currently has orphans."""
results: List[dict] = _cleanAllOrphans()
logger.info(
"SysAdmin orphan clean-all: user=%s batches=%s",
currentUser.username,
len(results),
)
return {"results": results}

View file

@ -9,7 +9,7 @@ import logging
from fastapi import APIRouter, Depends, HTTPException, Request, status
from modules.auth import limiter
from modules.auth.authentication import requirePlatformAdmin
from modules.auth.authentication import requireSysAdminRole
from modules.datamodels.datamodelUam import User
from modules.security.rootAccess import getRootDbAppConnector
@ -25,7 +25,7 @@ router = APIRouter(
@limiter.limit("30/minute")
def listDemoConfigs(
request: Request,
currentUser: User = Depends(requirePlatformAdmin),
currentUser: User = Depends(requireSysAdminRole),
) -> dict:
"""List all available demo configurations."""
from modules.demoConfigs import _getAvailableDemoConfigs
@ -41,7 +41,7 @@ def listDemoConfigs(
def loadDemoConfig(
code: str,
request: Request,
currentUser: User = Depends(requirePlatformAdmin),
currentUser: User = Depends(requireSysAdminRole),
) -> dict:
"""Load (create) a demo configuration. Idempotent."""
from modules.demoConfigs import _getDemoConfigByCode
@ -66,7 +66,7 @@ def loadDemoConfig(
def removeDemoConfig(
code: str,
request: Request,
currentUser: User = Depends(requirePlatformAdmin),
currentUser: User = Depends(requireSysAdminRole),
) -> dict:
"""Remove all data created by a demo configuration."""
from modules.demoConfigs import _getDemoConfigByCode

View file

@ -20,7 +20,7 @@ from pydantic import BaseModel, Field
from modules.datamodels.datamodelPagination import PaginationParams, PaginationMetadata, normalize_pagination_dict
from modules.routes.routeHelpers import _applyFiltersAndSort, handleFilterValuesInMemory, handleIdsInMemory
from modules.auth import limiter, getRequestContext, RequestContext, requirePlatformAdmin
from modules.auth import limiter, getRequestContext, RequestContext, requireSysAdminRole
from modules.datamodels.datamodelUam import User, UserInDB
from modules.datamodels.datamodelFeatures import Feature, FeatureInstance
from modules.interfaces.interfaceDbApp import getRootInterface
@ -95,18 +95,11 @@ def list_features(
"""
try:
# Features come from the RBAC Catalog (registered at startup from feature containers)
# NOT from the database - features are code-defined, not user-created.
# Hide meta-features (instantiable=False, e.g. ``system``) and soft-
# disabled features (enabled=False) so they don't appear in selection
# dropdowns like Admin > Feature-Instanzen > Neue Instanz.
# NOT from the database - features are code-defined, not user-created
catalogService = getCatalogService()
features = catalogService.getFeatureDefinitions()
features = [
f for f in features
if f.get("instantiable", True) and f.get("enabled", True)
]
return features
except Exception as e:
logger.error(f"Error listing features: {e}")
raise HTTPException(
@ -358,7 +351,7 @@ def create_feature(
code: str = Query(..., description="Unique feature code"),
label: Dict[str, str] = None,
icon: str = Query("mdi-puzzle", description="Icon identifier"),
sysAdmin: User = Depends(requirePlatformAdmin)
sysAdmin: User = Depends(requireSysAdminRole)
) -> Dict[str, Any]:
"""
Create a new feature definition.
@ -527,7 +520,7 @@ def get_feature_instance(
# Verify mandate access (unless SysAdmin)
if context.mandateId and str(instance.mandateId) != str(context.mandateId):
if not context.isPlatformAdmin:
if not context.hasSysAdminRole:
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail=routeApiMsg("Access denied to this feature instance")
@ -667,14 +660,14 @@ def delete_feature_instance(
# Verify mandate access
if context.mandateId and str(instance.mandateId) != str(context.mandateId):
if not context.isPlatformAdmin:
if not context.hasSysAdminRole:
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail=routeApiMsg("Access denied to this feature instance")
)
# Check mandate admin permission
if not _hasMandateAdminRole(context) and not context.isPlatformAdmin:
if not _hasMandateAdminRole(context) and not context.hasSysAdminRole:
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail=routeApiMsg("Mandate-Admin role required to delete feature instances")
@ -734,14 +727,14 @@ def updateFeatureInstance(
# Verify mandate access
if context.mandateId and str(instance.mandateId) != str(context.mandateId):
if not context.isPlatformAdmin:
if not context.hasSysAdminRole:
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail=routeApiMsg("Access denied to this feature instance")
)
# Check mandate admin permission
if not _hasMandateAdminRole(context) and not context.isPlatformAdmin:
if not _hasMandateAdminRole(context) and not context.hasSysAdminRole:
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail=routeApiMsg("Mandate-Admin role required to update feature instances")
@ -817,14 +810,14 @@ def sync_instance_roles(
# Verify mandate access
if context.mandateId and str(instance.mandateId) != str(context.mandateId):
if not context.isPlatformAdmin:
if not context.hasSysAdminRole:
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail=routeApiMsg("Access denied to this feature instance")
)
# Check admin permission (Mandate-Admin or Feature-Admin)
if not _hasMandateAdminRole(context) and not context.isPlatformAdmin:
if not _hasMandateAdminRole(context) and not context.hasSysAdminRole:
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail=routeApiMsg("Admin role required to sync roles")
@ -870,14 +863,10 @@ def _syncInstanceWorkflows(
instances created before template workflows were defined, or when
the initial copy failed silently.
PlatformAdmin only.
SysAdmin only.
"""
try:
if not context.isPlatformAdmin:
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail="Platform admin privileges required",
)
requireSysAdminRole(context.user)
rootInterface = getRootInterface()
featureInterface = getFeatureInterface(rootInterface.db)
@ -986,7 +975,7 @@ def list_template_roles(
pagination: Optional[str] = Query(None, description="JSON-encoded PaginationParams"),
mode: Optional[str] = Query(None, description="'filterValues' for distinct column values, 'ids' for all filtered IDs"),
column: Optional[str] = Query(None, description="Column key (required when mode=filterValues)"),
sysAdmin: User = Depends(requirePlatformAdmin),
sysAdmin: User = Depends(requireSysAdminRole),
):
"""List global template roles with pagination support."""
try:
@ -1046,7 +1035,7 @@ def create_template_role(
roleLabel: str = Query(..., description="Role label (e.g., 'admin', 'viewer')"),
featureCode: str = Query(..., description="Feature code this role belongs to"),
description: Dict[str, str] = None,
sysAdmin: User = Depends(requirePlatformAdmin)
sysAdmin: User = Depends(requireSysAdminRole)
) -> Dict[str, Any]:
"""
Create a global template role for a feature.
@ -1156,7 +1145,7 @@ def list_feature_instance_users(
# Verify mandate access (unless SysAdmin)
if context.mandateId and str(instance.mandateId) != str(context.mandateId):
if not context.isPlatformAdmin:
if not context.hasSysAdminRole:
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail=routeApiMsg("Access denied to this feature instance")
@ -1270,14 +1259,14 @@ def add_user_to_feature_instance(
# Verify mandate access
if context.mandateId and str(instance.mandateId) != str(context.mandateId):
if not context.isPlatformAdmin:
if not context.hasSysAdminRole:
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail=routeApiMsg("Access denied to this feature instance")
)
# Check admin permission
if not _hasMandateAdminRole(context) and not context.isPlatformAdmin:
if not _hasMandateAdminRole(context) and not context.hasSysAdminRole:
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail=routeApiMsg("Admin role required to add users to feature instances")
@ -1378,14 +1367,14 @@ def remove_user_from_feature_instance(
# Verify mandate access
if context.mandateId and str(instance.mandateId) != str(context.mandateId):
if not context.isPlatformAdmin:
if not context.hasSysAdminRole:
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail=routeApiMsg("Access denied to this feature instance")
)
# Check admin permission
if not _hasMandateAdminRole(context) and not context.isPlatformAdmin:
if not _hasMandateAdminRole(context) and not context.hasSysAdminRole:
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail=routeApiMsg("Admin role required to remove users from feature instances")
@ -1468,14 +1457,14 @@ def update_feature_instance_user_roles(
# Verify mandate access
if context.mandateId and str(instance.mandateId) != str(context.mandateId):
if not context.isPlatformAdmin:
if not context.hasSysAdminRole:
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail=routeApiMsg("Access denied to this feature instance")
)
# Check admin permission
if not _hasMandateAdminRole(context) and not context.isPlatformAdmin:
if not _hasMandateAdminRole(context) and not context.hasSysAdminRole:
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail=routeApiMsg("Admin role required to update user roles")
@ -1576,7 +1565,7 @@ def get_feature_instance_available_roles(
# Verify mandate access
if context.mandateId and str(instance.mandateId) != str(context.mandateId):
if not context.isPlatformAdmin:
if not context.hasSysAdminRole:
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail=routeApiMsg("Access denied to this feature instance")
@ -1679,7 +1668,7 @@ def _renameFeatureInstance(
userId = str(context.user.id)
isInstanceAdmin = False
if context.isPlatformAdmin:
if context.hasSysAdminRole:
isInstanceAdmin = True
else:
from modules.datamodels.datamodelMembership import FeatureAccess, FeatureAccessRole
@ -1718,7 +1707,7 @@ def _hasMandateAdminRole(context: RequestContext) -> bool:
A user is mandate admin if they have the 'admin' role at mandate level.
"""
if context.isPlatformAdmin:
if context.hasSysAdminRole:
return True
if not context.roleIds:

View file

@ -12,7 +12,7 @@ import logging
from datetime import datetime
from fastapi import APIRouter, HTTPException, Depends, Request, Query
from fastapi.responses import PlainTextResponse
from modules.auth import limiter, requireSysAdmin
from modules.auth import limiter, requireSysAdminRole
from modules.shared.configuration import APP_CONFIG
from modules.datamodels.datamodelUam import User
@ -63,7 +63,7 @@ def _readLastNLines(filePath: str, n: int) -> list[str]:
def getLogEntries(
request: Request,
count: int = Query(default=200, ge=1, le=50000, description="Number of log entries to return"),
currentUser: User = Depends(requireSysAdmin),
currentUser: User = Depends(requireSysAdminRole),
) -> dict:
"""
Get the last N log entries from the gateway log files.
@ -104,7 +104,7 @@ def getLogEntries(
def downloadLog(
request: Request,
count: int = Query(default=1000, ge=1, le=100000, description="Number of log entries to download"),
currentUser: User = Depends(requireSysAdmin),
currentUser: User = Depends(requireSysAdminRole),
) -> PlainTextResponse:
"""
Download the last N log entries as a plain text file.

View file

@ -17,7 +17,7 @@ import logging
import json
import math
from modules.auth import limiter, getRequestContext, requirePlatformAdmin, RequestContext
from modules.auth import limiter, getRequestContext, requireSysAdminRole, RequestContext
from modules.datamodels.datamodelUam import User, UserPermissions, AccessLevel
from modules.datamodels.datamodelRbac import AccessRuleContext, AccessRule, Role
from modules.datamodels.datamodelMembership import UserMandate
@ -242,7 +242,7 @@ def get_all_permissions(
logger.debug(f"UI/RESOURCE permissions: User has {len(roleIds)} roles across all mandates")
if not roleIds and not reqContext.isPlatformAdmin:
if not roleIds and not reqContext.hasSysAdminRole:
# No roles at all, return empty permissions
for ctx in contextsToFetch:
result[ctx.value.lower()] = {}
@ -362,7 +362,7 @@ def get_access_rules(
- List of AccessRule objects
"""
try:
isSysAdmin = reqContext.isPlatformAdmin
isSysAdmin = reqContext.hasSysAdminRole
adminMandateIds = [] if isSysAdmin else _getAdminMandateIds(reqContext)
if not isSysAdmin and not adminMandateIds:
raise HTTPException(status_code=403, detail=routeApiMsg("Admin role required"))
@ -487,7 +487,7 @@ def get_access_rules_by_role(
- List of AccessRule objects for the specified role
"""
try:
isSysAdmin = reqContext.isPlatformAdmin
isSysAdmin = reqContext.hasSysAdminRole
adminMandateIds = [] if isSysAdmin else _getAdminMandateIds(reqContext)
if not isSysAdmin and not adminMandateIds:
raise HTTPException(status_code=403, detail=routeApiMsg("Admin role required"))
@ -534,7 +534,7 @@ def get_access_rule(
- AccessRule object
"""
try:
isSysAdmin = reqContext.isPlatformAdmin
isSysAdmin = reqContext.hasSysAdminRole
adminMandateIds = [] if isSysAdmin else _getAdminMandateIds(reqContext)
if not isSysAdmin and not adminMandateIds:
raise HTTPException(status_code=403, detail=routeApiMsg("Admin role required"))
@ -585,7 +585,7 @@ def create_access_rule(
- Created AccessRule object
"""
try:
isSysAdmin = reqContext.isPlatformAdmin
isSysAdmin = reqContext.hasSysAdminRole
adminMandateIds = [] if isSysAdmin else _getAdminMandateIds(reqContext)
if not isSysAdmin and not adminMandateIds:
raise HTTPException(status_code=403, detail=routeApiMsg("Admin role required"))
@ -665,7 +665,7 @@ def update_access_rule(
- Updated AccessRule object
"""
try:
isSysAdmin = reqContext.isPlatformAdmin
isSysAdmin = reqContext.hasSysAdminRole
adminMandateIds = [] if isSysAdmin else _getAdminMandateIds(reqContext)
if not isSysAdmin and not adminMandateIds:
raise HTTPException(status_code=403, detail=routeApiMsg("Admin role required"))
@ -753,7 +753,7 @@ def delete_access_rule(
- Success message
"""
try:
isSysAdmin = reqContext.isPlatformAdmin
isSysAdmin = reqContext.hasSysAdminRole
adminMandateIds = [] if isSysAdmin else _getAdminMandateIds(reqContext)
if not isSysAdmin and not adminMandateIds:
raise HTTPException(status_code=403, detail=routeApiMsg("Admin role required"))
@ -836,7 +836,7 @@ def list_roles(
- List of role dictionaries with role label, description, user count, and computed scopeType
"""
try:
isSysAdmin = reqContext.isPlatformAdmin
isSysAdmin = reqContext.hasSysAdminRole
adminMandateIds = [] if isSysAdmin else _getAdminMandateIds(reqContext)
if not isSysAdmin and not adminMandateIds:
raise HTTPException(status_code=403, detail=routeApiMsg("Admin role required"))
@ -1017,7 +1017,7 @@ def create_role(
- Created role dictionary
"""
try:
isSysAdmin = reqContext.isPlatformAdmin
isSysAdmin = reqContext.hasSysAdminRole
adminMandateIds = [] if isSysAdmin else _getAdminMandateIds(reqContext)
if not isSysAdmin and not adminMandateIds:
raise HTTPException(status_code=403, detail=routeApiMsg("Admin role required"))
@ -1076,7 +1076,7 @@ def get_role(
- Role dictionary
"""
try:
isSysAdmin = reqContext.isPlatformAdmin
isSysAdmin = reqContext.hasSysAdminRole
adminMandateIds = [] if isSysAdmin else _getAdminMandateIds(reqContext)
if not isSysAdmin and not adminMandateIds:
raise HTTPException(status_code=403, detail=routeApiMsg("Admin role required"))
@ -1137,7 +1137,7 @@ def update_role(
- Updated role dictionary
"""
try:
isSysAdmin = reqContext.isPlatformAdmin
isSysAdmin = reqContext.hasSysAdminRole
adminMandateIds = [] if isSysAdmin else _getAdminMandateIds(reqContext)
if not isSysAdmin and not adminMandateIds:
raise HTTPException(status_code=403, detail=routeApiMsg("Admin role required"))
@ -1201,7 +1201,7 @@ def delete_role(
- Success message
"""
try:
isSysAdmin = reqContext.isPlatformAdmin
isSysAdmin = reqContext.hasSysAdminRole
adminMandateIds = [] if isSysAdmin else _getAdminMandateIds(reqContext)
if not isSysAdmin and not adminMandateIds:
raise HTTPException(status_code=403, detail=routeApiMsg("Admin role required"))
@ -1357,7 +1357,7 @@ def getCatalogObjects(
def cleanup_duplicate_access_rules(
request: Request,
dryRun: bool = Query(True, description="If true, only report duplicates without deleting"),
currentUser: User = Depends(requirePlatformAdmin)
currentUser: User = Depends(requireSysAdminRole)
) -> dict:
"""
Find and remove duplicate AccessRules.

View file

@ -75,7 +75,7 @@ def _hasMandateAdminRole(context: RequestContext) -> bool:
Loads roles independently from request context (context.roleIds may be empty
when no X-Mandate-Id header is sent, e.g., on admin pages).
"""
if context.isPlatformAdmin:
if context.hasSysAdminRole:
return True
try:
rootInterface = getRootInterface()
@ -123,7 +123,7 @@ def listUsersForOverview(
try:
interface = getRootInterface()
if context.isPlatformAdmin and not context.mandateId:
if context.hasSysAdminRole and not context.mandateId:
# SysAdmin without mandate context: all users
allUsers = interface.getAllUsers()
elif context.mandateId:
@ -164,7 +164,6 @@ def listUsersForOverview(
"email": userData.get("email"),
"fullName": userData.get("fullName"),
"isSysAdmin": userData.get("isSysAdmin", False),
"isPlatformAdmin": userData.get("isPlatformAdmin", False),
"enabled": userData.get("enabled", True),
})
@ -218,7 +217,7 @@ def getUserAccessOverview(
interface = getRootInterface()
# MandateAdmin: verify the requested user shares at least one admin mandate
if not context.isPlatformAdmin:
if not context.hasSysAdminRole:
# Get admin's mandate IDs
adminMandateIds = []
userMandates = interface.getUserMandates(str(context.user.id))
@ -259,7 +258,6 @@ def getUserAccessOverview(
"email": user.email,
"fullName": user.fullName,
"isSysAdmin": user.isSysAdmin,
"isPlatformAdmin": getattr(user, "isPlatformAdmin", False),
"enabled": user.enabled,
}
@ -483,8 +481,7 @@ def getUserAccessOverview(
return {
"user": userInfo,
"isSysAdmin": bool(getattr(user, "isSysAdmin", False)),
"isPlatformAdmin": bool(getattr(user, "isPlatformAdmin", False)),
"isSysAdmin": False,
"roles": allRoles,
"mandates": mandatesInfo,
"uiAccess": uiAccess,

View file

@ -10,10 +10,9 @@ Provides three views:
RBAC: mandate-admin or compliance-viewer role required.
"""
import json
import logging
import re
from typing import Any, Dict, List, Optional
from typing import Optional
from fastapi import APIRouter, Depends, HTTPException, Query, Path, status
from starlette.requests import Request
@ -28,110 +27,10 @@ routeApiMsg = apiRouteContext("routeAudit")
router = APIRouter(prefix="/api/audit", tags=["Audit"])
def _applySortFilterSearch(
items: List[Dict[str, Any]],
*,
sortJson: Optional[str] = None,
filtersJson: Optional[str] = None,
search: Optional[str] = None,
searchableKeys: Optional[List[str]] = None,
) -> List[Dict[str, Any]]:
"""Apply sort, filter and search to a list of dicts in-memory."""
if filtersJson:
try:
filters = json.loads(filtersJson) if isinstance(filtersJson, str) else filtersJson
if isinstance(filters, dict):
for key, val in filters.items():
if val is None or val == "":
continue
if isinstance(val, list):
items = [r for r in items if str(r.get(key, "")) in [str(v) for v in val]]
else:
items = [r for r in items if str(r.get(key, "")).lower() == str(val).lower()]
except (json.JSONDecodeError, TypeError):
pass
if search and searchableKeys:
needle = search.lower()
items = [r for r in items if any(needle in str(r.get(k, "")).lower() for k in searchableKeys)]
if sortJson:
try:
sortList = json.loads(sortJson) if isinstance(sortJson, str) else sortJson
if isinstance(sortList, list):
for sortDef in reversed(sortList):
field = sortDef.get("field", "")
desc = sortDef.get("direction", "asc") == "desc"
items.sort(key=lambda r, f=field: (r.get(f) is None, r.get(f, "")), reverse=desc)
except (json.JSONDecodeError, TypeError):
pass
return items
def _distinctColumnValues(items: List[Dict[str, Any]], column: str) -> List[str]:
"""Extract sorted distinct non-empty string values for a column."""
vals = set()
for r in items:
v = r.get(column)
if v is not None and v != "":
vals.add(str(v))
return sorted(vals)
def _enrichUserAndInstanceLabels(
items: List[Dict[str, Any]],
context: "RequestContext",
userKey: str = "userId",
usernameKey: str = "username",
instanceKey: str = "featureInstanceId",
instanceLabelKey: str = "instanceLabel",
) -> None:
"""Resolve userId → username and featureInstanceId → label in-place."""
userIds = set()
instanceIds = set()
for r in items:
uid = r.get(userKey)
if uid and not r.get(usernameKey):
userIds.add(uid)
iid = r.get(instanceKey)
if iid:
instanceIds.add(iid)
userMap: Dict[str, str] = {}
instanceMap: Dict[str, str] = {}
try:
from modules.interfaces.interfaceDbApp import getInterface
appIf = getInterface(
context.user,
mandateId=str(context.mandateId) if context.mandateId else None,
)
if userIds:
users = appIf.getUsersByIds(list(userIds))
for uid, u in users.items():
name = getattr(u, "displayName", None) or getattr(u, "email", None) or uid
userMap[uid] = name
if instanceIds:
for iid in instanceIds:
fi = appIf.getFeatureInstance(iid)
if fi:
instanceMap[iid] = getattr(fi, "label", None) or getattr(fi, "featureCode", None) or iid
except Exception as e:
logger.debug("_enrichUserAndInstanceLabels: %s", e)
for r in items:
uid = r.get(userKey)
if uid and not r.get(usernameKey) and uid in userMap:
r[usernameKey] = userMap[uid]
iid = r.get(instanceKey)
if iid and iid in instanceMap:
r[instanceLabelKey] = instanceMap[iid]
def _requireAuditAccess(context: RequestContext):
"""Raise 403 unless user has mandate-admin or compliance-viewer access."""
if context.isPlatformAdmin:
from modules.auth.authentication import _hasSysAdminRole
if _hasSysAdminRole(str(context.user.id)):
return
from modules.interfaces.interfaceDbApp import getInterface
@ -163,11 +62,6 @@ async def getAiAuditLog(
dateTo: Optional[float] = Query(None, description="UTC epoch seconds"),
limit: int = Query(50, ge=1, le=500),
offset: int = Query(0, ge=0),
sort: Optional[str] = Query(None, description='JSON array, e.g. [{"field":"timestamp","direction":"desc"}]'),
filters: Optional[str] = Query(None, description='JSON object, e.g. {"aiModel":"gpt-4o"}'),
search: Optional[str] = Query(None),
mode: Optional[str] = Query(None, description="'filterValues' to get distinct values for a column"),
column: Optional[str] = Query(None, description="Column key (required when mode=filterValues)"),
):
_requireAuditAccess(context)
mandateId = str(context.mandateId) if context.mandateId else ""
@ -175,35 +69,16 @@ async def getAiAuditLog(
raise HTTPException(status_code=400, detail=routeApiMsg("Mandanten-ID erforderlich"))
from modules.shared.aiAuditLogger import aiAuditLogger
result = aiAuditLogger.getAiAuditLogs(
return aiAuditLogger.getAiAuditLogs(
mandateId,
userId=userId,
featureInstanceId=featureInstanceId,
aiModel=aiModel,
fromTimestamp=dateFrom,
toTimestamp=dateTo,
limit=9999,
offset=0,
limit=limit,
offset=offset,
)
items = result.get("items", [])
_enrichUserAndInstanceLabels(items, context)
if mode == "filterValues" and column:
items = _applySortFilterSearch(items, filtersJson=filters)
return _distinctColumnValues(items, column)
items = _applySortFilterSearch(
items,
sortJson=sort,
filtersJson=filters,
search=search,
searchableKeys=["username", "aiModel", "instanceLabel", "aiProvider", "operationType"],
)
totalItems = len(items)
page = items[offset: offset + limit]
return {"items": page, "totalItems": totalItems}
@router.get("/ai-log/{entryId}/content")
@ -259,11 +134,6 @@ async def getAuditLog(
dateTo: Optional[float] = Query(None),
limit: int = Query(100, ge=1, le=500),
offset: int = Query(0, ge=0),
sort: Optional[str] = Query(None),
filters: Optional[str] = Query(None),
search: Optional[str] = Query(None),
mode: Optional[str] = Query(None),
column: Optional[str] = Query(None),
):
_requireAuditAccess(context)
mandateId = str(context.mandateId) if context.mandateId else None
@ -276,23 +146,8 @@ async def getAuditLog(
action=action,
fromTimestamp=dateFrom,
toTimestamp=dateTo,
limit=9999,
limit=limit + offset + 1,
)
_enrichUserAndInstanceLabels(records, context)
if mode == "filterValues" and column:
records = _applySortFilterSearch(records, filtersJson=filters)
return _distinctColumnValues(records, column)
records = _applySortFilterSearch(
records,
sortJson=sort,
filtersJson=filters,
search=search,
searchableKeys=["username", "action", "resourceType", "category"],
)
totalItems = len(records)
page = records[offset: offset + limit]
return {"items": page, "totalItems": totalItems}
@ -326,11 +181,6 @@ async def getNeutralizationMappings(
context: RequestContext = Depends(getRequestContext),
limit: int = Query(200, ge=1, le=2000),
offset: int = Query(0, ge=0),
sort: Optional[str] = Query(None),
filters: Optional[str] = Query(None),
search: Optional[str] = Query(None),
mode: Optional[str] = Query(None),
column: Optional[str] = Query(None),
):
_requireAuditAccess(context)
mandateId = str(context.mandateId) if context.mandateId else ""
@ -346,23 +196,7 @@ async def getNeutralizationMappings(
pType = item.get("patternType", "")
uid = item.get("id", "")
item["placeholder"] = f"[{pType}.{uid}]" if pType and uid else uid
_enrichUserAndInstanceLabels(items, context)
if mode == "filterValues" and column:
items = _applySortFilterSearch(items, filtersJson=filters)
return _distinctColumnValues(items, column)
items = _applySortFilterSearch(
items,
sortJson=sort,
filtersJson=filters,
search=search,
searchableKeys=["placeholder", "originalText", "patternType"],
)
if not sort:
items.sort(key=lambda r: (r.get("patternType", ""), r.get("originalText", "")))
items.sort(key=lambda r: (r.get("patternType", ""), r.get("originalText", "")))
totalItems = len(items)
page = items[offset: offset + limit]
return {"items": page, "totalItems": totalItems}

View file

@ -17,7 +17,7 @@ from datetime import date, datetime, timezone
from pydantic import BaseModel, Field
# Import auth module
from modules.auth import limiter, requirePlatformAdmin, getRequestContext, RequestContext
from modules.auth import limiter, requireSysAdminRole, getRequestContext, RequestContext
# Import billing components
from modules.interfaces.interfaceDbBilling import getInterface as getBillingInterface, _getRootInterface
@ -86,7 +86,8 @@ def _getBillingDataScope(user) -> BillingDataScope:
"""
scope = BillingDataScope(userId=user.id)
if bool(getattr(user, "isPlatformAdmin", False)):
from modules.auth.authentication import _hasSysAdminRole
if _hasSysAdminRole(str(user.id)):
scope.isGlobalAdmin = True
return scope
@ -140,8 +141,8 @@ def _getBillingDataScope(user) -> BillingDataScope:
def _isAdminOfMandate(ctx: RequestContext, targetMandateId: str) -> bool:
"""Check if user is PlatformAdmin or admin of the specified mandate."""
if ctx.isPlatformAdmin:
"""Check if user is SysAdmin or admin of the specified mandate."""
if ctx.hasSysAdminRole:
return True
try:
from modules.interfaces.interfaceDbApp import getRootInterface
@ -733,7 +734,7 @@ def addCredit(
targetMandateId: str = Path(..., description="Mandate ID"),
creditRequest: CreditAddRequest = Body(...),
ctx: RequestContext = Depends(getRequestContext),
_admin = Depends(requirePlatformAdmin)
_admin = Depends(requireSysAdminRole)
):
"""
Add credit to a billing account (SysAdmin only).
@ -1460,7 +1461,7 @@ def getTransactionsAdmin(
def getMandateViewBalances(
request: Request,
ctx: RequestContext = Depends(getRequestContext),
_admin = Depends(requirePlatformAdmin)
_admin = Depends(requireSysAdminRole)
):
"""
Get mandate-level balances (SysAdmin only).
@ -1483,7 +1484,7 @@ def getMandateViewTransactions(
request: Request,
limit: int = Query(default=100, ge=1, le=1000),
ctx: RequestContext = Depends(getRequestContext),
_admin = Depends(requirePlatformAdmin)
_admin = Depends(requireSysAdminRole)
):
"""
Get all transactions across mandates (SysAdmin only).

View file

@ -427,54 +427,14 @@ def update_connection(
detail=routeApiMsg("Connection not found")
)
# Merge incoming changes into a dict and re-validate via pydantic.
# Direct setattr() bypasses type coercion (PowerOnModel doesn't enable
# validate_assignment), which leaves enum fields as raw strings and
# later breaks .value access. Also filters out computed / unknown keys.
writableFields = set(UserConnection.model_fields.keys())
previous = connection.model_dump()
merged = dict(previous)
# Update connection fields
for field, value in connection_data.items():
if field in writableFields:
merged[field] = value
merged["lastChecked"] = getUtcTimestamp()
connection = UserConnection.model_validate(merged)
# If this is a remote (non-local) connection and any identity-bearing
# field changed, the stored OAuth tokens no longer match the account.
# Force the user to reconnect: mark PENDING and revoke existing tokens.
identityFields = ("externalUsername", "externalEmail", "externalId", "authority")
authorityValue = (
connection.authority.value
if hasattr(connection.authority, "value")
else str(connection.authority)
)
isRemote = authorityValue != AuthAuthority.LOCAL.value
identityChanged = any(
previous.get(field) != merged.get(field) for field in identityFields
)
if isRemote and identityChanged:
connection.status = ConnectionStatus.PENDING
connection.expiresAt = None
try:
existingTokens = interface.db.getRecordset(
Token, recordFilter={"connectionId": connectionId}
)
for token in existingTokens:
interface.revokeTokenById(
token["id"],
revokedBy=currentUser.id,
reason="connection identity changed",
)
logger.info(
f"Revoked {len(existingTokens)} token(s) for connection "
f"{connectionId} after identity change; reconnect required."
)
except Exception as e:
logger.warning(
f"Failed to revoke tokens for connection {connectionId}: {str(e)}"
)
if hasattr(connection, field):
setattr(connection, field, value)
# Update lastChecked timestamp using UTC timestamp
connection.lastChecked = getUtcTimestamp()
# Update connection - models now handle timestamp serialization automatically
interface.db.recordModify(UserConnection, connectionId, connection.model_dump())

View file

@ -8,6 +8,7 @@ import json
# Import auth module
from modules.auth import limiter, getCurrentUser, getRequestContext, RequestContext
from modules.auth.authentication import _hasSysAdminRole
# Import interfaces
import modules.interfaces.interfaceDbManagement as interfaceDbManagement
@ -242,16 +243,8 @@ def get_files(
recordFilter = None
if paginationParams and paginationParams.filters and "folderId" in paginationParams.filters:
fVal = paginationParams.filters.get("folderId")
# For a concrete folderId we use recordFilter (exact equality).
# For null / empty (= "root") we keep it in pagination.filters so the
# connector applies `IS NULL OR = ''` files predating the folderId
# fix were stored with an empty string instead of NULL.
if fVal is None or (isinstance(fVal, str) and fVal.strip() == ""):
paginationParams.filters["folderId"] = None
else:
paginationParams.filters.pop("folderId")
recordFilter = {"folderId": fVal}
fVal = paginationParams.filters.pop("folderId")
recordFilter = {"folderId": fVal}
result = managementInterface.getAllFiles(pagination=paginationParams, recordFilter=recordFilter)
@ -289,19 +282,13 @@ async def upload_file(
file: UploadFile = File(...),
workflowId: Optional[str] = Form(None),
featureInstanceId: Optional[str] = Form(None),
folderId: Optional[str] = Form(None),
currentUser: User = Depends(getCurrentUser),
context: RequestContext = Depends(getRequestContext),
currentUser: User = Depends(getCurrentUser)
) -> JSONResponse:
# Add fileName property to UploadFile for consistency with backend model
file.fileName = file.filename
"""Upload a file"""
try:
managementInterface = interfaceDbManagement.getInterface(
currentUser,
mandateId=str(context.mandateId) if context.mandateId else None,
featureInstanceId=str(context.featureInstanceId) if context.featureInstanceId else None,
)
managementInterface = interfaceDbManagement.getInterface(currentUser)
# Read file
fileContent = await file.read()
@ -314,29 +301,12 @@ async def upload_file(
detail=f"File too large. Maximum size: {interfaceDbManagement.APP_CONFIG.get('File_Management_MAX_UPLOAD_SIZE_MB')}MB"
)
# Normalize folderId: empty string / "null" / "root" → None (root folder)
normalizedFolderId: Optional[str] = folderId
if isinstance(normalizedFolderId, str):
trimmed = normalizedFolderId.strip()
if not trimmed or trimmed.lower() in {"null", "none", "root"}:
normalizedFolderId = None
else:
normalizedFolderId = trimmed
# Save file via LucyDOM interface in the database
fileItem, duplicateType = managementInterface.saveUploadedFile(
fileContent, file.filename, folderId=normalizedFolderId
)
fileItem, duplicateType = managementInterface.saveUploadedFile(fileContent, file.filename)
if featureInstanceId and not fileItem.featureInstanceId:
managementInterface.updateFile(fileItem.id, {"featureInstanceId": featureInstanceId})
fileItem.featureInstanceId = featureInstanceId
# For exact duplicates we keep the existing record, but move it into the
# target folder so the user actually sees their upload land where they expect.
if duplicateType == "exact_duplicate" and normalizedFolderId != getattr(fileItem, "folderId", None):
managementInterface.updateFile(fileItem.id, {"folderId": normalizedFolderId})
fileItem.folderId = normalizedFolderId
# Determine response message based on duplicate type
if duplicateType == "exact_duplicate":
@ -532,153 +502,6 @@ def move_folder(
raise HTTPException(status_code=500, detail=str(e))
@router.patch("/folders/{folderId}/scope")
@limiter.limit("10/minute")
def _updateFolderScope(
request: Request,
folderId: str = Path(..., description="ID of the folder"),
scope: str = Body(..., embed=True),
context: RequestContext = Depends(getRequestContext),
) -> Dict[str, Any]:
"""Update the scope of a folder. Propagates to all files inside (recursively). Global scope requires sysAdmin."""
validScopes = {"personal", "featureInstance", "mandate", "global"}
if scope not in validScopes:
raise HTTPException(status_code=400, detail=f"Invalid scope: {scope}. Must be one of {validScopes}")
if scope == "global" and not context.isSysAdmin:
raise HTTPException(status_code=403, detail=routeApiMsg("Only sysadmins can set global scope"))
try:
mgmt = interfaceDbManagement.getInterface(
context.user,
mandateId=str(context.mandateId) if context.mandateId else None,
featureInstanceId=str(context.featureInstanceId) if context.featureInstanceId else None,
)
folder = mgmt.getFolder(folderId)
if not folder:
raise HTTPException(status_code=404, detail=routeApiMsg("Folder not found"))
mgmt.updateFolder(folderId, {"scope": scope})
fileIds = _collectFolderFileIds(mgmt, folderId)
for fid in fileIds:
try:
mgmt.updateFile(fid, {"scope": scope})
except Exception as e:
logger.error("Folder scope propagation: failed to update file %s: %s", fid, e)
logger.info("Updated scope=%s for folder %s: %d files affected", scope, folderId, len(fileIds))
return {"folderId": folderId, "scope": scope, "filesUpdated": len(fileIds)}
except HTTPException:
raise
except Exception as e:
logger.error(f"Error updating folder scope: {e}")
raise HTTPException(status_code=500, detail=str(e))
@router.patch("/folders/{folderId}/neutralize")
@limiter.limit("10/minute")
def updateFolderNeutralize(
    request: Request,
    background_tasks: BackgroundTasks,
    folderId: str = Path(..., description="ID of the folder"),
    neutralize: bool = Body(..., embed=True),
    context: RequestContext = Depends(getRequestContext),
) -> Dict[str, Any]:
    """Toggle neutralization on a folder. Propagates to all files inside (recursively).
    When turning ON: all files in the folder get ``neutralize=True``, their
    knowledge indexes are purged synchronously, and background re-indexing
    is triggered.
    When turning OFF: files revert to ``neutralize=False`` unless they were
    individually marked (not implemented yet -- all are reverted).

    Returns a summary dict: {folderId, neutralize, filesUpdated}.
    Raises 404 if the folder does not exist; 500 on unexpected errors.
    """
    try:
        # Management interface scoped to the caller's mandate / feature context.
        mgmt = interfaceDbManagement.getInterface(
            context.user,
            mandateId=str(context.mandateId) if context.mandateId else None,
            featureInstanceId=str(context.featureInstanceId) if context.featureInstanceId else None,
        )
        folder = mgmt.getFolder(folderId)
        if not folder:
            raise HTTPException(status_code=404, detail=routeApiMsg("Folder not found"))
        # Flag the folder itself first, then propagate to the contained files.
        mgmt.updateFolder(folderId, {"neutralize": neutralize})
        fileIds = _collectFolderFileIds(mgmt, folderId)
        logger.info("Folder neutralize toggle %s for folder %s: %d files affected", neutralize, folderId, len(fileIds))
        from modules.interfaces.interfaceDbKnowledge import getInterface as getKnowledgeInterface
        knowledgeDb = getKnowledgeInterface()
        # Pass 1: flip the per-file flag and adjust the knowledge index.
        for fid in fileIds:
            try:
                mgmt.updateFile(fid, {"neutralize": neutralize})
                if neutralize:
                    # ON: purge the existing content index synchronously; a fresh
                    # (neutralized) index is rebuilt by the background task below.
                    try:
                        knowledgeDb.deleteFileContentIndex(fid)
                    except Exception as e:
                        logger.warning("Folder neutralize: failed to purge index for file %s: %s", fid, e)
                else:
                    # OFF: reset index metadata back to the original state.
                    try:
                        from modules.datamodels.datamodelKnowledge import FileContentIndex
                        # NOTE(review): filters FileContentIndex by "id" == file id —
                        # confirm the index record id really equals the file id (a
                        # dedicated fileId column would be the usual shape here).
                        indices = knowledgeDb.db.getRecordset(FileContentIndex, recordFilter={"id": fid})
                        for idx in indices:
                            idxId = idx.get("id") if isinstance(idx, dict) else getattr(idx, "id", None)
                            if idxId:
                                knowledgeDb.db.recordModify(FileContentIndex, idxId, {
                                    "neutralizationStatus": "original",
                                    "isNeutralized": False,
                                })
                    except Exception as e:
                        logger.warning("Folder neutralize OFF: metadata update failed for %s: %s", fid, e)
            except Exception as e:
                # Best-effort: one failing file must not abort the whole toggle.
                logger.error("Folder neutralize: failed to update file %s: %s", fid, e)
        # Pass 2: schedule re-indexing for every affected file. The default
        # arguments on _reindex bind the per-iteration values (avoids the
        # classic late-binding closure pitfall in loops).
        for fid in fileIds:
            fileMeta = mgmt.getFile(fid)
            if fileMeta:
                fn = fileMeta.fileName if hasattr(fileMeta, "fileName") else fileMeta.get("fileName", "")
                mt = fileMeta.mimeType if hasattr(fileMeta, "mimeType") else fileMeta.get("mimeType", "")
                async def _reindex(fileId=fid, fileName=fn, mimeType=mt):
                    try:
                        await _autoIndexFile(fileId=fileId, fileName=fileName, mimeType=mimeType, user=context.user)
                    except Exception as ex:
                        logger.error("Folder neutralize re-index failed for %s: %s", fileId, ex)
                background_tasks.add_task(_reindex)
        return {"folderId": folderId, "neutralize": neutralize, "filesUpdated": len(fileIds)}
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error updating folder neutralize flag: {e}")
        raise HTTPException(status_code=500, detail=str(e))
def _collectFolderFileIds(mgmt, folderId: str) -> List[str]:
"""Recursively collect all file IDs in a folder and its sub-folders."""
fileIds = []
try:
files = mgmt.listFiles(folderId=folderId)
if isinstance(files, dict):
files = files.get("files", [])
for f in (files or []):
fid = f.get("id") if isinstance(f, dict) else getattr(f, "id", None)
if fid:
fileIds.append(fid)
except Exception as e:
logger.warning("_collectFolderFileIds: listFiles failed for folder %s: %s", folderId, e)
try:
subFolders = mgmt.listFolders(parentId=folderId)
for sf in (subFolders or []):
sfId = sf.get("id") if isinstance(sf, dict) else getattr(sf, "id", None)
if sfId:
fileIds.extend(_collectFolderFileIds(mgmt, sfId))
except Exception as e:
logger.warning("_collectFolderFileIds: listFolders failed for folder %s: %s", folderId, e)
return fileIds
@router.get("/folders/{folderId}/download")
@limiter.limit("10/minute")
def download_folder(
@ -846,7 +669,7 @@ def updateFileScope(
if scope not in validScopes:
raise HTTPException(status_code=400, detail=f"Invalid scope: {scope}. Must be one of {validScopes}")
if scope == "global" and not context.isSysAdmin:
if scope == "global" and not context.hasSysAdminRole:
raise HTTPException(status_code=403, detail=routeApiMsg("Only sysadmins can set global scope"))
managementInterface = interfaceDbManagement.getInterface(
@ -1040,7 +863,7 @@ def update_file(
detail=f"File with ID {fileId} not found"
)
if safeData.get("scope") == "global" and not getattr(currentUser, "isSysAdmin", False):
if safeData.get("scope") == "global" and not _hasSysAdminRole(str(currentUser.id)):
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail=routeApiMsg("Only sysadmins can set global scope"),
@ -1205,18 +1028,6 @@ def move_file(
featureInstanceId=str(context.featureInstanceId) if context.featureInstanceId else None,
)
mgmt.updateFile(fileId, {"folderId": targetFolderId})
if targetFolderId:
try:
targetFolder = mgmt.getFolder(targetFolderId)
folderNeut = (targetFolder.get("neutralize") if isinstance(targetFolder, dict)
else getattr(targetFolder, "neutralize", False)) if targetFolder else False
if folderNeut:
mgmt.updateFile(fileId, {"neutralize": True})
logger.info("File %s moved to neutralized folder %s — inherited neutralize=True", fileId, targetFolderId)
except Exception as e:
logger.warning("File move: folder neutralize inheritance check failed for %s: %s", fileId, e)
return {"success": True, "fileId": fileId, "folderId": targetFolderId}
except Exception as e:
logger.error(f"Error moving file: {e}")

View file

@ -5,8 +5,7 @@ Mandate routes for the backend API.
Implements the endpoints for mandate management.
MULTI-TENANT:
- Mandate create/delete and cross-mandate ops require PlatformAdmin
- Mandate read/update: PlatformAdmin or Mandate-Admin (label-only for the latter)
- Mandate CRUD is SysAdmin-only (mandates are system resources)
- User management within mandates is Mandate-Admin (add/remove users)
"""
@ -18,7 +17,7 @@ import json
from pydantic import BaseModel, Field
# Import auth module
from modules.auth import limiter, requirePlatformAdmin, getRequestContext, getCurrentUser, RequestContext
from modules.auth import limiter, requireSysAdminRole, getRequestContext, RequestContext
# Import interfaces
import modules.interfaces.interfaceDbApp as interfaceDbApp
@ -34,8 +33,6 @@ from modules.datamodels.datamodelPagination import PaginationParams, PaginatedRe
from modules.routes.routeNotifications import create_access_change_notification
from modules.serviceCenter.services.serviceSubscription.mainServiceSubscription import SubscriptionCapacityException
from modules.shared.i18nRegistry import apiRouteContext
from modules.shared.mandateNameUtils import isValidMandateName
routeApiMsg = apiRouteContext("routeDataMandates")
@ -104,8 +101,8 @@ def get_mandates(
"""
try:
# Check admin access
isPlatformAdmin = context.isPlatformAdmin
if not isPlatformAdmin:
isSysAdmin = context.hasSysAdminRole
if not isSysAdmin:
adminMandateIds = _getAdminMandateIds(context)
if not adminMandateIds:
raise HTTPException(
@ -138,7 +135,7 @@ def get_mandates(
if mode == "filterValues":
if not column:
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
if isPlatformAdmin:
if isSysAdmin:
crossPagination = parseCrossFilterPagination(column, pagination)
try:
from fastapi.responses import JSONResponse
@ -158,7 +155,7 @@ def get_mandates(
return handleFilterValuesInMemory(mandateItems, column, pagination)
if mode == "ids":
if isPlatformAdmin:
if isSysAdmin:
return handleIdsMode(appInterface.db, Mandate, pagination)
else:
mandateItems = []
@ -168,7 +165,7 @@ def get_mandates(
mandateItems.append(m.model_dump() if hasattr(m, 'model_dump') else m if isinstance(m, dict) else vars(m))
return handleIdsInMemory(mandateItems, pagination)
if isPlatformAdmin:
if isSysAdmin:
result = appInterface.getAllMandates(pagination=paginationParams)
else:
allMandates = []
@ -226,7 +223,7 @@ def get_mandate(
try:
mandateId = targetMandateId
# Check access
if not context.isPlatformAdmin:
if not context.hasSysAdminRole:
adminMandateIds = _getAdminMandateIds(context)
if mandateId not in adminMandateIds:
raise HTTPException(
@ -257,48 +254,37 @@ def get_mandate(
@limiter.limit("10/minute")
def create_mandate(
request: Request,
mandateData: dict = Body(..., description="Mandate data: label (Voller Name) required unless name alone is provided; name (Kurzzeichen) optional — auto-generated from label if omitted"),
currentUser: User = Depends(requirePlatformAdmin)
mandateData: dict = Body(..., description="Mandate data with at least 'name' field"),
currentUser: User = Depends(requireSysAdminRole)
) -> Mandate:
"""
Create a new mandate.
MULTI-TENANT: PlatformAdmin-only.
MULTI-TENANT: SysAdmin-only.
"""
try:
logger.debug(f"Creating mandate with data: {mandateData}")
labelRaw = mandateData.get("label")
nameRaw = mandateData.get("name")
labelStripped = str(labelRaw).strip() if labelRaw is not None else ""
if not labelStripped and nameRaw is not None:
labelStripped = str(nameRaw).strip()
if not labelStripped:
# Validate required fields
name = mandateData.get('name')
if not name or (isinstance(name, str) and name.strip() == ''):
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=routeApiMsg("Mandate Voller Name (label) is required"),
detail=routeApiMsg("Mandate name is required")
)
nameToPass = None
if nameRaw is not None and str(nameRaw).strip() != "":
nameToPass = str(nameRaw).strip()
if not isValidMandateName(nameToPass):
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=routeApiMsg(
"Mandate Kurzzeichen (name) must be 232 characters: lowercase az, digits, hyphens only"
),
)
enabled = mandateData.get("enabled", True)
# Get optional fields with defaults
label = mandateData.get('label')
enabled = mandateData.get('enabled', True)
appInterface = interfaceDbApp.getRootInterface()
# Create mandate
newMandate = appInterface.createMandate(
name=nameToPass,
label=labelStripped,
enabled=bool(enabled) if enabled is not None else True,
name=name,
label=label,
enabled=enabled
)
if not newMandate:
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
@ -343,22 +329,11 @@ def create_mandate(
except Exception as subErr:
logger.error(f"Failed to create subscription for mandate {newMandate.id}: {subErr}")
logger.info(f"Mandate {newMandate.id} created by PlatformAdmin {currentUser.id}")
logger.info(f"Mandate {newMandate.id} created by SysAdmin {currentUser.id}")
return newMandate
except HTTPException:
raise
except ValueError as ve:
logger.warning(f"Create mandate validation: {ve}")
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=routeApiMsg(str(ve)),
)
except PermissionError:
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail=routeApiMsg("No permission to create mandates"),
)
except Exception as e:
logger.error(f"Error creating mandate: {str(e)}")
raise HTTPException(
@ -366,96 +341,32 @@ def create_mandate(
detail=f"Failed to create mandate: {str(e)}"
)
_MANDATE_ADMIN_EDITABLE_FIELDS = {"label"}
def _isUserAdminOfMandate(userId: str, targetMandateId: str) -> bool:
    """Check mandate-admin without RequestContext (avoids Header param conflicts).

    True when the user holds the mandate-wide "admin" role (no feature-instance
    scoping) on the target mandate; False otherwise or on lookup errors.
    """
    try:
        root = interfaceDbApp.getRootInterface()
        for membership in root.getUserMandates(userId):
            # Only the membership row for the requested mandate is relevant.
            if str(getattr(membership, 'mandateId', '')) != str(targetMandateId):
                continue
            membershipId = getattr(membership, 'id', None)
            if not membershipId:
                continue
            for rid in root.getRoleIdsForUserMandate(str(membershipId)):
                candidate = root.getRole(rid)
                # Mandate-wide admin = "admin" label with no featureInstanceId.
                isPlainAdmin = bool(candidate) and candidate.roleLabel == "admin" and not candidate.featureInstanceId
                if isPlainAdmin:
                    return True
    except Exception as e:
        # Fail closed: any lookup error is treated as "not an admin".
        logger.error(f"Error checking mandate admin: {e}")
    return False
@router.put("/{mandateId}", response_model=Mandate)
@limiter.limit("10/minute")
def update_mandate(
request: Request,
mandateId: str = Path(..., description="ID of the mandate to update"),
mandateData: dict = Body(..., description="Mandate update data"),
currentUser: User = Depends(getCurrentUser)
currentUser: User = Depends(requireSysAdminRole)
) -> Mandate:
"""
Update an existing mandate.
MULTI-TENANT:
- PlatformAdmin: full update (including Kurzzeichen name)
- MandateAdmin: only label (Voller Name)
MULTI-TENANT: SysAdmin-only.
"""
userId = str(currentUser.id)
isPlatformAdmin = bool(getattr(currentUser, "isPlatformAdmin", False))
if not isPlatformAdmin:
if not _isUserAdminOfMandate(userId, mandateId):
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail=routeApiMsg("Admin role required to update mandate")
)
try:
logger.debug(f"Updating mandate {mandateId} with data: {mandateData}")
appInterface = interfaceDbApp.getRootInterface()
# Check if mandate exists
existingMandate = appInterface.getMandate(mandateId)
if not existingMandate:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail=f"Mandate with ID {mandateId} not found"
)
if not isPlatformAdmin:
mandateData = {k: v for k, v in mandateData.items() if k in _MANDATE_ADMIN_EDITABLE_FIELDS}
if not mandateData:
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail=routeApiMsg("No editable fields submitted")
)
if "label" in mandateData:
lbl = mandateData["label"]
if lbl is None or str(lbl).strip() == "":
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=routeApiMsg("Mandate Voller Name (label) must not be empty"),
)
else:
if "name" in mandateData and mandateData["name"] is not None:
nm = str(mandateData["name"]).strip()
if nm and not isValidMandateName(nm):
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=routeApiMsg(
"Mandate Kurzzeichen (name) must be 232 characters: lowercase az, digits, hyphens only"
),
)
if "label" in mandateData and mandateData["label"] is not None:
lb = str(mandateData["label"]).strip()
if not lb:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=routeApiMsg("Mandate Voller Name (label) must not be empty"),
)
# Update mandate - mandateData is already a dict
updatedMandate = appInterface.updateMandate(mandateId, mandateData)
if not updatedMandate:
@ -464,22 +375,11 @@ def update_mandate(
detail=routeApiMsg("Failed to update mandate")
)
logger.info(f"Mandate {mandateId} updated by user {currentUser.id} (platformAdmin={isPlatformAdmin})")
logger.info(f"Mandate {mandateId} updated by SysAdmin {currentUser.id}")
return updatedMandate
except HTTPException:
raise
except ValueError as ve:
logger.warning(f"Update mandate validation: {ve}")
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=routeApiMsg(str(ve)),
)
except PermissionError:
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail=routeApiMsg("No permission to update mandate"),
)
except Exception as e:
logger.error(f"Error updating mandate {mandateId}: {str(e)}")
raise HTTPException(
@ -493,7 +393,7 @@ def delete_mandate(
request: Request,
mandateId: str = Path(..., description="ID of the mandate to delete"),
force: bool = Query(False, description="Hard-delete with full cascade (irreversible)"),
currentUser: User = Depends(requirePlatformAdmin)
currentUser: User = Depends(requireSysAdminRole)
) -> Dict[str, Any]:
"""
Delete a mandate.
@ -566,7 +466,7 @@ def list_mandate_users(
pagination: Optional pagination parameters (page, pageSize, search, filters, sort)
"""
# Check permission
if not _hasMandateAdminRole(context, targetMandateId) and not context.isPlatformAdmin:
if not _hasMandateAdminRole(context, targetMandateId) and not context.hasSysAdminRole:
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail=routeApiMsg("Mandate-Admin role required")
@ -971,7 +871,7 @@ def update_user_roles_in_mandate(
# Add new role assignments
for roleId in roleIds:
rootInterface.addRoleToUserMandate(str(membership.id), roleId)
# Audit - Log role assignment change
audit_logger.logPermissionChange(
userId=str(context.user.id),
@ -1079,7 +979,7 @@ def _hasMandateAdminRole(context: RequestContext, mandateId: str) -> bool:
Check if the user has mandate admin role for the specified mandate.
Works with or without X-Mandate-Id header (admin pages don't send it).
"""
if context.isPlatformAdmin:
if context.hasSysAdminRole:
return True
# If mandate context matches, check roles from context directly

View file

@ -3,10 +3,11 @@
"""PATCH endpoints for DataSource and FeatureDataSource scope/neutralize tagging."""
import logging
from typing import Any, Dict, List, Optional
from typing import Any, Dict
from fastapi import APIRouter, HTTPException, Depends, Path, Request, Body
from modules.auth import limiter, getRequestContext, RequestContext
from modules.auth.authentication import _hasSysAdminRole
from modules.datamodels.datamodelDataSource import DataSource
from modules.datamodels.datamodelFeatureDataSource import FeatureDataSource
from modules.shared.i18nRegistry import apiRouteContext
@ -52,7 +53,7 @@ def _updateDataSourceScope(
if scope not in _VALID_SCOPES:
raise HTTPException(status_code=400, detail=f"Invalid scope: {scope}. Must be one of {_VALID_SCOPES}")
if scope == "global" and not context.isSysAdmin:
if scope == "global" and not _hasSysAdminRole(context.user):
raise HTTPException(status_code=403, detail=routeApiMsg("Only sysadmins can set global scope"))
try:
@ -96,32 +97,3 @@ def _updateDataSourceNeutralize(
except Exception as e:
logger.error("Error updating datasource neutralize: %s", e)
raise HTTPException(status_code=500, detail=str(e))
@router.patch("/{sourceId}/neutralize-fields")
@limiter.limit("30/minute")
def _updateNeutralizeFields(
    request: Request,
    sourceId: str = Path(..., description="ID of the FeatureDataSource"),
    neutralizeFields: List[str] = Body(..., embed=True),
    context: RequestContext = Depends(getRequestContext),
) -> Dict[str, Any]:
    """Update the list of field names to neutralize on a FeatureDataSource.

    Empty or non-string entries are dropped; an empty result is stored as
    ``None``. Raises 404 when the source is unknown, 500 on other errors.
    """
    try:
        from modules.interfaces.interfaceDbApp import getRootInterface
        rootIf = getRootInterface()
        if not rootIf.db.getRecord(FeatureDataSource, sourceId):
            raise HTTPException(status_code=404, detail=f"FeatureDataSource {sourceId} not found")
        # Keep only non-empty string field names.
        cleanFields = []
        for fieldName in (neutralizeFields or []):
            if fieldName and isinstance(fieldName, str):
                cleanFields.append(fieldName)
        rootIf.db.recordModify(FeatureDataSource, sourceId, {
            "neutralizeFields": cleanFields or None,
        })
        logger.info("Updated neutralizeFields=%s for FeatureDataSource %s", cleanFields, sourceId)
        return {"sourceId": sourceId, "neutralizeFields": cleanFields, "updated": True}
    except HTTPException:
        raise
    except Exception as e:
        logger.error("Error updating neutralizeFields: %s", e)
        raise HTTPException(status_code=500, detail=str(e))

View file

@ -6,7 +6,7 @@ Implements the endpoints for user management.
MULTI-TENANT: User management requires RequestContext.
- mandateId from X-Mandate-Id header determines which users are visible
- isPlatformAdmin can see all users across mandates
- SysAdmin can see all users across mandates
"""
from fastapi import APIRouter, HTTPException, Depends, Body, Path, Request, Response, Query
@ -34,10 +34,10 @@ logger = logging.getLogger(__name__)
def _isAdminForUser(context: RequestContext, targetUserId: str) -> bool:
"""
Check if the current user has admin rights for the target user.
PlatformAdmin can manage all users. MandateAdmin can manage users in their mandates.
SysAdmin can manage all users. MandateAdmin can manage users in their mandates.
Works without X-Mandate-Id header (admin pages don't send it).
"""
if context.isPlatformAdmin:
if context.hasSysAdminRole:
return True
# Find mandates where current user is admin
@ -90,7 +90,7 @@ def _getUserFilterOrIds(context, paginationJson, column=None, idsMode=False):
return handleIdsInMemory(items, paginationJson)
return handleFilterValuesInMemory(items, column, paginationJson, requestLang)
if context.isPlatformAdmin:
if context.hasSysAdminRole:
rootInterface = getRootInterface()
if idsMode:
return handleIdsMode(rootInterface.db, UserInDB, paginationJson)
@ -167,7 +167,7 @@ def get_user_options(
if context.mandateId:
result = appInterface.getUsersByMandate(str(context.mandateId), None)
users = result.items if hasattr(result, 'items') else result
elif context.isPlatformAdmin:
elif context.hasSysAdminRole:
users = appInterface.getAllUsers()
else:
raise HTTPException(status_code=403, detail=routeApiMsg("Access denied"))
@ -256,8 +256,8 @@ def get_users(
items=users,
pagination=None
)
elif context.isPlatformAdmin:
# PlatformAdmin without mandateId — DB-level pagination via interface
elif context.hasSysAdminRole:
# SysAdmin without mandateId — DB-level pagination via interface
result = appInterface.getAllUsers(paginationParams)
if paginationParams and hasattr(result, 'items'):
@ -375,8 +375,8 @@ def get_user(
detail=f"User with ID {userId} not found"
)
# MULTI-TENANT: Verify user is in the same mandate (unless PlatformAdmin)
if context.mandateId and not context.isPlatformAdmin:
# MULTI-TENANT: Verify user is in the same mandate (unless SysAdmin)
if context.mandateId and not context.hasSysAdminRole:
userMandate = appInterface.getUserMandate(userId, str(context.mandateId))
if not userMandate:
raise HTTPException(
@ -402,7 +402,6 @@ class CreateUserRequest(BaseModel):
language: str = "de"
enabled: bool = True
isSysAdmin: bool = False
isPlatformAdmin: bool = False
password: Optional[str] = None
@ -416,24 +415,10 @@ def create_user(
"""
Create a new user.
MULTI-TENANT: User is created and automatically added to the current mandate.
Privileged platform flags (isSysAdmin, isPlatformAdmin) may only be set
by a Platform Admin. Non-PlatformAdmin requests have these flags reset
to False with a warning.
"""
appInterface = interfaceDbApp.getInterface(context.user)
callerIsPlatformAdmin = context.isPlatformAdmin
requestedSysAdmin = bool(userData.isSysAdmin) and callerIsPlatformAdmin
requestedPlatformAdmin = bool(userData.isPlatformAdmin) and callerIsPlatformAdmin
if (userData.isSysAdmin or userData.isPlatformAdmin) and not callerIsPlatformAdmin:
logger.warning(
f"Non-PlatformAdmin {context.user.id} attempted to create user with "
f"privileged flags (isSysAdmin={userData.isSysAdmin}, "
f"isPlatformAdmin={userData.isPlatformAdmin}); flags reset to False"
)
# Extract fields from request model and call createUser with individual parameters
newUser = appInterface.createUser(
username=userData.username,
password=userData.password,
@ -442,10 +427,9 @@ def create_user(
language=userData.language,
enabled=userData.enabled,
authenticationAuthority=AuthAuthority.LOCAL,
isSysAdmin=requestedSysAdmin,
isPlatformAdmin=requestedPlatformAdmin,
isSysAdmin=userData.isSysAdmin
)
# MULTI-TENANT: Add user to current mandate via UserMandate with default "user" role
if context.mandateId:
userRole = appInterface.getRoleByLabel("user")
@ -454,14 +438,14 @@ def create_user(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=routeApiMsg("No 'user' role found in system — cannot assign user to mandate")
)
appInterface.createUserMandate(
userId=str(newUser.id),
mandateId=str(context.mandateId),
roleIds=[str(userRole.id)]
)
logger.info(f"Created UserMandate for user {newUser.id} in mandate {context.mandateId}")
return newUser
@router.put("/{userId}", response_model=User)
@ -469,67 +453,43 @@ def create_user(
def update_user(
request: Request,
userId: str = Path(..., description="ID of the user to update"),
userData: Dict[str, Any] = Body(..., description="Partial user payload — only the fields present in the request body are updated."),
userData: User = Body(...),
context: RequestContext = Depends(getRequestContext)
) -> User:
"""
Update an existing user (PARTIAL update).
The request body is treated as a **partial** patch: only the keys actually
sent are applied; missing keys leave the stored value untouched. This is
intentional sending a full ``User`` body would overwrite unrelated fields
(e.g. ``isSysAdmin``/``isPlatformAdmin``) with Pydantic defaults whenever a
client only ships a subset, which has historically caused privileged flags
to flip silently when toggling a single inline cell.
Update an existing user.
Self-service: Users can update their own profile (language, fullName, etc.).
Admin: MandateAdmin can update users in their mandates.
PlatformAdmin can update any user.
Privileged flag changes (isSysAdmin, isPlatformAdmin) require:
- caller has isPlatformAdmin=True, AND
- target is NOT the caller (Self-Protection).
Admin: MandateAdmin can update users in their mandates. SysAdmin for all.
"""
isSelfUpdate = str(context.user.id) == str(userId)
# Non-self updates require admin permission
if not isSelfUpdate and not _isAdminForUser(context, userId):
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail=routeApiMsg("Admin role required to update other users")
)
# Use rootInterface for user lookup/update (avoids RBAC filtering on User table)
rootInterface = getRootInterface()
# Check if the user exists
existingUser = rootInterface.getUser(userId)
if not existingUser:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail=f"User with ID {userId} not found"
)
if not isinstance(userData, dict):
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=routeApiMsg("User update payload must be a JSON object")
)
# Defensive: drop ``id`` from payload — userId comes from the path and
# tampering with it from the body must never silently rebind the row.
sanitizedPayload = {k: v for k, v in userData.items() if k != "id"}
callerIsPlatformAdmin = context.isPlatformAdmin
allowAdminFlagChange = callerIsPlatformAdmin and not isSelfUpdate
updatedUser = rootInterface.updateUser(
userId, sanitizedPayload, allowAdminFlagChange=allowAdminFlagChange
)
# Update user
updatedUser = rootInterface.updateUser(userId, userData)
if not updatedUser:
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=routeApiMsg("Error updating the user")
)
return updatedUser
@router.post("/{userId}/reset-password")
@ -832,7 +792,7 @@ def delete_user(
) -> Dict[str, Any]:
"""
Delete a user.
MULTI-TENANT: Can only delete users in the same mandate (unless PlatformAdmin).
MULTI-TENANT: Can only delete users in the same mandate (unless SysAdmin).
"""
appInterface = interfaceDbApp.getInterface(context.user)
@ -844,8 +804,8 @@ def delete_user(
detail=f"User with ID {userId} not found"
)
# MULTI-TENANT: Verify user is in the same mandate (unless PlatformAdmin)
if context.mandateId and not context.isPlatformAdmin:
# MULTI-TENANT: Verify user is in the same mandate (unless SysAdmin)
if context.mandateId and not context.hasSysAdminRole:
userMandate = appInterface.getUserMandate(userId, str(context.mandateId))
if not userMandate:
raise HTTPException(

View file

@ -4,11 +4,9 @@
Public and authenticated routes for UI language sets (DB-backed i18n).
Architecture:
- xx = base set (meta): key = source plaintext (German or English, as written
in the code via ``t("...")``), value = UI context for AI
- xx = base set (meta): key = German plaintext, value = UI context for AI
- All languages (incl. de) are AI-generated translations from xx
- AI translation pipeline uses context from xx to disambiguate translations;
the prompt forces the output language to be exactly the requested target.
- AI translation pipeline uses context from xx to disambiguate translations
"""
from __future__ import annotations
@ -25,7 +23,7 @@ from fastapi import APIRouter, BackgroundTasks, Depends, File, HTTPException, Re
from fastapi.responses import Response
from pydantic import BaseModel, Field
from modules.auth import getCurrentUser, requireSysAdmin, requirePlatformAdmin
from modules.auth import getCurrentUser, requireSysAdminRole
from modules.connectors.connectorDbPostgre import _get_cached_connector
from modules.datamodels.datamodelAi import (
AiCallOptions,
@ -236,31 +234,17 @@ async def _translateBatch(
jsonPayload = json.dumps(payload, ensure_ascii=False)
systemPrompt = (
f"You are a professional translator for software UI texts. "
f"You receive a JSON array of objects: {{\"key\": \"source text\", \"context\": \"UI context\"}}. "
f"The source text is written in German OR English. "
f"The context describes where the text is used in the application (file, component). "
f"\n\n"
f"HARD REQUIREMENTS (must all be satisfied):\n"
f"1. OUTPUT LANGUAGE: every translated value MUST be written in {targetLanguageLabel} "
f"(ISO code \"{targetCode}\"). Never output in German or English if that is not "
f"the target language. No mixing of languages.\n"
f"2. If the source is already in the target language, keep it (do not re-translate, "
f"do not paraphrase).\n"
f"3. KEEP the exact JSON keys from the input — do NOT translate or modify the keys.\n"
f"4. KEEP placeholders like {{variable}}, {{count}}, %s, %(name)s exactly as they are.\n"
f"5. Preserve leading/trailing whitespace, punctuation and capitalisation pattern.\n"
f"6. Answer ONLY with a JSON object mapping source-key -> translated value in "
f"{targetLanguageLabel}. No markdown fences, no comments, no explanations.\n"
f"7. If a key cannot be translated (empty, pure symbols, URLs), return the source unchanged."
f"Du bist ein professioneller Übersetzer für Software-UI-Texte. "
f"Du erhältst ein JSON-Array mit Objekten: {{\"key\": \"deutscher Text\", \"context\": \"UI-Kontext\"}}. "
f"Der Kontext beschreibt, wo der Text in der Anwendung verwendet wird (Datei, Komponente). "
f"Übersetze jeden «key» ins {targetLanguageLabel} (ISO {targetCode}). "
f"Behalte Platzhalter wie {{variable}} exakt bei. "
f"Antworte NUR mit einem JSON-Objekt — Keys = deutsche Originaltexte, Values = Übersetzungen. "
f"Kein Markdown, kein Kommentar."
)
aiRequest = AiCallRequest(
prompt=(
f"Translate the following UI labels into {targetLanguageLabel} "
f"(ISO {targetCode}). Source may be German or English. "
f"Respond with a pure JSON object only.\n{jsonPayload}"
),
prompt=f"Übersetze diese UI-Labels:\n{jsonPayload}",
context=systemPrompt,
options=AiCallOptions(
operationType=OperationTypeEnum.DATA_GENERATE,
@ -842,7 +826,7 @@ async def _syncLanguageWithXx(db, code: str, userId: Optional[str], adminUser: O
@router.put("/sets/sync-xx")
async def sync_xx_master(
request: Request,
adminUser: User = Depends(requireSysAdmin),
adminUser: User = Depends(requireSysAdminRole),
):
"""Synchronise the xx base set from the frontend build artefact.
@ -860,7 +844,7 @@ async def sync_xx_master(
@router.get("/sets/{code}/sync-diff")
async def get_language_sync_diff(
code: str,
adminUser: User = Depends(requirePlatformAdmin),
adminUser: User = Depends(requireSysAdminRole),
):
"""How many keys would be added/removed vs xx before running a full sync (SysAdmin)."""
c = code.strip().lower()
@ -873,7 +857,7 @@ async def get_language_sync_diff(
@router.put("/sets/{code}")
async def update_language_set(
code: str,
adminUser: User = Depends(requirePlatformAdmin),
adminUser: User = Depends(requireSysAdminRole),
):
c = code.strip().lower()
if c in ("update-all", "sync-xx", "sync-de"):
@ -889,7 +873,7 @@ async def update_language_set(
@router.delete("/sets/{code}")
async def delete_language_set(
code: str,
adminUser: User = Depends(requirePlatformAdmin),
adminUser: User = Depends(requireSysAdminRole),
):
c = code.strip().lower()
if c in _PROTECTED_CODES:
@ -927,7 +911,7 @@ async def download_language_set(
@router.get("/export")
async def export_all_language_sets(
adminUser: User = Depends(requirePlatformAdmin),
adminUser: User = Depends(requireSysAdminRole),
):
db = getMgmtInterface(adminUser, mandateId=None).db
rows = db.getRecordset(UiLanguageSet)
@ -955,7 +939,7 @@ async def export_all_language_sets(
@router.post("/import")
async def import_language_sets(
file: UploadFile = File(...),
adminUser: User = Depends(requirePlatformAdmin),
adminUser: User = Depends(requireSysAdminRole),
):
if not file.filename or not file.filename.endswith(".json"):
raise HTTPException(status_code=400, detail=routeApiMsg("Nur .json-Dateien erlaubt."))

View file

@ -186,7 +186,7 @@ def create_invitation(
)
# Check admin permission
if not context.isPlatformAdmin:
if not context.hasSysAdminRole:
if str(context.mandateId) != mandateId:
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
@ -891,7 +891,7 @@ def _hasMandateAdminRole(context: RequestContext) -> bool:
"""
Check if the user has mandate admin role in the current context.
"""
if context.isPlatformAdmin:
if context.hasSysAdminRole:
return True
if not context.roleIds:

View file

@ -121,7 +121,7 @@ async def _validateInstanceAccess(instanceId: str, context: RequestContext) -> s
status_code=400,
detail=f"Instance '{instanceId}' is not a realestate instance"
)
if not context.isPlatformAdmin:
if not context.hasSysAdminRole:
featureAccesses = rootInterface.getFeatureAccessesForUser(str(context.user.id))
hasAccess = any(
str(fa.featureInstanceId) == instanceId and fa.enabled

View file

@ -210,13 +210,13 @@ def _ensureHomeMandate(rootInterface, user) -> None:
except Exception as e:
logger.warning(f"Could not check pending invitations for {user.username}: {e}")
homeMandateLabel = f"Home {user.username}"
homeMandateName = f"Home {user.username}"
rootInterface._provisionMandateForUser(
userId=userId,
mandateLabel=homeMandateLabel,
mandateName=homeMandateName,
planKey="TRIAL_14D",
)
logger.info(f"Created Home mandate '{homeMandateLabel}' for user {user.username}")
logger.info(f"Created Home mandate '{homeMandateName}' for user {user.username}")
@router.post("/login")
@ -464,10 +464,10 @@ def register_user(
provisionResult = None
if not hasPendingInvitations:
try:
homeMandateLabel = f"Home {user.username}"
homeMandateName = f"Home {user.username}"
provisionResult = appInterface._provisionMandateForUser(
userId=str(user.id),
mandateLabel=homeMandateLabel,
mandateName=homeMandateName,
planKey="TRIAL_14D",
)
logger.info(f"Provisioned Home mandate for user {user.id}: {provisionResult}")
@ -881,7 +881,7 @@ def onboarding_provision(
"alreadyProvisioned": True,
}
mandateLabel = (companyName.strip() if companyName and companyName.strip()
mandateName = (companyName.strip() if companyName and companyName.strip()
else f"Home {currentUser.username}")
if planKey not in ("TRIAL_14D", "STARTER_MONTHLY", "STARTER_YEARLY", "PROFESSIONAL_MONTHLY", "PROFESSIONAL_YEARLY", "MAX_MONTHLY", "MAX_YEARLY"):
@ -889,7 +889,7 @@ def onboarding_provision(
result = appInterface._provisionMandateForUser(
userId=userId,
mandateLabel=mandateLabel,
mandateName=mandateName,
planKey=planKey,
)

View file

@ -59,13 +59,7 @@ class StoreFeatureResponse(BaseModel):
def _getStoreFeatures(catalogService) -> List[Dict[str, Any]]:
"""Get all features available in the store.
Soft-disabled features (``enabled=False`` in their feature definition) are
skipped so that legacy or temporarily-deactivated modules do not appear in
the storefront, even if their ``resource.store.*`` catalog object is still
registered.
"""
"""Get all features available in the store."""
resourceObjects = catalogService.getResourceObjects()
storeFeatures = []
for obj in resourceObjects:
@ -74,7 +68,7 @@ def _getStoreFeatures(catalogService) -> List[Dict[str, Any]]:
featureCode = meta.get("featureCode")
if featureCode:
featureDef = catalogService.getFeatureDefinition(featureCode)
if featureDef and featureDef.get("enabled", True):
if featureDef:
storeFeatures.append(featureDef)
return storeFeatures

View file

@ -46,7 +46,7 @@ def _resolveMandateId(context: RequestContext) -> str:
def _assertMandateAdmin(context: RequestContext, mandateId: str) -> None:
if context.isPlatformAdmin:
if context.hasSysAdminRole:
return
try:
from modules.interfaces.interfaceDbApp import getRootInterface
@ -303,7 +303,7 @@ def forceCancel(
context: RequestContext = Depends(getRequestContext),
):
"""Sysadmin: immediately expire any non-terminal subscription."""
if not context.isPlatformAdmin:
if not context.hasSysAdminRole:
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=routeApiMsg("Sysadmin role required"))
from modules.serviceCenter.services.serviceSubscription.mainServiceSubscription import (
@ -485,7 +485,7 @@ def getAllSubscriptions(
context: RequestContext = Depends(getRequestContext),
):
"""SysAdmin: list ALL subscriptions across all mandates with enriched metadata."""
if not context.isPlatformAdmin:
if not context.hasSysAdminRole:
raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=routeApiMsg("Sysadmin role required"))
if mode == "filterValues":

View file

@ -478,7 +478,7 @@ def get_navigation(
Endpoint: GET /api/navigation
"""
try:
isSysAdmin = reqContext.isPlatformAdmin
isSysAdmin = reqContext.hasSysAdminRole
userId = str(reqContext.user.id) if reqContext.user else None
# Get user's role IDs for permission checking

View file

@ -17,7 +17,6 @@ from typing import Optional, Dict, Any, List
from modules.auth import getCurrentUser, getRequestContext, RequestContext, limiter
from modules.datamodels.datamodelUam import User
from modules.interfaces.interfaceVoiceObjects import getVoiceInterface, VoiceObjects
from modules.shared.voiceCatalog import getCatalogPayload
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/voice-google", tags=["Voice Google"])
@ -62,15 +61,32 @@ def _getVoiceInterface(currentUser: User) -> VoiceObjects:
@router.get("/languages")
async def get_available_languages(currentUser: User = Depends(getCurrentUser)):
"""Return the curated voice/language catalog (single source of truth).
Each entry: {bcp47, iso, label, flag, defaultVoice}. Same payload as
/api/voice/languages both endpoints back the same catalog.
"""
return {
"success": True,
"languages": getCatalogPayload(),
}
"""Get available languages from Google Cloud Text-to-Speech."""
try:
logger.info("🌐 Getting available languages from Google Cloud TTS")
voiceInterface = _getVoiceInterface(currentUser)
result = await voiceInterface.getAvailableLanguages()
if result["success"]:
return {
"success": True,
"languages": result["languages"]
}
else:
raise HTTPException(
status_code=400,
detail=f"Failed to get languages: {result.get('error', 'Unknown error')}"
)
except HTTPException:
raise
except Exception as e:
logger.error(f"❌ Get languages error: {e}")
raise HTTPException(
status_code=500,
detail=f"Failed to get available languages: {str(e)}"
)
@router.get("/voices")
async def get_available_voices(

View file

@ -18,7 +18,6 @@ from modules.datamodels.datamodelUam import User, UserVoicePreferences, _normali
from modules.interfaces.interfaceDbApp import getRootInterface
from modules.interfaces.interfaceVoiceObjects import getVoiceInterface
from modules.shared.i18nRegistry import apiRouteContext
from modules.shared.voiceCatalog import getCatalogPayload
routeApiMsg = apiRouteContext("routeVoiceUser")
logger = logging.getLogger(__name__)
@ -102,11 +101,11 @@ async def getVoiceLanguages(
request: Request,
currentUser: User = Depends(getCurrentUser),
) -> Dict[str, Any]:
"""Return the curated voice/language catalog (single source of truth).
Each entry: {bcp47, iso, label, flag, defaultVoice}.
"""
return {"languages": getCatalogPayload()}
"""Return available TTS languages (user-level, no instance context needed)."""
voiceInterface = getVoiceInterface(currentUser)
languagesResult = await voiceInterface.getAvailableLanguages()
languageList = languagesResult.get("languages", []) if isinstance(languagesResult, dict) else languagesResult
return {"languages": languageList}
@router.get("/voices")

View file

@ -26,7 +26,6 @@ from modules.datamodels.datamodelPagination import PaginationParams, normalize_p
from modules.features.graphicalEditor.datamodelFeatureGraphicalEditor import (
AutoRun, AutoStepLog, AutoWorkflow, AutoTask, AutoVersion,
)
from modules.features.graphicalEditor.interfaceFeatureGraphicalEditor import graphicalEditorDatabase
from modules.shared.i18nRegistry import apiRouteContext
routeApiMsg = apiRouteContext("routeWorkflowDashboard")
@ -36,11 +35,13 @@ limiter = Limiter(key_func=get_remote_address)
router = APIRouter(prefix="/api/system/workflow-runs", tags=["WorkflowDashboard"])
_GREENFIELD_DB = "poweron_graphicaleditor"
def _getDb() -> DatabaseConnector:
return DatabaseConnector(
dbHost=APP_CONFIG.get("DB_HOST", "localhost"),
dbDatabase=graphicalEditorDatabase,
dbDatabase=_GREENFIELD_DB,
dbUser=APP_CONFIG.get("DB_USER"),
dbPassword=APP_CONFIG.get("DB_PASSWORD_SECRET") or APP_CONFIG.get("DB_PASSWORD"),
dbPort=int(APP_CONFIG.get("DB_PORT", 5432)),
@ -106,7 +107,7 @@ def _scopedRunFilter(context: RequestContext) -> Optional[dict]:
- mandate admin: mandateId IN user's mandates
- normal user: ownerId = userId
"""
if context.isPlatformAdmin:
if context.hasSysAdminRole:
return None
userId = str(context.user.id) if context.user else None
@ -128,7 +129,7 @@ def _scopedWorkflowFilter(context: RequestContext) -> Optional[dict]:
- sysadmin: None (no filter, sees all)
- normal user: mandateId IN user's mandates
"""
if context.isPlatformAdmin:
if context.hasSysAdminRole:
return None
userId = str(context.user.id) if context.user else None
@ -144,7 +145,7 @@ def _scopedWorkflowFilter(context: RequestContext) -> Optional[dict]:
def _userMayDeleteWorkflow(context: RequestContext, wfMandateId: Optional[str]) -> bool:
"""Same rules as canDelete on rows in get_system_workflows."""
if context.isPlatformAdmin:
if context.hasSysAdminRole:
return True
userId = str(context.user.id) if context.user else None
if not userId or not wfMandateId:
@ -477,7 +478,7 @@ def get_system_workflows(
userId = str(context.user.id) if context.user else None
adminMandateIds = []
if userId and not context.isPlatformAdmin:
if userId and not context.hasSysAdminRole:
userMandateIds = _getUserMandateIds(userId)
adminMandateIds = _getAdminMandateIds(userId, userMandateIds)
@ -514,7 +515,7 @@ def get_system_workflows(
row["runCount"] = runCountMap.get(wfId, 0)
row["lastStartedAt"] = lastStartedMap.get(wfId)
if context.isPlatformAdmin:
if context.hasSysAdminRole:
row["canEdit"] = True
row["canDelete"] = True
row["canExecute"] = True
@ -670,7 +671,7 @@ def get_run_steps(
raise HTTPException(status_code=404, detail=routeApiMsg("Run not found"))
run = dict(runs[0])
if not context.isPlatformAdmin:
if not context.hasSysAdminRole:
userId = str(context.user.id) if context.user else None
runOwner = run.get("ownerId")
runMandate = run.get("mandateId")
@ -711,7 +712,7 @@ async def get_run_stream(
raise HTTPException(status_code=404, detail=routeApiMsg("Run not found"))
run = dict(runs[0])
if not context.isPlatformAdmin:
if not context.hasSysAdminRole:
userId = str(context.user.id) if context.user else None
runOwner = run.get("ownerId")
runMandate = run.get("mandateId")
@ -774,7 +775,7 @@ def stop_workflow_run(
raise HTTPException(status_code=404, detail=routeApiMsg("Run not found"))
run = dict(runs[0])
if not context.isPlatformAdmin:
if not context.hasSysAdminRole:
userId = str(context.user.id) if context.user else None
runOwner = run.get("ownerId")
runMandate = run.get("mandateId")

View file

@ -84,38 +84,10 @@ class RbacCatalogService:
logger.error(f"Failed to register DATA object {objectKey}: {e}")
return False
def registerFeatureDefinition(
self,
featureCode: str,
label: str,
icon: str,
*,
instantiable: bool = True,
enabled: bool = True,
) -> bool:
"""Register a feature definition.
Args:
featureCode: Stable code (e.g. ``"trustee"``).
label: Display label.
icon: Display icon.
instantiable: ``False`` for meta-features that must NOT be exposed
as a creatable Feature-Instance (e.g. the ``system`` umbrella
feature which only owns global UI/DATA/RESOURCE catalog
objects). Defaults to ``True``.
enabled: ``False`` to soft-disable a feature so it is filtered out
of selection lists (Store / Admin Feature-Instances dropdown)
without removing its catalog objects, role templates or
already-provisioned instances. Defaults to ``True``.
"""
def registerFeatureDefinition(self, featureCode: str, label: str, icon: str) -> bool:
"""Register a feature definition."""
try:
self._featureDefinitions[featureCode] = {
"code": featureCode,
"label": label,
"icon": icon,
"instantiable": bool(instantiable),
"enabled": bool(enabled),
}
self._featureDefinitions[featureCode] = {"code": featureCode, "label": label, "icon": icon}
return True
except Exception as e:
logger.error(f"Failed to register feature definition {featureCode}: {e}")

View file

@ -9,7 +9,6 @@ from modules.serviceCenter.services.serviceAgent.datamodelAgent import ToolResul
from modules.serviceCenter.services.serviceAgent.toolRegistry import ToolRegistry
from modules.serviceCenter.services.serviceAgent.coreTools._helpers import (
_buildResolverDbFromServices,
_getOrCreateTempFolder,
_looksLikeBinary,
_resolveFileScope,
@ -23,6 +22,20 @@ def _registerConnectionTools(registry: ToolRegistry, services):
"""Auto-extracted from registerCoreTools."""
# ---- Connection tools (external data sources) ----
def _buildResolverDb():
"""Build a DB adapter that ConnectorResolver can use to load UserConnections.
interfaceDbApp has getUserConnectionById; ConnectorResolver expects getUserConnection."""
chatService = services.chat
appIf = getattr(chatService, "interfaceDbApp", None)
if appIf and hasattr(appIf, "getUserConnectionById"):
class _Adapter:
def __init__(self, app):
self._app = app
def getUserConnection(self, connectionId: str):
return self._app.getUserConnectionById(connectionId)
return _Adapter(appIf)
return getattr(chatService, "interfaceDbComponent", None)
async def _listConnections(args: Dict[str, Any], context: Dict[str, Any]):
try:
chatService = services.chat
@ -36,12 +49,7 @@ def _registerConnectionTools(registry: ToolRegistry, services):
authorityVal = authority.value if hasattr(authority, "value") else str(authority)
username = conn.get("externalUsername", "") if isinstance(conn, dict) else getattr(conn, "externalUsername", "")
email = conn.get("externalEmail", "") if isinstance(conn, dict) else getattr(conn, "externalEmail", "")
cid = conn.get("id", "") if isinstance(conn, dict) else getattr(conn, "id", "")
ref = f"connection:{authorityVal}:{username}"
lines.append(
f"- {ref} connectionId={cid} ({email}) "
f"(use this full connection: line or connectionId as connectionReference)"
)
lines.append(f"- connectionId: {connId} | {authorityVal} | {username} ({email})")
return ToolResult(toolCallId="", toolName="listConnections", success=True, data="\n".join(lines))
except Exception as e:
return ToolResult(toolCallId="", toolName="listConnections", success=False, error=str(e))
@ -57,7 +65,7 @@ def _registerConnectionTools(registry: ToolRegistry, services):
from modules.connectors.connectorResolver import ConnectorResolver
resolver = ConnectorResolver(
services.getService("security"),
_buildResolverDbFromServices(services),
_buildResolverDb(),
)
adapter = await resolver.resolveService(connectionId, service)
chatService = services.chat
@ -107,7 +115,7 @@ def _registerConnectionTools(registry: ToolRegistry, services):
from modules.connectors.connectorResolver import ConnectorResolver
resolver = ConnectorResolver(
services.getService("security"),
_buildResolverDbFromServices(services),
_buildResolverDb(),
)
adapter = await resolver.resolveService(connectionId, "outlook")

View file

@ -9,7 +9,6 @@ from modules.serviceCenter.services.serviceAgent.datamodelAgent import ToolResul
from modules.serviceCenter.services.serviceAgent.toolRegistry import ToolRegistry
from modules.serviceCenter.services.serviceAgent.coreTools._helpers import (
_buildResolverDbFromServices,
_getOrCreateTempFolder,
_looksLikeBinary,
_resolveFileScope,
@ -89,7 +88,7 @@ def _registerDataSourceTools(registry: ToolRegistry, services):
from modules.connectors.connectorResolver import ConnectorResolver
resolver = ConnectorResolver(
services.getService("security"),
_buildResolverDbFromServices(services),
_buildResolverDb(),
)
adapter = await resolver.resolveService(connectionId, service)
entries = await adapter.browse(browsePath, filter=args.get("filter"))
@ -125,7 +124,7 @@ def _registerDataSourceTools(registry: ToolRegistry, services):
from modules.connectors.connectorResolver import ConnectorResolver
resolver = ConnectorResolver(
services.getService("security"),
_buildResolverDbFromServices(services),
_buildResolverDb(),
)
adapter = await resolver.resolveService(connectionId, service)
entries = await adapter.search(query, path=basePath)
@ -161,7 +160,7 @@ def _registerDataSourceTools(registry: ToolRegistry, services):
fullPath = filePath if filePath.startswith("/") else f"{basePath.rstrip('/')}/{filePath}"
resolver = ConnectorResolver(
services.getService("security"),
_buildResolverDbFromServices(services),
_buildResolverDb(),
)
adapter = await resolver.resolveService(connectionId, service)
result = await adapter.download(fullPath)

View file

@ -2,7 +2,6 @@
# All rights reserved.
"""Document and vision tools (containers, content objects, image description)."""
import json
import logging
from typing import Any, Dict, List, Optional
@ -19,76 +18,6 @@ from modules.serviceCenter.services.serviceAgent.coreTools._helpers import (
logger = logging.getLogger(__name__)
def _parseUdmJson(raw: Any) -> Optional[Dict[str, Any]]:
if raw is None:
return None
if isinstance(raw, dict):
return raw
if isinstance(raw, str) and raw.strip():
try:
data = json.loads(raw)
return data if isinstance(data, dict) else None
except json.JSONDecodeError:
return None
return None
def _walkUdmBlocksImpl(udm: Dict[str, Any], out: List[Dict[str, Any]], path: str) -> None:
if udm.get("contentType"):
raw = udm.get("raw") or ""
preview = raw[:240] + ("" if len(raw) > 240 else "")
out.append({
"path": path,
"id": udm.get("id"),
"contentType": udm.get("contentType"),
"rawPreview": preview,
})
children = udm.get("children") or []
for i, ch in enumerate(children):
if isinstance(ch, dict):
role = ch.get("role") or "node"
label = f"{path}/children[{i}]"
if ch.get("role") in ("page", "section", "slide", "sheet"):
label = f"{path}/{role}[{ch.get('index', i)}]"
_walkUdmBlocksImpl(ch, out, label)
def _getUdmStructureText(udm: Dict[str, Any]) -> str:
lines = [
f"id: {udm.get('id', '?')}",
f"role: {udm.get('role', '?')}",
f"sourceType: {udm.get('sourceType', '?')}",
f"sourcePath: {udm.get('sourcePath', '')}",
]
nodes = udm.get("children") or []
lines.append(f"structuralNodes (top-level): {len(nodes)}")
for i, sn in enumerate(nodes[:80]):
if isinstance(sn, dict):
role = sn.get("role", "?")
idx = sn.get("index", i)
lab = sn.get("label") or ""
blocks = sn.get("children") or []
lines.append(f" [{i}] {role} index={idx} label={lab!r} contentBlocks={len(blocks)}")
if len(nodes) > 80:
lines.append(f" … and {len(nodes) - 80} more structural nodes")
return "\n".join(lines)
def _filterUdmByTypeImpl(udm: Dict[str, Any], content_type: str) -> Dict[str, Any]:
hits: List[Dict[str, Any]] = []
def collect(node: Any) -> None:
if not isinstance(node, dict):
return
if node.get("contentType") == content_type:
hits.append(dict(node))
for child in node.get("children") or []:
collect(child)
collect(udm)
return {"nodes": hits, "count": len(hits), "contentType": content_type}
def _registerDocumentTools(registry: ToolRegistry, services):
"""Auto-extracted from registerCoreTools."""
# ---- Document tools (Smart Documents / Container Handling) ----
@ -276,91 +205,6 @@ def _registerDocumentTools(registry: ToolRegistry, services):
readOnly=True,
)
# ---- UDM (Unified Document Model) tools ----
async def _getUdmStructure(args: Dict[str, Any], context: Dict[str, Any]):
udm = _parseUdmJson(args.get("udmJson") or args.get("udm"))
if not udm:
return ToolResult(toolCallId="", toolName="getUdmStructure", success=False, error="udmJson must be a JSON object or string")
text = _getUdmStructureText(udm)
return ToolResult(toolCallId="", toolName="getUdmStructure", success=True, data=text)
async def _walkUdmBlocks(args: Dict[str, Any], context: Dict[str, Any]):
udm = _parseUdmJson(args.get("udmJson") or args.get("udm"))
if not udm:
return ToolResult(toolCallId="", toolName="walkUdmBlocks", success=False, error="udmJson must be a JSON object or string")
blocks: List[Dict[str, Any]] = []
_walkUdmBlocksImpl(udm, blocks, "document")
max_n = int(args.get("maxResults") or 200)
trimmed = blocks[:max_n]
lines = [f"Total content blocks found: {len(blocks)} (showing {len(trimmed)})"]
for b in trimmed:
lines.append(f"{b.get('path')} | {b.get('contentType')} | id={b.get('id')}")
if b.get("rawPreview"):
lines.append(f" preview: {b['rawPreview'][:120]}")
if len(blocks) > max_n:
lines.append(f"... {len(blocks) - max_n} more not shown (increase maxResults)")
return ToolResult(toolCallId="", toolName="walkUdmBlocks", success=True, data="\n".join(lines))
async def _filterUdmByType(args: Dict[str, Any], context: Dict[str, Any]):
udm = _parseUdmJson(args.get("udmJson") or args.get("udm"))
content_type = (args.get("contentType") or "").strip()
if not udm:
return ToolResult(toolCallId="", toolName="filterUdmByType", success=False, error="udmJson is required")
if not content_type:
return ToolResult(toolCallId="", toolName="filterUdmByType", success=False, error="contentType is required")
filtered = _filterUdmByTypeImpl(udm, content_type)
return ToolResult(
toolCallId="",
toolName="filterUdmByType",
success=True,
data=json.dumps(filtered, ensure_ascii=False, default=str)[:_MAX_TOOL_RESULT_CHARS],
)
registry.register(
"getUdmStructure",
_getUdmStructure,
description="Summarize hierarchy of a Unified Document Model (UDM) JSON: ids, sourceType, structural nodes and block counts. Pass udmJson as stringified JSON.",
parameters={
"type": "object",
"properties": {
"udmJson": {"type": "string", "description": "Stringified UDM document object (Document → StructuralNode → ContentBlock)"},
},
"required": ["udmJson"],
},
readOnly=True,
)
registry.register(
"walkUdmBlocks",
_walkUdmBlocks,
description="Depth-first walk over a UDM tree; lists each ContentBlock with path, id, type, and short text preview.",
parameters={
"type": "object",
"properties": {
"udmJson": {"type": "string", "description": "Stringified UDM document"},
"maxResults": {"type": "integer", "description": "Max blocks to return (default 200)"},
},
"required": ["udmJson"],
},
readOnly=True,
)
registry.register(
"filterUdmByType",
_filterUdmByType,
description="Return all ContentBlocks in a UDM tree whose contentType matches (e.g. table, image, text).",
parameters={
"type": "object",
"properties": {
"udmJson": {"type": "string", "description": "Stringified UDM document"},
"contentType": {"type": "string", "description": "contentType to match (text, image, table, code, media, link, formula)"},
},
"required": ["udmJson", "contentType"],
},
readOnly=True,
)
# ---- Vision tool ----
async def _describeImage(args: Dict[str, Any], context: Dict[str, Any]):

Some files were not shown because too many files have changed in this diff Show more