BREAKING CHANGE

API and persisted records use PowerOnModel system fields:
- sysCreatedAt, sysCreatedBy, sysModifiedAt, sysModifiedBy
Removed legacy JSON/DB field names:
- _createdAt, _createdBy, _modifiedAt, _modifiedBy
Frontend (frontend_nyla) and gateway call sites were updated accordingly.
Database:
- Bootstrap runs idempotent backfill (_migrateSystemFieldColumns) from old
  underscore columns and selected business duplicates into sys* where sys* IS NULL.
- Re-run app bootstrap against each PostgreSQL database after deploy.
- Optional: DROP INDEX IF EXISTS "idx_invitation_createdby" if an old index remains;
  new index: idx_invitation_syscreatedby on Invitation(sysCreatedBy).
Tests:
- RBAC integration tests aligned with current GROUP mandate filter and UserMandate-based
  UserConnection GROUP clause; buildRbacWhereClause(..., mandateId=...) must be passed
  explicitly (same as production request context).
This commit is contained in:
ValueOn AG 2026-03-28 18:12:37 +01:00
parent efe540b4f9
commit 75484c0f73
55 changed files with 624 additions and 485 deletions

View file

@ -12,6 +12,7 @@ import threading
from modules.shared.timeUtils import getUtcTimestamp
from modules.shared.configuration import APP_CONFIG
from modules.datamodels.datamodelBase import PowerOnModel
from modules.datamodels.datamodelUam import User, AccessLevel, UserPermissions
from modules.datamodels.datamodelRbac import AccessRule, AccessRuleContext
@ -20,7 +21,7 @@ logger = logging.getLogger(__name__)
# No mapping needed - table name = Pydantic model name exactly
class SystemTable(BaseModel):
class SystemTable(PowerOnModel):
"""Data model for system table entries"""
table_name: str = Field(
@ -178,7 +179,7 @@ def _get_cached_connector(
userId: str = None,
) -> "DatabaseConnector":
"""Return cached DatabaseConnector for same (host, database, port) to avoid duplicate PostgreSQL inits.
Uses contextvars for userId so concurrent requests sharing the same connector get correct _createdBy/_modifiedBy.
Uses contextvars for userId so concurrent requests sharing the same connector get correct sysCreatedBy/sysModifiedBy.
"""
port = int(dbPort) if dbPort is not None else 5432
key = (dbHost, dbDatabase, port)
@ -327,8 +328,10 @@ class DatabaseConnector:
id SERIAL PRIMARY KEY,
table_name VARCHAR(255) UNIQUE NOT NULL,
initial_id VARCHAR(255) NOT NULL,
_createdAt DOUBLE PRECISION,
_modifiedAt DOUBLE PRECISION
"sysCreatedAt" DOUBLE PRECISION,
"sysCreatedBy" VARCHAR(255),
"sysModifiedAt" DOUBLE PRECISION,
"sysModifiedBy" VARCHAR(255)
)
""")
conn.close()
@ -416,7 +419,7 @@ class DatabaseConnector:
for table_name, initial_id in data.items():
cursor.execute(
"""
INSERT INTO "_system" ("table_name", "initial_id", "_modifiedAt")
INSERT INTO "_system" ("table_name", "initial_id", "sysModifiedAt")
VALUES (%s, %s, %s)
""",
(table_name, initial_id, getUtcTimestamp()),
@ -448,8 +451,10 @@ class DatabaseConnector:
CREATE TABLE "{self._systemTableName}" (
"table_name" VARCHAR(255) PRIMARY KEY,
"initial_id" VARCHAR(255),
"_createdAt" DOUBLE PRECISION,
"_modifiedAt" DOUBLE PRECISION
"sysCreatedAt" DOUBLE PRECISION,
"sysCreatedBy" VARCHAR(255),
"sysModifiedAt" DOUBLE PRECISION,
"sysModifiedBy" VARCHAR(255)
)
""")
logger.info("System table created successfully")
@ -464,10 +469,16 @@ class DatabaseConnector:
)
existing_columns = [row["column_name"] for row in cursor.fetchall()]
if "_modifiedAt" not in existing_columns:
cursor.execute(
f'ALTER TABLE "{self._systemTableName}" ADD COLUMN "_modifiedAt" DOUBLE PRECISION'
)
for sys_col, sys_sql in [
("sysCreatedAt", "DOUBLE PRECISION"),
("sysCreatedBy", "VARCHAR(255)"),
("sysModifiedAt", "DOUBLE PRECISION"),
("sysModifiedBy", "VARCHAR(255)"),
]:
if sys_col not in existing_columns:
cursor.execute(
f'ALTER TABLE "{self._systemTableName}" ADD COLUMN "{sys_col}" {sys_sql}'
)
return True
except Exception as e:
@ -518,11 +529,7 @@ class DatabaseConnector:
# Desired columns based on model
model_fields = _get_model_fields(model_class)
desired_columns = (
set(["id"])
| set(model_fields.keys())
| {"_createdAt", "_modifiedAt", "_createdBy", "_modifiedBy"}
)
desired_columns = set(["id"]) | set(model_fields.keys())
# Add missing columns
for col in sorted(desired_columns - existing_columns):
@ -530,12 +537,6 @@ class DatabaseConnector:
if col in ["id"]:
continue # primary key exists already
sql_type = model_fields.get(col)
if col in ["_createdAt"]:
sql_type = "DOUBLE PRECISION"
elif col in ["_modifiedAt"]:
sql_type = "DOUBLE PRECISION"
elif col in ["_createdBy", "_modifiedBy"]:
sql_type = "VARCHAR(255)"
if not sql_type:
sql_type = "TEXT"
try:
@ -594,16 +595,6 @@ class DatabaseConnector:
if field_name != "id": # Skip id, already defined
columns.append(f'"{field_name}" {sql_type}')
# Add metadata columns
columns.extend(
[
'"_createdAt" DOUBLE PRECISION',
'"_modifiedAt" DOUBLE PRECISION',
'"_createdBy" VARCHAR(255)',
'"_modifiedBy" VARCHAR(255)',
]
)
# Create table
sql = f'CREATE TABLE IF NOT EXISTS "{table}" ({", ".join(columns)})'
cursor.execute(sql)
@ -626,11 +617,7 @@ class DatabaseConnector:
"""Save record to normalized table with explicit columns."""
# Get columns from Pydantic model instead of database schema
fields = _get_model_fields(model_class)
columns = (
["id"]
+ [field for field in fields.keys() if field != "id"]
+ ["_createdAt", "_createdBy", "_modifiedAt", "_modifiedBy"]
)
columns = ["id"] + [field for field in fields.keys() if field != "id"]
if not columns:
logger.error(f"No columns found for table {table}")
@ -648,7 +635,7 @@ class DatabaseConnector:
value = filtered_record.get(col)
# Handle timestamp fields - store as Unix timestamps (floats) for consistency
if col in ["_createdAt", "_modifiedAt"] and value is not None:
if col in ["sysCreatedAt", "sysModifiedAt"] and value is not None:
if isinstance(value, str):
# Try to parse string as timestamp
try:
@ -690,7 +677,7 @@ class DatabaseConnector:
[
f'"{col}" = EXCLUDED."{col}"'
for col in columns[1:]
if col not in ["_createdAt", "_createdBy"]
if col not in ["sysCreatedAt", "sysCreatedBy"]
]
)
@ -742,17 +729,18 @@ class DatabaseConnector:
if effective_user_id is None:
effective_user_id = self.userId
currentTime = getUtcTimestamp()
# Set _createdAt and _createdBy if this is a new record (record doesn't have _createdAt)
if "_createdAt" not in record:
record["_createdAt"] = currentTime
# Set sysCreatedAt/sysCreatedBy on first persist; always refresh modified fields.
# Use falsy check: model_dump() always includes sysCreatedAt key (often None).
if not record.get("sysCreatedAt"):
record["sysCreatedAt"] = currentTime
if effective_user_id:
record["_createdBy"] = effective_user_id
elif "_createdBy" not in record or not record.get("_createdBy"):
record["sysCreatedBy"] = effective_user_id
elif not record.get("sysCreatedBy"):
if effective_user_id:
record["_createdBy"] = effective_user_id
record["_modifiedAt"] = currentTime
record["sysCreatedBy"] = effective_user_id
record["sysModifiedAt"] = currentTime
if effective_user_id:
record["_modifiedBy"] = effective_user_id
record["sysModifiedBy"] = effective_user_id
with self.connection.cursor() as cursor:
self._save_record(cursor, table, recordId, record, model_class)
@ -840,6 +828,26 @@ class DatabaseConnector:
logger.error(f"Error removing initial ID for table {table}: {e}")
return False
def buildRbacWhereClause(
    self,
    permissions: UserPermissions,
    currentUser: User,
    table: str,
    mandateId: Optional[str] = None,
    featureInstanceId: Optional[str] = None,
) -> Optional[Dict[str, Any]]:
    """Build the RBAC WHERE clause for *table* on behalf of *currentUser*.

    Thin delegation wrapper: tests and call sites use the connector as
    their entry point, while the actual clause construction lives in
    modules.interfaces.interfaceRbac.buildRbacWhereClause. The connector
    instance itself is forwarded so the RBAC layer can query through it.
    """
    # Import at call time so this module stays importable on its own
    # (presumably avoids an import cycle with interfaceRbac — confirm).
    from modules.interfaces.interfaceRbac import (
        buildRbacWhereClause as rbacClauseBuilder,
    )

    return rbacClauseBuilder(
        permissions,
        currentUser,
        table,
        self,
        mandateId=mandateId,
        featureInstanceId=featureInstanceId,
    )
def updateContext(self, userId: str) -> None:
"""Updates the context of the database connector.
Sets both instance userId and contextvar for request-scoped use when connector is shared.
@ -992,10 +1000,6 @@ class DatabaseConnector:
Returns (where_clause, order_clause, limit_clause, values, count_values).
"""
fields = _get_model_fields(model_class)
fields["_createdAt"] = "DOUBLE PRECISION"
fields["_modifiedAt"] = "DOUBLE PRECISION"
fields["_createdBy"] = "TEXT"
fields["_modifiedBy"] = "TEXT"
validColumns = set(fields.keys())
where_parts: List[str] = []
values: List[Any] = []
@ -1190,10 +1194,6 @@ class DatabaseConnector:
"""
table = model_class.__name__
fields = _get_model_fields(model_class)
fields["_createdAt"] = "DOUBLE PRECISION"
fields["_modifiedAt"] = "DOUBLE PRECISION"
fields["_createdBy"] = "TEXT"
fields["_modifiedBy"] = "TEXT"
if column not in fields:
return []

View file

@ -0,0 +1,68 @@
# Copyright (c) 2025 Patrick Motsch
# All rights reserved.
"""Base Pydantic model with system-managed fields (DB + API + UI metadata)."""
from typing import Optional
from pydantic import BaseModel, Field
from modules.shared.attributeUtils import registerModelLabels
class PowerOnModel(BaseModel):
    """Base Pydantic model for persisted records.

    Declares the four system-managed audit fields (sysCreatedAt/By,
    sysModifiedAt/By). The persistence layer fills them on save:
    created* is set once on first persist, modified* is refreshed on
    every save (see DatabaseConnector). Timestamps are stored as Unix
    timestamps (floats) for consistency with the DB layer. All fields
    default to None and are marked read-only / invisible for the
    frontend via json_schema_extra ("system": True).
    """

    # Creation timestamp (UTC, Unix epoch seconds as float); written once
    # by the persistence layer when the record is first saved.
    sysCreatedAt: Optional[float] = Field(
        default=None,
        description="Record creation timestamp (UTC, set by system)",
        json_schema_extra={
            "frontend_type": "timestamp",
            "frontend_readonly": True,
            "frontend_required": False,
            "frontend_visible": False,
            "system": True,
        },
    )
    # ID of the creating user; may remain None when no request user is
    # available (e.g. system-initiated writes).
    sysCreatedBy: Optional[str] = Field(
        default=None,
        description="User ID who created this record (set by system)",
        json_schema_extra={
            "frontend_type": "text",
            "frontend_readonly": True,
            "frontend_required": False,
            "frontend_visible": False,
            "system": True,
        },
    )
    # Last-modification timestamp (UTC, Unix epoch seconds as float);
    # refreshed by the persistence layer on every save.
    sysModifiedAt: Optional[float] = Field(
        default=None,
        description="Record last modification timestamp (UTC, set by system)",
        json_schema_extra={
            "frontend_type": "timestamp",
            "frontend_readonly": True,
            "frontend_required": False,
            "frontend_visible": False,
            "system": True,
        },
    )
    # ID of the last modifying user; like sysCreatedBy, may stay None
    # for writes without a request user context.
    sysModifiedBy: Optional[str] = Field(
        default=None,
        description="User ID who last modified this record (set by system)",
        json_schema_extra={
            "frontend_type": "text",
            "frontend_readonly": True,
            "frontend_required": False,
            "frontend_visible": False,
            "system": True,
        },
    )
# i18n display labels for PowerOnModel's system fields, registered once at
# import time via modules.shared.attributeUtils.
# NOTE(review): the French labels are missing accents ("Cree le" vs
# "Créé le") — confirm whether this is intentional (e.g. encoding policy).
registerModelLabels(
    "PowerOnModel",
    {"en": "Base Record", "de": "Basisdatensatz"},
    {
        "sysCreatedAt": {"en": "Created At", "de": "Erstellt am", "fr": "Cree le"},
        "sysCreatedBy": {"en": "Created By", "de": "Erstellt von", "fr": "Cree par"},
        "sysModifiedAt": {"en": "Modified At", "de": "Geaendert am", "fr": "Modifie le"},
        "sysModifiedBy": {"en": "Modified By", "de": "Geaendert von", "fr": "Modifie par"},
    },
)

View file

@ -6,6 +6,7 @@ from typing import List, Dict, Any, Optional
from enum import Enum
from datetime import date, datetime, timezone
from pydantic import BaseModel, Field
from modules.datamodels.datamodelBase import PowerOnModel
from modules.shared.attributeUtils import registerModelLabels
import uuid
@ -48,7 +49,7 @@ class PeriodTypeEnum(str, Enum):
YEAR = "YEAR"
class BillingAccount(BaseModel):
class BillingAccount(PowerOnModel):
"""Billing account for mandate or user-mandate combination."""
id: str = Field(
default_factory=lambda: str(uuid.uuid4()), description="Primary key"
@ -78,7 +79,7 @@ registerModelLabels(
)
class BillingTransaction(BaseModel):
class BillingTransaction(PowerOnModel):
"""Single billing transaction (credit, debit, adjustment)."""
id: str = Field(
default_factory=lambda: str(uuid.uuid4()), description="Primary key"

View file

@ -5,12 +5,13 @@
from typing import List, Dict, Any, Optional
from enum import Enum
from pydantic import BaseModel, Field
from modules.datamodels.datamodelBase import PowerOnModel
from modules.shared.attributeUtils import registerModelLabels
from modules.shared.timeUtils import getUtcTimestamp
import uuid
class ChatLog(BaseModel):
class ChatLog(PowerOnModel):
"""Log entries for chat workflows. User-owned, no mandate context."""
id: str = Field(
default_factory=lambda: str(uuid.uuid4()), description="Primary key"
@ -56,7 +57,7 @@ registerModelLabels(
)
class ChatDocument(BaseModel):
class ChatDocument(PowerOnModel):
"""Documents attached to chat messages. User-owned, no mandate context."""
id: str = Field(
default_factory=lambda: str(uuid.uuid4()), description="Primary key"
@ -163,7 +164,7 @@ registerModelLabels(
)
class ChatMessage(BaseModel):
class ChatMessage(PowerOnModel):
"""Messages in chat workflows. User-owned, no mandate context."""
id: str = Field(
default_factory=lambda: str(uuid.uuid4()), description="Primary key"
@ -260,7 +261,7 @@ registerModelLabels(
)
class ChatWorkflow(BaseModel):
class ChatWorkflow(PowerOnModel):
"""Chat workflow container. User-owned, no mandate context."""
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key", json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
featureInstanceId: Optional[str] = Field(None, description="Feature instance ID for multi-tenancy isolation", json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False})

View file

@ -8,12 +8,12 @@ Google Drive folder, FTP directory, etc.) for agent-accessible data containers.
from typing import Dict, Any, Optional
from pydantic import BaseModel, Field
from modules.datamodels.datamodelBase import PowerOnModel
from modules.shared.attributeUtils import registerModelLabels
from modules.shared.timeUtils import getUtcTimestamp
import uuid
class DataSource(BaseModel):
class DataSource(PowerOnModel):
"""Configured external data source linked to a UserConnection."""
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key")
connectionId: str = Field(description="FK to UserConnection")
@ -29,7 +29,6 @@ class DataSource(BaseModel):
userId: str = Field(default="", description="Owner user ID")
autoSync: bool = Field(default=False, description="Automatically sync on schedule")
lastSynced: Optional[float] = Field(default=None, description="Last sync timestamp")
createdAt: float = Field(default_factory=getUtcTimestamp, description="Creation timestamp")
scope: str = Field(
default="personal",
description="Data visibility scope: personal, featureInstance, mandate, global",
@ -62,7 +61,6 @@ registerModelLabels(
"userId": {"en": "User ID", "de": "Benutzer-ID", "fr": "ID utilisateur"},
"autoSync": {"en": "Auto Sync", "de": "Auto-Sync", "fr": "Synchro auto"},
"lastSynced": {"en": "Last Synced", "de": "Letzter Sync", "fr": "Dernier sync"},
"createdAt": {"en": "Created At", "de": "Erstellt am", "fr": "Créé le"},
"scope": {"en": "Scope", "de": "Sichtbarkeit"},
"neutralize": {"en": "Neutralize", "de": "Neutralisieren"},
},

View file

@ -8,12 +8,12 @@ so the agent can query structured feature data (e.g. TrusteePosition rows).
from typing import Optional
from pydantic import BaseModel, Field
from modules.datamodels.datamodelBase import PowerOnModel
from modules.shared.attributeUtils import registerModelLabels
from modules.shared.timeUtils import getUtcTimestamp
import uuid
class FeatureDataSource(BaseModel):
class FeatureDataSource(PowerOnModel):
"""A feature-instance table attached as data source in the AI workspace."""
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key")
featureInstanceId: str = Field(description="FK to FeatureInstance")
@ -24,7 +24,6 @@ class FeatureDataSource(BaseModel):
mandateId: str = Field(default="", description="Mandate scope")
userId: str = Field(default="", description="Owner user ID")
workspaceInstanceId: str = Field(description="Workspace instance where this source is used")
createdAt: float = Field(default_factory=getUtcTimestamp, description="Creation timestamp")
scope: str = Field(
default="personal",
description="Data visibility scope: personal, featureInstance, mandate, global",
@ -55,6 +54,5 @@ registerModelLabels(
"mandateId": {"en": "Mandate", "de": "Mandant", "fr": "Mandat"},
"userId": {"en": "User", "de": "Benutzer", "fr": "Utilisateur"},
"workspaceInstanceId": {"en": "Workspace", "de": "Workspace", "fr": "Espace de travail"},
"createdAt": {"en": "Created At", "de": "Erstellt am", "fr": "Créé le"},
},
)

View file

@ -5,11 +5,12 @@
import uuid
from typing import Optional, Dict, Any
from pydantic import BaseModel, Field
from modules.datamodels.datamodelBase import PowerOnModel
from modules.shared.attributeUtils import registerModelLabels
from modules.datamodels.datamodelUtils import TextMultilingual
class Feature(BaseModel):
class Feature(PowerOnModel):
"""
Feature-Definition (global, z.B. 'trustee', 'chatbot').
Features sind die verfügbaren Funktionalitäten der Plattform.
@ -40,7 +41,7 @@ registerModelLabels(
)
class FeatureInstance(BaseModel):
class FeatureInstance(PowerOnModel):
"""
Instanz eines Features in einem Mandanten.
Ein Mandant kann mehrere Instanzen desselben Features haben.

View file

@ -4,18 +4,17 @@
from typing import Optional
from pydantic import BaseModel, Field
from modules.datamodels.datamodelBase import PowerOnModel
from modules.shared.attributeUtils import registerModelLabels
from modules.shared.timeUtils import getUtcTimestamp
import uuid
class FileFolder(BaseModel):
class FileFolder(PowerOnModel):
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key", json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
name: str = Field(description="Folder name", json_schema_extra={"frontend_type": "text", "frontend_readonly": False, "frontend_required": True})
parentId: Optional[str] = Field(default=None, description="Parent folder ID (null = root)", json_schema_extra={"frontend_type": "text", "frontend_readonly": False, "frontend_required": False})
mandateId: Optional[str] = Field(default=None, description="Mandate context", json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
featureInstanceId: Optional[str] = Field(default=None, description="Feature instance context", json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
createdAt: float = Field(default_factory=getUtcTimestamp, description="Creation timestamp", json_schema_extra={"frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False})
registerModelLabels(
@ -27,6 +26,5 @@ registerModelLabels(
"parentId": {"en": "Parent Folder", "fr": "Dossier parent"},
"mandateId": {"en": "Mandate ID", "fr": "ID du mandat"},
"featureInstanceId": {"en": "Feature Instance ID", "fr": "ID de l'instance"},
"createdAt": {"en": "Created At", "fr": "Créé le"},
},
)

View file

@ -3,15 +3,14 @@
"""File-related datamodels: FileItem, FilePreview, FileData."""
from typing import Dict, Any, List, Optional, Union
from pydantic import BaseModel, ConfigDict, Field
from pydantic import BaseModel, Field
from modules.datamodels.datamodelBase import PowerOnModel
from modules.shared.attributeUtils import registerModelLabels
from modules.shared.timeUtils import getUtcTimestamp
import uuid
import base64
class FileItem(BaseModel):
model_config = ConfigDict(extra='allow') # Preserve system fields (_createdBy, _createdAt, etc.)
class FileItem(PowerOnModel):
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key", json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
mandateId: Optional[str] = Field(default="", description="ID of the mandate this file belongs to", json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
featureInstanceId: Optional[str] = Field(default="", description="ID of the feature instance this file belongs to", json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False, "frontend_fk_source": "/api/features/instances", "frontend_fk_display_field": "label"})
@ -19,7 +18,6 @@ class FileItem(BaseModel):
mimeType: str = Field(description="MIME type of the file", json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
fileHash: str = Field(description="Hash of the file", json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
fileSize: int = Field(description="Size of the file in bytes", json_schema_extra={"frontend_type": "integer", "frontend_readonly": True, "frontend_required": False})
creationDate: float = Field(default_factory=getUtcTimestamp, description="Date when the file was created (UTC timestamp in seconds)", json_schema_extra={"frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False})
tags: Optional[List[str]] = Field(default=None, description="Tags for categorization and search", json_schema_extra={"frontend_type": "tags", "frontend_readonly": False, "frontend_required": False})
folderId: Optional[str] = Field(default=None, description="ID of the parent folder", json_schema_extra={"frontend_type": "text", "frontend_readonly": False, "frontend_required": False})
description: Optional[str] = Field(default=None, description="User-provided description of the file", json_schema_extra={"frontend_type": "textarea", "frontend_readonly": False, "frontend_required": False})
@ -51,7 +49,6 @@ registerModelLabels(
"mimeType": {"en": "MIME Type", "fr": "Type MIME"},
"fileHash": {"en": "File Hash", "fr": "Hash du fichier"},
"fileSize": {"en": "File Size", "fr": "Taille du fichier"},
"creationDate": {"en": "Creation Date", "fr": "Date de création"},
"tags": {"en": "Tags", "fr": "Tags"},
"folderId": {"en": "Folder ID", "fr": "ID du dossier"},
"description": {"en": "Description", "fr": "Description"},
@ -88,7 +85,7 @@ registerModelLabels(
},
)
class FileData(BaseModel):
class FileData(PowerOnModel):
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key")
data: str = Field(description="File data content")
base64Encoded: bool = Field(description="Whether the data is base64 encoded")

View file

@ -9,11 +9,11 @@ import uuid
import secrets
from typing import Optional, List
from pydantic import BaseModel, Field
from modules.datamodels.datamodelBase import PowerOnModel
from modules.shared.attributeUtils import registerModelLabels
from modules.shared.timeUtils import getUtcTimestamp
class Invitation(BaseModel):
class Invitation(PowerOnModel):
"""
Einladungs-Token für neue User.
Ermöglicht Self-Service Onboarding zu Mandanten und Feature-Instanzen.
@ -56,15 +56,6 @@ class Invitation(BaseModel):
description="Email address to send invitation link (optional)",
json_schema_extra={"frontend_type": "email", "frontend_readonly": False, "frontend_required": False}
)
createdBy: str = Field(
description="User ID of the person who created the invitation",
json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": True}
)
createdAt: float = Field(
default_factory=getUtcTimestamp,
description="When the invitation was created (UTC timestamp)",
json_schema_extra={"frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False}
)
expiresAt: float = Field(
description="When the invitation expires (UTC timestamp)",
json_schema_extra={"frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": True}
@ -121,8 +112,6 @@ registerModelLabels(
"roleIds": {"en": "Roles", "de": "Rollen", "fr": "Rôles"},
"targetUsername": {"en": "Target Username", "de": "Ziel-Benutzername", "fr": "Nom d'utilisateur cible"},
"email": {"en": "Email (optional)", "de": "E-Mail (optional)", "fr": "Email (optionnel)"},
"createdBy": {"en": "Created By", "de": "Erstellt von", "fr": "Créé par"},
"createdAt": {"en": "Created At", "de": "Erstellt am", "fr": "Créé le"},
"expiresAt": {"en": "Expires At", "de": "Gültig bis", "fr": "Expire le"},
"usedBy": {"en": "Used By", "de": "Verwendet von", "fr": "Utilisé par"},
"usedAt": {"en": "Used At", "de": "Verwendet am", "fr": "Utilisé le"},

View file

@ -12,12 +12,13 @@ Vector fields use json_schema_extra={"db_type": "vector(1536)"} for pgvector.
from typing import Dict, Any, List, Optional
from pydantic import BaseModel, Field
from modules.datamodels.datamodelBase import PowerOnModel
from modules.shared.attributeUtils import registerModelLabels
from modules.shared.timeUtils import getUtcTimestamp
import uuid
class FileContentIndex(BaseModel):
class FileContentIndex(PowerOnModel):
"""Structural index of a file's content objects. Created without AI.
Lives in the Instance Layer; optionally promoted to Shared Layer via isShared."""
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key (typically = fileId)")
@ -73,7 +74,7 @@ registerModelLabels(
)
class ContentChunk(BaseModel):
class ContentChunk(PowerOnModel):
"""Persisted content chunk with embedding vector. Reusable across workflows.
Scalar content object (or chunk thereof) with pgvector embedding."""
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key")
@ -111,7 +112,7 @@ registerModelLabels(
)
class RoundMemory(BaseModel):
class RoundMemory(PowerOnModel):
"""Persistent per-round memory for agent tool results, file refs, and decisions.
Stored after each agent round so that RAG can retrieve relevant context
@ -135,7 +136,6 @@ class RoundMemory(BaseModel):
description="Embedding of summary for semantic retrieval",
json_schema_extra={"db_type": "vector(1536)"},
)
createdAt: float = Field(default_factory=getUtcTimestamp, description="Creation timestamp")
registerModelLabels(
@ -151,12 +151,11 @@ registerModelLabels(
"fullData": {"en": "Full Data", "fr": "Données complètes"},
"fileIds": {"en": "File IDs", "fr": "IDs de fichier"},
"embedding": {"en": "Embedding", "fr": "Vecteur d'embedding"},
"createdAt": {"en": "Created At", "fr": "Créé le"},
},
)
class WorkflowMemory(BaseModel):
class WorkflowMemory(PowerOnModel):
"""Workflow-scoped key-value cache for entities and facts.
Extracted during agent rounds, persisted for cross-round and cross-workflow reuse."""
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key")
@ -166,7 +165,6 @@ class WorkflowMemory(BaseModel):
key: str = Field(description="Key identifier (e.g. 'entity:companyName')")
value: str = Field(description="Extracted value")
source: str = Field(default="extraction", description="Origin: extraction, tool, conversation, summary")
createdAt: float = Field(default_factory=getUtcTimestamp, description="Creation timestamp")
embedding: Optional[List[float]] = Field(
default=None, description="Optional embedding for semantic lookup",
json_schema_extra={"db_type": "vector(1536)"}
@ -184,7 +182,6 @@ registerModelLabels(
"key": {"en": "Key", "fr": "Clé"},
"value": {"en": "Value", "fr": "Valeur"},
"source": {"en": "Source", "fr": "Source"},
"createdAt": {"en": "Created At", "fr": "Créé le"},
"embedding": {"en": "Embedding", "fr": "Vecteur d'embedding"},
},
)

View file

@ -9,10 +9,11 @@ Rollen werden über Junction Tables verknüpft für saubere CASCADE DELETE.
import uuid
from pydantic import BaseModel, Field
from modules.datamodels.datamodelBase import PowerOnModel
from modules.shared.attributeUtils import registerModelLabels
class UserMandate(BaseModel):
class UserMandate(PowerOnModel):
"""
User-Mitgliedschaft in einem Mandanten.
Kein User gehört direkt zu einem Mandanten - Zugehörigkeit wird über dieses Model gesteuert.
@ -50,7 +51,7 @@ registerModelLabels(
)
class FeatureAccess(BaseModel):
class FeatureAccess(PowerOnModel):
"""
User-Zugriff auf eine Feature-Instanz.
Definiert welche User auf welche Feature-Instanzen zugreifen können.
@ -88,7 +89,7 @@ registerModelLabels(
)
class UserMandateRole(BaseModel):
class UserMandateRole(PowerOnModel):
"""
Junction Table: UserMandate zu Role.
Ermöglicht CASCADE DELETE auf Datenbankebene.
@ -119,7 +120,7 @@ registerModelLabels(
)
class FeatureAccessRole(BaseModel):
class FeatureAccessRole(PowerOnModel):
"""
Junction Table: FeatureAccess zu Role.
Ermöglicht CASCADE DELETE auf Datenbankebene.

View file

@ -6,8 +6,8 @@ import uuid
from typing import Optional
from enum import Enum
from pydantic import BaseModel, Field, ConfigDict
from modules.datamodels.datamodelBase import PowerOnModel
from modules.shared.attributeUtils import registerModelLabels
from modules.shared.timeUtils import getUtcTimestamp
class MessagingChannel(str, Enum):
@ -26,7 +26,7 @@ class DeliveryStatus(str, Enum):
FAILED = "failed"
class MessagingSubscription(BaseModel):
class MessagingSubscription(PowerOnModel):
"""Data model for messaging subscriptions"""
id: str = Field(
default_factory=lambda: str(uuid.uuid4()),
@ -64,26 +64,6 @@ class MessagingSubscription(BaseModel):
description="Whether the subscription is enabled",
json_schema_extra={"frontend_type": "checkbox", "frontend_readonly": False, "frontend_required": False}
)
creationDate: float = Field(
default_factory=getUtcTimestamp,
description="When the subscription was created (UTC timestamp in seconds)",
json_schema_extra={"frontend_type": "datetime", "frontend_readonly": True, "frontend_required": False}
)
lastModified: float = Field(
default_factory=getUtcTimestamp,
description="When the subscription was last modified (UTC timestamp in seconds)",
json_schema_extra={"frontend_type": "datetime", "frontend_readonly": True, "frontend_required": False}
)
createdBy: Optional[str] = Field(
default=None,
description="User ID who created the subscription",
json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False}
)
modifiedBy: Optional[str] = Field(
default=None,
description="User ID who last modified the subscription",
json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False}
)
model_config = ConfigDict(use_enum_values=True)
@ -100,10 +80,6 @@ registerModelLabels(
"description": {"en": "Description", "fr": "Description"},
"isSystemSubscription": {"en": "System Subscription", "fr": "Abonnement système"},
"enabled": {"en": "Enabled", "fr": "Activé"},
"creationDate": {"en": "Creation Date", "fr": "Date de création"},
"lastModified": {"en": "Last Modified", "fr": "Dernière modification"},
"createdBy": {"en": "Created By", "fr": "Créé par"},
"modifiedBy": {"en": "Modified By", "fr": "Modifié par"},
},
)
@ -155,16 +131,6 @@ class MessagingSubscriptionRegistration(BaseModel):
description="Whether this registration is enabled",
json_schema_extra={"frontend_type": "checkbox", "frontend_readonly": False, "frontend_required": False}
)
creationDate: float = Field(
default_factory=getUtcTimestamp,
description="When the registration was created (UTC timestamp in seconds)",
json_schema_extra={"frontend_type": "datetime", "frontend_readonly": True, "frontend_required": False}
)
lastModified: float = Field(
default_factory=getUtcTimestamp,
description="When the registration was last modified (UTC timestamp in seconds)",
json_schema_extra={"frontend_type": "datetime", "frontend_readonly": True, "frontend_required": False}
)
model_config = ConfigDict(use_enum_values=True)
@ -181,8 +147,6 @@ registerModelLabels(
"channel": {"en": "Channel", "fr": "Canal"},
"channelConfig": {"en": "Channel Config", "fr": "Configuration du canal"},
"enabled": {"en": "Enabled", "fr": "Activé"},
"creationDate": {"en": "Creation Date", "fr": "Date de création"},
"lastModified": {"en": "Last Modified", "fr": "Dernière modification"},
},
)
@ -248,11 +212,6 @@ class MessagingDelivery(BaseModel):
description="When the delivery was sent (UTC timestamp in seconds)",
json_schema_extra={"frontend_type": "datetime", "frontend_readonly": True, "frontend_required": False}
)
creationDate: float = Field(
default_factory=getUtcTimestamp,
description="When the delivery record was created (UTC timestamp in seconds)",
json_schema_extra={"frontend_type": "datetime", "frontend_readonly": True, "frontend_required": False}
)
model_config = ConfigDict(use_enum_values=True)
@ -270,7 +229,6 @@ registerModelLabels(
"status": {"en": "Status", "fr": "Statut"},
"errorMessage": {"en": "Error Message", "fr": "Message d'erreur"},
"sentAt": {"en": "Sent At", "fr": "Envoyé le"},
"creationDate": {"en": "Creation Date", "fr": "Date de création"},
},
)
@ -349,4 +307,3 @@ class MessagingSubscriptionExecutionResult(BaseModel):
description="Error message if execution failed",
json_schema_extra={"frontend_type": "textarea", "frontend_readonly": True, "frontend_required": False}
)
model_config = ConfigDict(extra="allow") # Allow additional fields for custom results

View file

@ -9,8 +9,8 @@ import uuid
from typing import Optional, List
from enum import Enum
from pydantic import BaseModel, Field, ConfigDict
from modules.datamodels.datamodelBase import PowerOnModel
from modules.shared.attributeUtils import registerModelLabels
from modules.shared.timeUtils import getUtcTimestamp
class NotificationType(str, Enum):
@ -43,7 +43,7 @@ class NotificationAction(BaseModel):
)
class UserNotification(BaseModel):
class UserNotification(PowerOnModel):
"""
In-app notification for a user.
Supports actionable notifications with accept/decline buttons.
@ -137,11 +137,6 @@ class UserNotification(BaseModel):
)
# Timestamps
createdAt: float = Field(
default_factory=getUtcTimestamp,
description="When the notification was created (UTC timestamp)",
json_schema_extra={"frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False}
)
readAt: Optional[float] = Field(
default=None,
description="When the notification was read (UTC timestamp)",
@ -177,7 +172,6 @@ registerModelLabels(
"actions": {"en": "Actions", "de": "Aktionen", "fr": "Actions"},
"actionTaken": {"en": "Action Taken", "de": "Durchgeführte Aktion", "fr": "Action effectuée"},
"actionResult": {"en": "Action Result", "de": "Aktions-Ergebnis", "fr": "Résultat de l'action"},
"createdAt": {"en": "Created At", "de": "Erstellt am", "fr": "Créé le"},
"readAt": {"en": "Read At", "de": "Gelesen am", "fr": "Lu le"},
"actionedAt": {"en": "Actioned At", "de": "Bearbeitet am", "fr": "Traité le"},
"expiresAt": {"en": "Expires At", "de": "Gültig bis", "fr": "Expire le"},

View file

@ -13,6 +13,7 @@ import uuid
from typing import Optional
from enum import Enum
from pydantic import BaseModel, Field
from modules.datamodels.datamodelBase import PowerOnModel
from modules.shared.attributeUtils import registerModelLabels
from modules.datamodels.datamodelUtils import TextMultilingual
from modules.datamodels.datamodelUam import AccessLevel
@ -25,7 +26,7 @@ class AccessRuleContext(str, Enum):
RESOURCE = "RESOURCE" # System resources (AI models, actions, etc.)
class Role(BaseModel):
class Role(PowerOnModel):
"""
Data model for RBAC roles.
@ -90,7 +91,7 @@ registerModelLabels(
)
class AccessRule(BaseModel):
class AccessRule(PowerOnModel):
"""
Data model for access control rules.

View file

@ -11,6 +11,7 @@ Multi-Tenant Design:
from typing import Optional, Any
from pydantic import BaseModel, Field, ConfigDict, model_validator
from modules.datamodels.datamodelBase import PowerOnModel
from modules.shared.attributeUtils import registerModelLabels
from modules.shared.timeUtils import getUtcTimestamp
from .datamodelUam import AuthAuthority
@ -30,7 +31,7 @@ class TokenPurpose(str, Enum):
DATA_CONNECTION = "dataConnection"
class Token(BaseModel):
class Token(PowerOnModel):
"""
Authentication Token model.
@ -55,9 +56,6 @@ class Token(BaseModel):
description="When the token expires (UTC timestamp in seconds)"
)
tokenRefresh: Optional[str] = None
createdAt: Optional[float] = Field(
None, description="When the token was created (UTC timestamp in seconds)"
)
status: TokenStatus = Field(
default=TokenStatus.ACTIVE, description="Token status: active/revoked"
)
@ -106,7 +104,6 @@ registerModelLabels(
"tokenType": {"en": "Token Type", "de": "Token-Typ", "fr": "Type de jeton"},
"expiresAt": {"en": "Expires At", "de": "Läuft ab am", "fr": "Expire le"},
"tokenRefresh": {"en": "Refresh Token", "de": "Refresh-Token", "fr": "Jeton de rafraîchissement"},
"createdAt": {"en": "Created At", "de": "Erstellt am", "fr": "Créé le"},
"status": {"en": "Status", "de": "Status", "fr": "Statut"},
"revokedAt": {"en": "Revoked At", "de": "Widerrufen am", "fr": "Révoqué le"},
"revokedBy": {"en": "Revoked By", "de": "Widerrufen von", "fr": "Révoqué par"},
@ -116,7 +113,7 @@ registerModelLabels(
)
class AuthEvent(BaseModel):
class AuthEvent(PowerOnModel):
"""Authentication event for audit logging."""
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Unique ID of the auth event", json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
userId: str = Field(description="ID of the user this event belongs to", json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": True})

View file

@ -10,6 +10,7 @@ from typing import Dict, List, Optional
from enum import Enum
from datetime import datetime, timezone
from pydantic import BaseModel, Field
from modules.datamodels.datamodelBase import PowerOnModel
from modules.shared.attributeUtils import registerModelLabels
import uuid
@ -124,7 +125,7 @@ registerModelLabels(
# Instance: MandateSubscription
# ============================================================================
class MandateSubscription(BaseModel):
class MandateSubscription(PowerOnModel):
"""A subscription instance bound to a specific mandate.
See wiki/concepts/Subscription-State-Machine.md for state transitions."""
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key")

View file

@ -13,6 +13,7 @@ import uuid
from typing import Optional, List, Dict
from enum import Enum
from pydantic import BaseModel, Field, EmailStr, field_validator, computed_field
from modules.datamodels.datamodelBase import PowerOnModel
from modules.shared.attributeUtils import registerModelLabels
from modules.shared.timeUtils import getUtcTimestamp
@ -65,7 +66,7 @@ class MandateType(str, Enum):
COMPANY = "company"
class Mandate(BaseModel):
class Mandate(PowerOnModel):
"""
Mandate (Mandant/Tenant) model.
Ein Mandant ist ein isolierter Bereich für Daten und Berechtigungen.
@ -145,7 +146,7 @@ registerModelLabels(
)
class UserConnection(BaseModel):
class UserConnection(PowerOnModel):
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Unique ID of the connection", json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
userId: str = Field(description="ID of the user this connection belongs to", json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
authority: AuthAuthority = Field(description="Authentication authority", json_schema_extra={"frontend_type": "select", "frontend_readonly": True, "frontend_required": False, "frontend_options": "/api/connections/authorities/options"})
@ -202,7 +203,7 @@ registerModelLabels(
)
class User(BaseModel):
class User(PowerOnModel):
"""
User model.
@ -289,6 +290,11 @@ class User(BaseModel):
description="Primary authentication authority",
json_schema_extra={"frontend_type": "select", "frontend_readonly": True, "frontend_required": False, "frontend_options": "/api/connections/authorities/options"}
)
roleLabels: List[str] = Field(
default_factory=list,
description="Role labels (from DB or enriched when loading users)",
json_schema_extra={"frontend_type": "multiselect", "frontend_readonly": True, "frontend_visible": False, "frontend_required": False},
)
registerModelLabels(
@ -303,6 +309,7 @@ registerModelLabels(
"enabled": {"en": "Enabled", "de": "Aktiviert", "fr": "Activé"},
"isSysAdmin": {"en": "System Admin", "de": "System-Admin", "fr": "Admin système"},
"authenticationAuthority": {"en": "Auth Authority", "de": "Authentifizierung", "fr": "Autorité d'authentification"},
"roleLabels": {"en": "Role Labels", "de": "Rollen-Labels", "fr": "Libellés de rôles"},
},
)
@ -325,7 +332,7 @@ registerModelLabels(
)
class UserVoicePreferences(BaseModel):
class UserVoicePreferences(PowerOnModel):
"""User-level voice/language preferences, shared across all features."""
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key")
userId: str = Field(description="User ID")

View file

@ -3,13 +3,13 @@
"""Utility datamodels: Prompt, TextMultilingual."""
from typing import Dict, Optional
from pydantic import BaseModel, ConfigDict, Field, field_validator
from pydantic import BaseModel, Field, field_validator
from modules.datamodels.datamodelBase import PowerOnModel
from modules.shared.attributeUtils import registerModelLabels
import uuid
class Prompt(BaseModel):
model_config = ConfigDict(extra='allow') # Preserve system fields (_createdBy, _createdAt, etc.)
class Prompt(PowerOnModel):
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key", json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
mandateId: str = Field(default="", description="ID of the mandate this prompt belongs to", json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
isSystem: bool = Field(default=False, description="System prompt visible to all users (read-only for non-SysAdmin)", json_schema_extra={"frontend_type": "boolean", "frontend_readonly": True, "frontend_required": False})

View file

@ -4,6 +4,7 @@
from typing import List, Dict, Any, Optional
from pydantic import BaseModel, Field
from modules.datamodels.datamodelBase import PowerOnModel
from modules.shared.attributeUtils import registerModelLabels
from modules.datamodels.datamodelUtils import TextMultilingual
import uuid
@ -48,7 +49,7 @@ registerModelLabels(
)
class AutomationTemplate(BaseModel):
class AutomationTemplate(PowerOnModel):
"""Automation-Vorlage ohne scharfe Placeholder-Werte (DB-persistiert).
System-Templates (isSystem=True): Nur durch SysAdmin aenderbar. Alle User koennen lesen.
@ -82,9 +83,6 @@ class AutomationTemplate(BaseModel):
description="Feature instance ID (null for system templates, set for instance-scoped templates)",
json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False}
)
# System fields (_createdAt, _createdBy, etc.) werden automatisch vom DB-Connector gesetzt
registerModelLabels(
"AutomationTemplate",
{"en": "Automation Template", "ge": "Automation-Vorlage", "fr": "Modèle d'automatisation"},

View file

@ -22,6 +22,13 @@ from modules.shared.configuration import APP_CONFIG
logger = logging.getLogger(__name__)
def _automationDefinitionPayload(data: Dict[str, Any]) -> Dict[str, Any]:
"""Strip connector/enrichment keys; only fields defined on AutomationDefinition."""
allowed = AutomationDefinition.model_fields.keys()
return {k: v for k, v in (data or {}).items() if k in allowed}
# Singleton factory for Automation instances
_automationInterfaces = {}
@ -100,7 +107,7 @@ class AutomationObjects:
if recordId:
record = self.db.getRecordset(model, recordFilter={"id": recordId})
if record:
return record[0].get("_createdBy") == self.userId
return record[0].get("sysCreatedBy") == self.userId
else:
return False # Record not found = no access
return True # No recordId needed (e.g., for CREATE)
@ -130,7 +137,7 @@ class AutomationObjects:
featureInstanceIds = set()
for automation in automations:
createdBy = automation.get("_createdBy")
createdBy = automation.get("sysCreatedBy")
if createdBy:
userIds.add(createdBy)
@ -186,8 +193,8 @@ class AutomationObjects:
# Enrich each automation with the fetched data
# SECURITY: Never show a fallback name — if lookup fails, show empty string
for automation in automations:
createdBy = automation.get("_createdBy")
automation["_createdByUserName"] = usersMap.get(createdBy, "") if createdBy else ""
createdBy = automation.get("sysCreatedBy")
automation["sysCreatedByUserName"] = usersMap.get(createdBy, "") if createdBy else ""
mandateId = automation.get("mandateId")
automation["mandateName"] = mandatesMap.get(mandateId, "") if mandateId else ""
@ -295,7 +302,7 @@ class AutomationObjects:
Args:
automationId: ID of the automation to get
includeSystemFields: If True, returns raw dict with system fields (_createdBy, etc).
includeSystemFields: If True, returns raw dict with system fields (sysCreatedBy, etc).
If False (default), returns Pydantic model without system fields.
"""
try:
@ -330,7 +337,7 @@ class AutomationObjects:
return AutomationWithSystemFields(automation)
# Clean metadata fields and return Pydantic model
cleanedRecord = {k: v for k, v in automation.items() if not k.startswith("_")}
cleanedRecord = _automationDefinitionPayload(automation)
return AutomationDefinition(**cleanedRecord)
except Exception as e:
logger.error(f"Error getting automation definition: {str(e)}")
@ -365,7 +372,7 @@ class AutomationObjects:
# Ensure database connector has correct userId context
if not self.userId:
logger.error(f"createAutomationDefinition: userId is not set! Cannot set _createdBy. currentUser={self.currentUser}")
logger.error(f"createAutomationDefinition: userId is not set! Cannot set sysCreatedBy. currentUser={self.currentUser}")
elif hasattr(self.db, 'updateContext'):
try:
self.db.updateContext(self.userId)
@ -386,7 +393,7 @@ class AutomationObjects:
self._notifyAutomationChanged()
# Clean metadata fields and return Pydantic model
cleanedRecord = {k: v for k, v in createdAutomation.items() if not k.startswith("_")}
cleanedRecord = _automationDefinitionPayload(createdAutomation)
return AutomationDefinition(**cleanedRecord)
except Exception as e:
logger.error(f"Error creating automation definition: {str(e)}")
@ -446,7 +453,7 @@ class AutomationObjects:
self._notifyAutomationChanged()
# Clean metadata fields and return Pydantic model
cleanedRecord = {k: v for k, v in updatedAutomation.items() if not k.startswith("_")}
cleanedRecord = _automationDefinitionPayload(updatedAutomation)
return AutomationDefinition(**cleanedRecord)
except Exception as e:
logger.error(f"Error updating automation definition: {str(e)}")
@ -561,7 +568,7 @@ class AutomationObjects:
# Collect unique user IDs
userIds = set()
for template in templates:
createdBy = template.get("_createdBy")
createdBy = template.get("sysCreatedBy")
if createdBy:
userIds.add(createdBy)
@ -585,8 +592,8 @@ class AutomationObjects:
# Apply to templates — SECURITY: no fallback, empty if not found
for template in templates:
createdBy = template.get("_createdBy")
template["_createdByUserName"] = userNameMap.get(createdBy, "") if createdBy else ""
createdBy = template.get("sysCreatedBy")
template["sysCreatedByUserName"] = userNameMap.get(createdBy, "") if createdBy else ""
except Exception as e:
logger.warning(f"Could not enrich templates with user names: {e}")

View file

@ -77,8 +77,8 @@ def get_automations(
# If pagination was requested, result is PaginatedResult
# If no pagination, result is List[Dict]
# Note: Using JSONResponse to bypass Pydantic validation which would filter out _createdBy
# The enriched fields (_createdByUserName, mandateName) are not in the Pydantic model
# Note: Using JSONResponse to bypass Pydantic validation which would filter out sysCreatedBy
# The enriched fields (sysCreatedByUserName, mandateName) are not in the Pydantic model
from fastapi.responses import JSONResponse
if paginationParams:

View file

@ -4,6 +4,7 @@
from typing import Dict, Any, List, Optional
from pydantic import BaseModel, Field
from modules.datamodels.datamodelBase import PowerOnModel
from modules.shared.attributeUtils import registerModelLabels
import uuid
@ -52,7 +53,7 @@ registerModelLabels(
)
class Automation2WorkflowRun(BaseModel):
class Automation2WorkflowRun(PowerOnModel):
id: str = Field(
default_factory=lambda: str(uuid.uuid4()),
description="Primary key",
@ -98,7 +99,7 @@ registerModelLabels(
)
class Automation2HumanTask(BaseModel):
class Automation2HumanTask(PowerOnModel):
id: str = Field(
default_factory=lambda: str(uuid.uuid4()),
description="Primary key",

View file

@ -359,7 +359,7 @@ def get_workflows(
active_run = None
last_started_at = None
for r in runs:
ts = r.get("_createdAt")
ts = r.get("sysCreatedAt")
if ts and (last_started_at is None or ts > last_started_at):
last_started_at = ts
if r.get("status") in ("running", "paused"):
@ -375,7 +375,7 @@ def get_workflows(
"runStatus": active_run.get("status") if active_run else None,
"stuckAtNodeId": stuck_at_node_id,
"stuckAtNodeLabel": stuck_at_node_label or stuck_at_node_id or "",
"createdAt": wf.get("_createdAt"),
"createdAt": wf.get("sysCreatedAt"),
"lastStartedAt": last_started_at,
})
return {"workflows": enriched}
@ -536,7 +536,7 @@ def get_tasks(
context: RequestContext = Depends(getRequestContext),
) -> dict:
"""Get tasks - by default those assigned to current user, or all if no assignee filter.
Enriches each task with workflowLabel and createdAt (_createdAt).
Enriches each task with workflowLabel and createdAt (from sysCreatedAt).
"""
mandateId = _validateInstanceAccess(instanceId, context)
a2 = getAutomation2Interface(context.user, mandateId, instanceId)
@ -549,7 +549,7 @@ def get_tasks(
enriched.append({
**t,
"workflowLabel": wf.get("label", t.get("workflowId", "")) if wf else t.get("workflowId", ""),
"createdAt": t.get("_createdAt"),
"createdAt": t.get("sysCreatedAt"),
})
return {"tasks": enriched}

View file

@ -20,6 +20,7 @@ from modules.datamodels.datamodelRbac import AccessRuleContext
from modules.datamodels.datamodelUam import AccessLevel
from modules.datamodels.datamodelChat import UserInputRequest
from modules.datamodels.datamodelBase import PowerOnModel
from modules.shared.timeUtils import getUtcTimestamp, parseTimestamp
# =============================================================================
@ -27,7 +28,7 @@ from modules.shared.timeUtils import getUtcTimestamp, parseTimestamp
# =============================================================================
class ChatbotDocument(BaseModel):
class ChatbotDocument(PowerOnModel):
"""Documents attached to chatbot messages."""
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key")
messageId: str = Field(description="Foreign key to message")
@ -41,7 +42,7 @@ class ChatbotDocument(BaseModel):
actionId: Optional[str] = Field(None, description="ID of the action that created this document")
class ChatbotMessage(BaseModel):
class ChatbotMessage(PowerOnModel):
"""Messages in chatbot conversations. Must match bridge format in memory.py."""
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key")
conversationId: str = Field(description="Foreign key to conversation")
@ -64,7 +65,7 @@ class ChatbotMessage(BaseModel):
actionProgress: Optional[str] = Field(None, description="Action progress status")
class ChatbotLog(BaseModel):
class ChatbotLog(PowerOnModel):
"""Log entries for chatbot conversations."""
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key")
conversationId: str = Field(description="Foreign key to conversation")
@ -85,7 +86,7 @@ class ChatbotWorkflowModeEnum(str, Enum):
WORKFLOW_CHATBOT = "Chatbot"
class ChatbotConversation(BaseModel):
class ChatbotConversation(PowerOnModel):
"""Chatbot conversation container. Per feature-instance isolation via featureInstanceId."""
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key")
featureInstanceId: str = Field(description="Feature instance ID for per-instance isolation")
@ -328,9 +329,8 @@ class ChatObjects:
objectFields[fieldName] = value
else:
# Field not in model - treat as scalar if simple, otherwise filter out
# BUT: always include metadata fields (_createdBy, _createdAt, etc.) as they're handled by connector
# Underscore-prefixed keys (e.g. UI meta) pass through; sys* live on PowerOnModel subclasses
if fieldName.startswith("_"):
# Metadata fields should be passed through to connector
simpleFields[fieldName] = value
elif isinstance(value, (str, int, float, bool, type(None))):
simpleFields[fieldName] = value

View file

@ -7,6 +7,8 @@ Pydantic models for coaching contexts, sessions, messages, tasks, scores, and us
from typing import Optional, List, Dict, Any
from pydantic import BaseModel, Field
from enum import Enum
from modules.datamodels.datamodelBase import PowerOnModel
import uuid
@ -73,7 +75,7 @@ class CoachingScoreTrend(str, Enum):
# Database Models
# ============================================================================
class CoachingContext(BaseModel):
class CoachingContext(PowerOnModel):
"""A coaching context/dossier representing a topic the user is working on."""
id: str = Field(default_factory=lambda: str(uuid.uuid4()))
userId: str = Field(description="Owner user ID (strict ownership)")
@ -91,11 +93,9 @@ class CoachingContext(BaseModel):
lastSessionAt: Optional[str] = Field(default=None)
rollingOverview: Optional[str] = Field(default=None, description="AI summary of older sessions for long context history")
rollingOverviewUpToSessionCount: Optional[int] = Field(default=None, description="Session count covered by rollingOverview")
createdAt: Optional[str] = Field(default=None)
updatedAt: Optional[str] = Field(default=None)
class CoachingSession(BaseModel):
class CoachingSession(PowerOnModel):
"""A single coaching conversation session within a context."""
id: str = Field(default_factory=lambda: str(uuid.uuid4()))
contextId: str = Field(description="FK to CoachingContext")
@ -115,11 +115,9 @@ class CoachingSession(BaseModel):
emailSent: bool = Field(default=False)
startedAt: Optional[str] = Field(default=None)
endedAt: Optional[str] = Field(default=None)
createdAt: Optional[str] = Field(default=None)
updatedAt: Optional[str] = Field(default=None)
class CoachingMessage(BaseModel):
class CoachingMessage(PowerOnModel):
"""A single message in a coaching session."""
id: str = Field(default_factory=lambda: str(uuid.uuid4()))
sessionId: str = Field(description="FK to CoachingSession")
@ -130,10 +128,9 @@ class CoachingMessage(BaseModel):
contentType: CoachingMessageContentType = Field(default=CoachingMessageContentType.TEXT)
audioRef: Optional[str] = Field(default=None, description="Reference to audio file")
metadata: Optional[str] = Field(default=None, description="JSON: token count, voice info, etc.")
createdAt: Optional[str] = Field(default=None)
class CoachingTask(BaseModel):
class CoachingTask(PowerOnModel):
"""A task/checklist item assigned within a coaching context."""
id: str = Field(default_factory=lambda: str(uuid.uuid4()))
contextId: str = Field(description="FK to CoachingContext")
@ -146,11 +143,9 @@ class CoachingTask(BaseModel):
priority: CoachingTaskPriority = Field(default=CoachingTaskPriority.MEDIUM)
dueDate: Optional[str] = Field(default=None)
completedAt: Optional[str] = Field(default=None)
createdAt: Optional[str] = Field(default=None)
updatedAt: Optional[str] = Field(default=None)
class CoachingScore(BaseModel):
class CoachingScore(PowerOnModel):
"""A competence score for a dimension, recorded after a session."""
id: str = Field(default_factory=lambda: str(uuid.uuid4()))
contextId: str = Field(description="FK to CoachingContext")
@ -161,10 +156,9 @@ class CoachingScore(BaseModel):
score: float = Field(ge=0.0, le=100.0)
trend: CoachingScoreTrend = Field(default=CoachingScoreTrend.STABLE)
evidence: Optional[str] = Field(default=None, description="AI reasoning for the score")
createdAt: Optional[str] = Field(default=None)
class CoachingUserProfile(BaseModel):
class CoachingUserProfile(PowerOnModel):
"""Per-user coaching profile and preferences."""
id: str = Field(default_factory=lambda: str(uuid.uuid4()))
userId: str = Field(description="Owner user ID")
@ -178,15 +172,13 @@ class CoachingUserProfile(BaseModel):
totalSessions: int = Field(default=0)
totalMinutes: int = Field(default=0)
lastSessionAt: Optional[str] = Field(default=None)
createdAt: Optional[str] = Field(default=None)
updatedAt: Optional[str] = Field(default=None)
# ============================================================================
# Iteration 2: Personas
# ============================================================================
class CoachingPersona(BaseModel):
class CoachingPersona(PowerOnModel):
"""A roleplay persona for coaching sessions."""
id: str = Field(default_factory=lambda: str(uuid.uuid4()))
userId: str = Field(description="Owner user ID ('system' for builtins)")
@ -199,15 +191,13 @@ class CoachingPersona(BaseModel):
gender: Optional[str] = Field(default=None, description="m or f")
category: str = Field(default="builtin", description="'builtin' or 'custom'")
isActive: bool = Field(default=True)
createdAt: Optional[str] = Field(default=None)
updatedAt: Optional[str] = Field(default=None)
# ============================================================================
# Iteration 2: Badges / Gamification
# ============================================================================
class CoachingBadge(BaseModel):
class CoachingBadge(PowerOnModel):
"""An achievement badge awarded to a user."""
id: str = Field(default_factory=lambda: str(uuid.uuid4()))
userId: str = Field(description="Owner user ID")
@ -215,7 +205,6 @@ class CoachingBadge(BaseModel):
instanceId: str = Field(description="Feature instance ID")
badgeKey: str = Field(description="Badge identifier, e.g. 'streak_7'")
awardedAt: Optional[str] = Field(default=None)
createdAt: Optional[str] = Field(default=None)
# ============================================================================

View file

@ -6,6 +6,7 @@ import uuid
from enum import Enum
from typing import Optional
from pydantic import BaseModel, Field
from modules.datamodels.datamodelBase import PowerOnModel
from modules.shared.attributeUtils import registerModelLabels
@ -16,7 +17,7 @@ class DataScope(str, Enum):
GLOBAL = "global"
class DataNeutraliserConfig(BaseModel):
class DataNeutraliserConfig(PowerOnModel):
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Unique ID of the configuration", json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
mandateId: str = Field(description="ID of the mandate this configuration belongs to", json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": True})
featureInstanceId: str = Field(description="ID of the feature instance this configuration belongs to", json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": True})

View file

@ -7,6 +7,7 @@ Implements a general Swiss architecture planning data model.
from typing import List, Dict, Any, Optional, ForwardRef
from enum import Enum
from pydantic import BaseModel, Field
from modules.datamodels.datamodelBase import PowerOnModel
from modules.shared.attributeUtils import registerModelLabels
from modules.shared.timeUtils import getUtcTimestamp
import uuid
@ -178,7 +179,7 @@ class Dokument(BaseModel):
)
class Kontext(BaseModel):
class Kontext(PowerOnModel):
"""Supporting data object for flexible additional information."""
id: str = Field(
default_factory=lambda: str(uuid.uuid4()),
@ -248,7 +249,7 @@ class Land(BaseModel):
)
class Kanton(BaseModel):
class Kanton(PowerOnModel):
"""Cantonal level administrative entity."""
id: str = Field(
default_factory=lambda: str(uuid.uuid4()),
@ -368,7 +369,7 @@ class Gemeinde(BaseModel):
ParzelleRef = ForwardRef('Parzelle')
class Parzelle(BaseModel):
class Parzelle(PowerOnModel):
"""Represents a plot with all building law properties."""
id: str = Field(
default_factory=lambda: str(uuid.uuid4()),
@ -594,7 +595,7 @@ class Parzelle(BaseModel):
)
class Projekt(BaseModel):
class Projekt(PowerOnModel):
"""Core object representing a construction project."""
id: str = Field(
default_factory=lambda: str(uuid.uuid4()),

View file

@ -9,6 +9,8 @@ from pydantic import BaseModel, Field
from enum import Enum
import uuid
from modules.datamodels.datamodelBase import PowerOnModel
# ============================================================================
# Enums
@ -72,7 +74,7 @@ class TeamsbotTransferMode(str, Enum):
# Database Models (stored in PostgreSQL)
# ============================================================================
class TeamsbotSession(BaseModel):
class TeamsbotSession(PowerOnModel):
"""A Teams Bot meeting session."""
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Session ID")
instanceId: str = Field(description="Feature instance ID (FK)")
@ -90,11 +92,9 @@ class TeamsbotSession(BaseModel):
errorMessage: Optional[str] = Field(default=None, description="Error message if status is ERROR")
transcriptSegmentCount: int = Field(default=0, description="Number of transcript segments in this session")
botResponseCount: int = Field(default=0, description="Number of bot responses in this session")
creationDate: Optional[str] = Field(default=None, description="ISO timestamp of record creation")
lastModified: Optional[str] = Field(default=None, description="ISO timestamp of last modification")
class TeamsbotTranscript(BaseModel):
class TeamsbotTranscript(PowerOnModel):
"""A single transcript segment from the meeting."""
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Transcript segment ID")
sessionId: str = Field(description="Session ID (FK)")
@ -105,10 +105,9 @@ class TeamsbotTranscript(BaseModel):
language: Optional[str] = Field(default=None, description="Detected language code (e.g., de-DE)")
isFinal: bool = Field(default=True, description="Whether this is a final or interim result")
source: Optional[str] = Field(default=None, description="Source: caption, audioCapture, chat, chatHistory, speakerHint")
creationDate: Optional[str] = Field(default=None, description="ISO timestamp of record creation")
class TeamsbotBotResponse(BaseModel):
class TeamsbotBotResponse(PowerOnModel):
"""A bot response generated during a meeting session."""
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Response ID")
sessionId: str = Field(description="Session ID (FK)")
@ -121,14 +120,13 @@ class TeamsbotBotResponse(BaseModel):
processingTime: float = Field(default=0.0, description="Processing time in seconds")
priceCHF: float = Field(default=0.0, description="Cost of this AI call in CHF")
timestamp: Optional[str] = Field(default=None, description="ISO timestamp of the response")
creationDate: Optional[str] = Field(default=None, description="ISO timestamp of record creation")
# ============================================================================
# System Bot Accounts (stored in PostgreSQL, credentials encrypted)
# ============================================================================
class TeamsbotSystemBot(BaseModel):
class TeamsbotSystemBot(PowerOnModel):
"""A system bot account for authenticated meeting joins.
Credentials are stored encrypted in the database, NOT in the UI-visible config.
Only mandate admins can manage system bots."""
@ -138,15 +136,13 @@ class TeamsbotSystemBot(BaseModel):
email: str = Field(description="Microsoft account email")
encryptedPassword: str = Field(description="Encrypted Microsoft account password")
isActive: bool = Field(default=True, description="Whether this bot account is active")
creationDate: Optional[str] = Field(default=None, description="ISO timestamp of creation")
lastModified: Optional[str] = Field(default=None, description="ISO timestamp of last modification")
# ============================================================================
# User Account Credentials (stored in PostgreSQL, credentials encrypted)
# ============================================================================
class TeamsbotUserAccount(BaseModel):
class TeamsbotUserAccount(PowerOnModel):
"""Saved Microsoft credentials for 'Mein Account' joins.
Each user can store their own MS credentials per mandate.
Password is encrypted; on login only MFA confirmation is needed."""
@ -156,15 +152,13 @@ class TeamsbotUserAccount(BaseModel):
email: str = Field(description="Microsoft account email")
encryptedPassword: str = Field(description="Encrypted Microsoft account password")
displayName: Optional[str] = Field(default=None, description="Display name derived from MS account")
creationDate: Optional[str] = Field(default=None, description="ISO timestamp of creation")
lastModified: Optional[str] = Field(default=None, description="ISO timestamp of last modification")
# ============================================================================
# Per-User Settings (stored in PostgreSQL, per user per instance)
# ============================================================================
class TeamsbotUserSettings(BaseModel):
class TeamsbotUserSettings(PowerOnModel):
"""Per-user settings for the Teams Bot feature.
Each user has their own settings per feature instance.
These override the instance-level defaults (TeamsbotConfig)."""
@ -182,8 +176,6 @@ class TeamsbotUserSettings(BaseModel):
triggerCooldownSeconds: Optional[int] = Field(default=None, description="Trigger cooldown override")
contextWindowSegments: Optional[int] = Field(default=None, description="Context window override")
debugMode: Optional[bool] = Field(default=None, description="Debug mode override")
creationDate: Optional[str] = Field(default=None, description="ISO timestamp of creation")
lastModified: Optional[str] = Field(default=None, description="ISO timestamp of last modification")
# ============================================================================

View file

@ -5,11 +5,13 @@
from enum import Enum
from typing import Optional
from pydantic import BaseModel, Field
from modules.datamodels.datamodelBase import PowerOnModel
from modules.shared.attributeUtils import registerModelLabels
import uuid
class TrusteeOrganisation(BaseModel):
class TrusteeOrganisation(PowerOnModel):
"""Represents trustee organisations (companies) within the Trustee feature."""
id: str = Field( # Unique string label (PK), not UUID
description="Unique organisation identifier (label)",
@ -55,7 +57,7 @@ class TrusteeOrganisation(BaseModel):
}
)
# System attributes are automatically set by DatabaseConnector:
# _createdAt, _modifiedAt, _createdBy, _modifiedBy
# sysCreatedAt, sysModifiedAt, sysCreatedBy, sysModifiedBy (PowerOnModel)
registerModelLabels(
@ -71,7 +73,7 @@ registerModelLabels(
)
class TrusteeRole(BaseModel):
class TrusteeRole(PowerOnModel):
"""Defines roles within the Trustee feature."""
id: str = Field( # Unique string label (PK), not UUID
description="Unique role identifier (label)",
@ -122,7 +124,7 @@ registerModelLabels(
)
class TrusteeAccess(BaseModel):
class TrusteeAccess(PowerOnModel):
"""Defines user access to organisations with specific roles."""
id: str = Field(
default_factory=lambda: str(uuid.uuid4()),
@ -207,7 +209,7 @@ registerModelLabels(
)
class TrusteeContract(BaseModel):
class TrusteeContract(PowerOnModel):
"""Defines customer contracts within organisations."""
id: str = Field(
default_factory=lambda: str(uuid.uuid4()),
@ -289,7 +291,7 @@ class TrusteeDocumentTypeEnum(str, Enum):
AUTO = "auto"
class TrusteeDocument(BaseModel):
class TrusteeDocument(PowerOnModel):
"""Contains document references for bookings.
Documents reference files in the central Files table via fileId.
@ -413,7 +415,7 @@ registerModelLabels(
)
class TrusteePosition(BaseModel):
class TrusteePosition(PowerOnModel):
"""Contains booking positions (expense entries).
A position can have up to two document references: documentId (Beleg) and bankDocumentId (Bank-Referenz).
@ -696,10 +698,6 @@ class TrusteePosition(BaseModel):
}
)
# Allow extra fields like _createdAt from database
model_config = {"extra": "allow"}
registerModelLabels(
"TrusteePosition",
{"en": "Position", "fr": "Position", "de": "Position"},
@ -739,7 +737,7 @@ registerModelLabels(
# ── TrusteeData* tables (synced from external accounting apps for analysis) ──
class TrusteeDataAccount(BaseModel):
class TrusteeDataAccount(PowerOnModel):
"""Chart of accounts synced from external accounting system."""
id: str = Field(default_factory=lambda: str(uuid.uuid4()))
accountNumber: str = Field(description="Account number (e.g. '1020')")
@ -769,7 +767,7 @@ registerModelLabels(
)
class TrusteeDataJournalEntry(BaseModel):
class TrusteeDataJournalEntry(PowerOnModel):
"""Journal entry header synced from external accounting system."""
id: str = Field(default_factory=lambda: str(uuid.uuid4()))
externalId: Optional[str] = Field(default=None, description="ID in the source system")
@ -799,7 +797,7 @@ registerModelLabels(
)
class TrusteeDataJournalLine(BaseModel):
class TrusteeDataJournalLine(PowerOnModel):
"""Journal entry line (debit/credit) synced from external accounting system."""
id: str = Field(default_factory=lambda: str(uuid.uuid4()))
journalEntryId: str = Field(description="FK → TrusteeDataJournalEntry.id")
@ -833,7 +831,7 @@ registerModelLabels(
)
class TrusteeDataContact(BaseModel):
class TrusteeDataContact(PowerOnModel):
"""Customer or vendor synced from external accounting system."""
id: str = Field(default_factory=lambda: str(uuid.uuid4()))
externalId: Optional[str] = Field(default=None, description="ID in the source system")
@ -873,7 +871,7 @@ registerModelLabels(
)
class TrusteeDataAccountBalance(BaseModel):
class TrusteeDataAccountBalance(PowerOnModel):
"""Account balance per period, derived from journal lines or directly from accounting system."""
id: str = Field(default_factory=lambda: str(uuid.uuid4()))
accountNumber: str = Field(description="Account number")
@ -907,7 +905,7 @@ registerModelLabels(
)
class TrusteeAccountingConfig(BaseModel):
class TrusteeAccountingConfig(PowerOnModel):
"""Per-instance accounting system configuration with encrypted credentials.
Each feature instance can connect to exactly one accounting system.
@ -946,7 +944,7 @@ registerModelLabels(
)
class TrusteeAccountingSync(BaseModel):
class TrusteeAccountingSync(PowerOnModel):
"""Tracks which position was synced to which external system and when.
Used for duplicate prevention, audit trail, and retry logic.

View file

@ -1152,7 +1152,7 @@ class TrusteeObjects:
logger.warning(f"Document {documentId} not found")
return None
createdBy = existing.get("_createdBy")
createdBy = existing.get("sysCreatedBy")
# Check system RBAC permission (userreport can only edit their own records)
if not self.checkCombinedPermission(TrusteeDocument, "update", recordCreatedBy=createdBy):
@ -1178,7 +1178,7 @@ class TrusteeObjects:
logger.warning(f"Document {documentId} not found")
return False
createdBy = existing.get("_createdBy")
createdBy = existing.get("sysCreatedBy")
if not self.checkCombinedPermission(TrusteeDocument, "delete", recordCreatedBy=createdBy):
logger.warning(f"User {self.userId} lacks permission to delete document")
@ -1198,7 +1198,7 @@ class TrusteeObjects:
def _toTrusteePositionOrDelete(self, rawRecord: Dict[str, Any], deleteCorrupt: bool = True) -> Optional[TrusteePosition]:
"""Build TrusteePosition safely; optionally delete irreparably corrupt records."""
cleanRecord = {k: v for k, v in (rawRecord or {}).items() if not k.startswith("_") or k == "_createdAt"}
cleanRecord = {k: v for k, v in (rawRecord or {}).items() if not k.startswith("_") or k == "sysCreatedAt"}
if not cleanRecord:
return None
@ -1271,7 +1271,7 @@ class TrusteeObjects:
"""Get all positions with RBAC filtering and optional DB-level pagination.
Filtering, sorting, and pagination are handled at the SQL level.
Post-processing cleans internal fields (keeps _createdAt) and validates
Post-processing cleans internal fields (keeps sysCreatedAt) and validates
each record via _toTrusteePositionOrDelete (corrupt rows are deleted).
NOTE(post-process): totalItems may slightly overcount when corrupt legacy
@ -1288,7 +1288,7 @@ class TrusteeObjects:
featureCode=self.FEATURE_CODE
)
keepFields = {'_createdAt'}
keepFields = {'sysCreatedAt'}
def _cleanAndValidate(records):
items = []
@ -1369,7 +1369,7 @@ class TrusteeObjects:
logger.warning(f"Position {positionId} not found")
return None
createdBy = existing.get("_createdBy")
createdBy = existing.get("sysCreatedBy")
# Check system RBAC permission (userreport can only edit their own records)
if not self.checkCombinedPermission(TrusteePosition, "update", recordCreatedBy=createdBy):
@ -1391,7 +1391,7 @@ class TrusteeObjects:
logger.warning(f"Position {positionId} not found")
return False
createdBy = existing.get("_createdBy")
createdBy = existing.get("sysCreatedBy")
if not self.checkCombinedPermission(TrusteePosition, "delete", recordCreatedBy=createdBy):
logger.warning(f"User {self.userId} lacks permission to delete position")

View file

@ -4,11 +4,12 @@
from typing import Optional
from pydantic import BaseModel, Field
from modules.datamodels.datamodelBase import PowerOnModel
from modules.shared.attributeUtils import registerModelLabels
import uuid
class WorkspaceUserSettings(BaseModel):
class WorkspaceUserSettings(PowerOnModel):
"""Per-user workspace settings. None values mean 'use instance default'."""
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key", json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
userId: str = Field(description="User ID", json_schema_extra={"frontend_type": "text", "frontend_readonly": True, "frontend_required": True})

View file

@ -128,7 +128,7 @@ TEMPLATE_ROLES = [
"accessRules": [
{"context": "UI", "item": None, "view": True},
{"context": "RESOURCE", "item": None, "view": True},
# DATA: never ALL in shared instances — every role (including admin) sees only _createdBy = self
# DATA: never ALL in shared instances — every role (including admin) sees only sysCreatedBy = self
{"context": "DATA", "item": None, "view": True, "read": "m", "create": "m", "update": "m", "delete": "m"},
]
},

View file

@ -11,7 +11,7 @@ Multi-Tenant Design:
"""
import logging
from typing import Optional, Dict
from typing import Optional, Dict, Set, Tuple
from passlib.context import CryptContext
from modules.connectors.connectorDbPostgre import DatabaseConnector
from modules.shared.configuration import APP_CONFIG
@ -38,6 +38,120 @@ pwdContext = CryptContext(schemes=["argon2"], deprecated="auto")
# Cache für Role-IDs (roleLabel -> roleId)
_roleIdCache: Dict[str, str] = {}
# Historical PostgreSQL column identifiers (pre-sys*). Used only in _migrateSystemFieldColumns SQL.
# Each pair maps a legacy underscore-prefixed physical column to its PowerOnModel
# replacement; the backfill copies old -> new only where the new column IS NULL.
_LEGACY_SYS_PAIR_RENAMES: Tuple[Tuple[str, str], ...] = (
    ("_createdAt", "sysCreatedAt"),
    ("_createdBy", "sysCreatedBy"),
    ("_modifiedAt", "sysModifiedAt"),
    ("_modifiedBy", "sysModifiedBy"),
)
def _getPublicTableColumns(db: DatabaseConnector, tableName: str) -> Set[str]:
    """Column names for a quoted PostgreSQL table (exact case in information_schema)."""
    # information_schema stores quoted identifiers case-sensitively, so the
    # exact table name (no folding) is used as the bind parameter.
    columnQuery = """
                SELECT column_name FROM information_schema.columns
                WHERE table_schema = 'public' AND table_name = %s
                """
    try:
        with db.connection.cursor() as cur:
            cur.execute(columnQuery, (tableName,))
            rows = cur.fetchall()
        return {row["column_name"] for row in rows}
    except Exception as e:
        # Best-effort introspection: an empty set makes callers skip this table.
        logger.warning(f"_getPublicTableColumns failed for {tableName}: {e}")
        return set()
def _backfillColumn(db: DatabaseConnector, table: str, oldCol: str, newCol: str, skipLabel: str) -> int:
    """Copy oldCol into newCol for one table where newCol IS NULL (idempotent).

    Commits per statement so a failure on one table cannot roll back earlier
    work. Returns the number of rows updated; 0 on error (error is logged at
    debug level with the caller-supplied skipLabel and the transaction is
    rolled back).
    """
    try:
        with db.connection.cursor() as cursor:
            # Identifiers are double-quoted; both names come from trusted,
            # hard-coded migration tables (not user input).
            cursor.execute(
                f'UPDATE "{table}" SET "{newCol}" = "{oldCol}" '
                f'WHERE "{newCol}" IS NULL AND "{oldCol}" IS NOT NULL'
            )
            affected = cursor.rowcount
        db.connection.commit()
        return affected
    except Exception as e:
        db.connection.rollback()
        logger.debug(f"{skipLabel} skip {table}.{oldCol}->{newCol}: {e}")
        return 0


def _migrateSystemFieldColumns(db: DatabaseConnector) -> None:
    """Backfill sys* from older physical columns and business duplicates where sys* IS NULL (idempotent).

    Two passes per base table in schema 'public':
      1. Generic legacy pairs (_createdAt/_createdBy/... -> sys*), applied to
         every table that still has both columns.
      2. Table-specific business duplicates (e.g. creationDate -> sysCreatedAt)
         from businessFieldMigrations below.
    Safe to re-run: only NULL sys* cells are filled. Errors on individual
    tables are skipped (logged at debug); a top-level failure is logged as
    error and rolled back.
    """
    businessFieldMigrations: Dict[str, Dict[str, str]] = {
        "FileFolder": {"createdAt": "sysCreatedAt"},
        "FileItem": {"creationDate": "sysCreatedAt"},
        "Invitation": {"createdAt": "sysCreatedAt", "createdBy": "sysCreatedBy"},
        "FeatureDataSource": {"createdAt": "sysCreatedAt"},
        "DataSource": {"createdAt": "sysCreatedAt"},
        "UserNotification": {"createdAt": "sysCreatedAt"},
        "Token": {"createdAt": "sysCreatedAt"},
        "MessagingSubscription": {"createdBy": "sysCreatedBy", "modifiedBy": "sysModifiedBy"},
        "CoachingContext": {"createdAt": "sysCreatedAt"},
        "CoachingSession": {"createdAt": "sysCreatedAt", "updatedAt": "sysModifiedAt"},
        "CoachingMessage": {"createdAt": "sysCreatedAt"},
        "CoachingTask": {"createdAt": "sysCreatedAt", "updatedAt": "sysModifiedAt"},
        "CoachingScore": {"createdAt": "sysCreatedAt"},
        "CoachingUserProfile": {"createdAt": "sysCreatedAt", "updatedAt": "sysModifiedAt"},
        "CoachingPersona": {"createdAt": "sysCreatedAt", "updatedAt": "sysModifiedAt"},
        "CoachingBadge": {"createdAt": "sysCreatedAt"},
        "TeamsbotSession": {"creationDate": "sysCreatedAt", "lastModified": "sysModifiedAt"},
        "TeamsbotTranscript": {"creationDate": "sysCreatedAt"},
        "TeamsbotBotResponse": {"creationDate": "sysCreatedAt"},
        "TeamsbotSystemBot": {"creationDate": "sysCreatedAt", "lastModified": "sysModifiedAt"},
        "TeamsbotUserAccount": {"creationDate": "sysCreatedAt", "lastModified": "sysModifiedAt"},
        "TeamsbotUserSettings": {"creationDate": "sysCreatedAt", "lastModified": "sysModifiedAt"},
        # NOTE(review): the generic pair pass already covers these two for
        # every table (including _system); kept explicit for documentation,
        # harmless because the backfill is idempotent.
        "_system": {"_createdAt": "sysCreatedAt", "_modifiedAt": "sysModifiedAt"},
    }
    try:
        db._ensure_connection()
        with db.connection.cursor() as cursor:
            cursor.execute(
                """
                SELECT table_name FROM information_schema.tables
                WHERE table_schema = 'public' AND table_type = 'BASE TABLE'
                """
            )
            tableNames = [row["table_name"] for row in cursor.fetchall()]

        totalUpdates = 0
        for table in tableNames:
            cols = _getPublicTableColumns(db, table)
            if not cols:
                # Introspection failed or table vanished — nothing to do here.
                continue
            # Pass 1: generic legacy underscore columns -> sys*.
            for old_col, new_col in _LEGACY_SYS_PAIR_RENAMES:
                if old_col in cols and new_col in cols:
                    totalUpdates += _backfillColumn(db, table, old_col, new_col, "Column migrate")
            # Pass 2: per-table business-field duplicates -> sys*.
            for old_col, new_col in businessFieldMigrations.get(table, {}).items():
                if old_col in cols and new_col in cols:
                    totalUpdates += _backfillColumn(db, table, old_col, new_col, "Business field migrate")

        if totalUpdates:
            logger.info(f"_migrateSystemFieldColumns: backfilled {totalUpdates} cell(s) on {db.dbDatabase}")
    except Exception as e:
        logger.error(f"_migrateSystemFieldColumns failed: {e}")
        try:
            db.connection.rollback()
        except Exception:
            pass
def initBootstrap(db: DatabaseConnector) -> None:
"""
@ -50,6 +164,9 @@ def initBootstrap(db: DatabaseConnector) -> None:
# Initialize root mandate
mandateId = initRootMandate(db)
# Backfill sys* columns from legacy _* / duplicate business fields (idempotent)
_migrateSystemFieldColumns(db)
# Migrate existing mandate records: description -> label
_migrateMandateDescriptionToLabel(db)
@ -146,13 +263,13 @@ def initAutomationTemplates(dbApp: DatabaseConnector, adminUserId: Optional[str]
"""
Seed initial automation templates from subAutomationTemplates.py.
Only runs if no templates exist yet (bootstrap).
Creates templates with _createdBy = admin user (SysAdmin privilege).
Creates templates with sysCreatedBy = admin user (SysAdmin privilege).
NOTE: AutomationTemplate lives in poweron_automation database, not poweron_app!
Args:
dbApp: Database connector for poweron_app (used to get admin user if needed)
adminUserId: Admin user ID for _createdBy field
adminUserId: Admin user ID for sysCreatedBy field
"""
import json
from modules.features.automation.subAutomationTemplates import AUTOMATION_TEMPLATES

View file

@ -187,12 +187,8 @@ class AppObjects:
# Complex objects that should be filtered out
objectFields[fieldName] = value
else:
# Field not in model - treat as scalar if simple, otherwise filter out
# BUT: always include metadata fields (_createdBy, _createdAt, etc.) as they're handled by connector
if fieldName.startswith("_"):
# Metadata fields should be passed through to connector
simpleFields[fieldName] = value
elif isinstance(value, (str, int, float, bool, type(None))):
# Field not in model - pass through scalars; nested objects go to objectFields
if isinstance(value, (str, int, float, bool, type(None))):
simpleFields[fieldName] = value
else:
objectFields[fieldName] = value
@ -528,7 +524,7 @@ class AppObjects:
items = []
for record in result["items"]:
cleanedUser = {k: v for k, v in record.items() if not k.startswith("_")}
cleanedUser = dict(record)
if cleanedUser.get("roleLabels") is None:
cleanedUser["roleLabels"] = []
items.append(User(**cleanedUser))
@ -560,7 +556,7 @@ class AppObjects:
# Return first matching user (should be unique)
userDict = users[0]
# Filter out database-specific fields
cleanedUser = {k: v for k, v in userDict.items() if not k.startswith("_")}
cleanedUser = dict(userDict)
# Ensure roleLabels is always a list, not None
if cleanedUser.get("roleLabels") is None:
cleanedUser["roleLabels"] = []
@ -586,7 +582,7 @@ class AppObjects:
# User already filtered by RBAC, just clean fields
user_dict = users[0]
cleanedUser = {k: v for k, v in user_dict.items() if not k.startswith("_")}
cleanedUser = dict(user_dict)
# Ensure roleLabels is always a list, not None
if cleanedUser.get("roleLabels") is None:
cleanedUser["roleLabels"] = []
@ -648,12 +644,10 @@ class AppObjects:
if not self._verifyPassword(password, userRecord["hashedPassword"]):
raise ValueError("Invalid password")
# Return clean User object (without password hash and internal fields)
cleanedUser = {k: v for k, v in userRecord.items() if not k.startswith("_") and k != "hashedPassword" and k != "resetToken" and k != "resetTokenExpires"}
# Ensure roleLabels is always a list
if cleanedUser.get("roleLabels") is None:
cleanedUser["roleLabels"] = []
return User(**cleanedUser)
user = User.model_validate(userRecord)
if user.roleLabels is None:
return user.model_copy(update={"roleLabels": []})
return user
def createUser(
self,
@ -877,7 +871,7 @@ class AppObjects:
result = []
for userRecord in users:
cleanedUser = {k: v for k, v in userRecord.items() if not k.startswith("_")}
cleanedUser = dict(userRecord)
if cleanedUser.get("roleLabels") is None:
cleanedUser["roleLabels"] = []
result.append(User(**cleanedUser))
@ -917,7 +911,7 @@ class AppObjects:
)
if users:
cleanedUser = {k: v for k, v in users[0].items() if not k.startswith("_")}
cleanedUser = dict(users[0])
if cleanedUser.get("roleLabels") is None:
cleanedUser["roleLabels"] = []
return User(**cleanedUser)
@ -978,7 +972,7 @@ class AppObjects:
)
if users:
cleanedUser = {k: v for k, v in users[0].items() if not k.startswith("_")}
cleanedUser = dict(users[0])
if cleanedUser.get("roleLabels") is None:
cleanedUser["roleLabels"] = []
return User(**cleanedUser)
@ -1041,7 +1035,7 @@ class AppObjects:
logger.warning(f"Reset token expired for user {userRecord.get('id')}")
return None
cleanedUser = {k: v for k, v in userRecord.items() if not k.startswith("_")}
cleanedUser = dict(userRecord)
if cleanedUser.get("roleLabels") is None:
cleanedUser["roleLabels"] = []
return User(**cleanedUser)
@ -1329,7 +1323,7 @@ class AppObjects:
# Filter out database-specific fields
filteredMandates = []
for mandate in allMandates:
cleanedMandate = {k: v for k, v in mandate.items() if not k.startswith("_")}
cleanedMandate = dict(mandate)
filteredMandates.append(cleanedMandate)
# If no pagination requested, return all items
@ -1378,7 +1372,7 @@ class AppObjects:
# Filter out database-specific fields
filteredMandates = []
for mandate in mandates:
cleanedMandate = {k: v for k, v in mandate.items() if not k.startswith("_")}
cleanedMandate = dict(mandate)
filteredMandates.append(cleanedMandate)
if not filteredMandates:
return None
@ -1794,7 +1788,7 @@ class AppObjects:
)
if not records:
return None
cleanedRecord = {k: v for k, v in records[0].items() if not k.startswith("_")}
cleanedRecord = dict(records[0])
return UserMandate(**cleanedRecord)
except Exception as e:
logger.error(f"Error getting UserMandate: {e}")
@ -1817,7 +1811,7 @@ class AppObjects:
)
result = []
for record in records:
cleanedRecord = {k: v for k, v in record.items() if not k.startswith("_")}
cleanedRecord = dict(record)
result.append(UserMandate(**cleanedRecord))
return result
except Exception as e:
@ -1869,7 +1863,7 @@ class AppObjects:
self._ensureUserBillingAccount(userId, mandateId)
self._syncSubscriptionQuantity(mandateId)
cleanedRecord = {k: v for k, v in createdRecord.items() if not k.startswith("_")}
cleanedRecord = dict(createdRecord)
return UserMandate(**cleanedRecord)
except Exception as e:
logger.error(f"Error creating UserMandate: {e}")
@ -1999,7 +1993,7 @@ class AppObjects:
)
result = []
for record in records:
cleanedRecord = {k: v for k, v in record.items() if not k.startswith("_")}
cleanedRecord = dict(record)
result.append(UserMandate(**cleanedRecord))
return result
except Exception as e:
@ -2023,7 +2017,7 @@ class AppObjects:
)
result = []
for record in records:
cleanedRecord = {k: v for k, v in record.items() if not k.startswith("_")}
cleanedRecord = dict(record)
result.append(UserMandateRole(**cleanedRecord))
return result
except Exception as e:
@ -2120,7 +2114,7 @@ class AppObjects:
recordFilter={"userMandateId": userMandateId, "roleId": roleId}
)
if existing:
cleanedRecord = {k: v for k, v in existing[0].items() if not k.startswith("_")}
cleanedRecord = dict(existing[0])
return UserMandateRole(**cleanedRecord)
userMandateRole = UserMandateRole(
@ -2128,7 +2122,7 @@ class AppObjects:
roleId=roleId
)
createdRecord = self.db.recordCreate(UserMandateRole, userMandateRole.model_dump())
cleanedRecord = {k: v for k, v in createdRecord.items() if not k.startswith("_")}
cleanedRecord = dict(createdRecord)
return UserMandateRole(**cleanedRecord)
except Exception as e:
logger.error(f"Error adding role to UserMandate: {e}")
@ -2193,7 +2187,7 @@ class AppObjects:
)
if not records:
return None
cleanedRecord = {k: v for k, v in records[0].items() if not k.startswith("_")}
cleanedRecord = dict(records[0])
return FeatureAccess(**cleanedRecord)
except Exception as e:
logger.error(f"Error getting FeatureAccess: {e}")
@ -2216,7 +2210,7 @@ class AppObjects:
)
result = []
for record in records:
cleanedRecord = {k: v for k, v in record.items() if not k.startswith("_")}
cleanedRecord = dict(record)
result.append(FeatureAccess(**cleanedRecord))
return result
except Exception as e:
@ -2240,7 +2234,7 @@ class AppObjects:
)
result = []
for record in records:
cleanedRecord = {k: v for k, v in record.items() if not k.startswith("_")}
cleanedRecord = dict(record)
result.append(FeatureAccess(**cleanedRecord))
return result
except Exception as e:
@ -2289,7 +2283,7 @@ class AppObjects:
)
self.db.recordCreate(FeatureAccessRole, featureAccessRole.model_dump())
cleanedRecord = {k: v for k, v in createdRecord.items() if not k.startswith("_")}
cleanedRecord = dict(createdRecord)
return FeatureAccess(**cleanedRecord)
except Exception as e:
logger.error(f"Error creating FeatureAccess: {e}")
@ -2427,7 +2421,7 @@ class AppObjects:
try:
records = self.db.getRecordset(Invitation, recordFilter={"id": invitationId})
if records:
cleanedRecord = {k: v for k, v in records[0].items() if not k.startswith("_")}
cleanedRecord = dict(records[0])
return Invitation(**cleanedRecord)
return None
except Exception as e:
@ -2447,7 +2441,7 @@ class AppObjects:
try:
records = self.db.getRecordset(Invitation, recordFilter={"token": token})
if records:
cleanedRecord = {k: v for k, v in records[0].items() if not k.startswith("_")}
cleanedRecord = dict(records[0])
return Invitation(**cleanedRecord)
return None
except Exception as e:
@ -2468,7 +2462,7 @@ class AppObjects:
records = self.db.getRecordset(Invitation, recordFilter={"mandateId": mandateId})
result = []
for record in records:
cleanedRecord = {k: v for k, v in record.items() if not k.startswith("_")}
cleanedRecord = dict(record)
result.append(Invitation(**cleanedRecord))
return result
except Exception as e:
@ -2486,10 +2480,10 @@ class AppObjects:
List of Invitation objects
"""
try:
records = self.db.getRecordset(Invitation, recordFilter={"createdBy": creatorId})
records = self.db.getRecordset(Invitation, recordFilter={"sysCreatedBy": creatorId})
result = []
for record in records:
cleanedRecord = {k: v for k, v in record.items() if not k.startswith("_")}
cleanedRecord = dict(record)
result.append(Invitation(**cleanedRecord))
return result
except Exception as e:
@ -2510,7 +2504,7 @@ class AppObjects:
records = self.db.getRecordset(Invitation, recordFilter={"usedBy": usedById})
result = []
for record in records:
cleanedRecord = {k: v for k, v in record.items() if not k.startswith("_")}
cleanedRecord = dict(record)
result.append(Invitation(**cleanedRecord))
return result
except Exception as e:
@ -2531,7 +2525,7 @@ class AppObjects:
records = self.db.getRecordset(Invitation, recordFilter={"targetUsername": targetUsername})
result = []
for record in records:
cleanedRecord = {k: v for k, v in record.items() if not k.startswith("_")}
cleanedRecord = dict(record)
result.append(Invitation(**cleanedRecord))
return result
except Exception as e:
@ -2558,13 +2552,10 @@ class AppObjects:
items = []
for record in result["items"]:
cleanedRecord = {
k: v for k, v in record.items()
if not k.startswith("_") and k not in ["hashedPassword", "resetToken", "resetTokenExpires"]
}
if cleanedRecord.get("roleLabels") is None:
cleanedRecord["roleLabels"] = []
items.append(User(**cleanedRecord))
user = User.model_validate(record)
if user.roleLabels is None:
user = user.model_copy(update={"roleLabels": []})
items.append(user)
if pagination is None:
return items
@ -2593,7 +2584,7 @@ class AppObjects:
try:
records = self.db.getRecordset(UserMandate, recordFilter={"id": userMandateId})
if records:
cleanedRecord = {k: v for k, v in records[0].items() if not k.startswith("_")}
cleanedRecord = dict(records[0])
return UserMandate(**cleanedRecord)
return None
except Exception as e:
@ -2614,7 +2605,7 @@ class AppObjects:
records = self.db.getRecordset(UserMandateRole, recordFilter={"roleId": roleId})
result = []
for record in records:
cleanedRecord = {k: v for k, v in record.items() if not k.startswith("_")}
cleanedRecord = dict(record)
result.append(UserMandateRole(**cleanedRecord))
return result
except Exception as e:
@ -2634,7 +2625,7 @@ class AppObjects:
try:
records = self.db.getRecordset(FeatureInstance, recordFilter={"id": instanceId})
if records:
cleanedRecord = {k: v for k, v in records[0].items() if not k.startswith("_")}
cleanedRecord = dict(records[0])
return FeatureInstance(**cleanedRecord)
return None
except Exception as e:
@ -2654,7 +2645,7 @@ class AppObjects:
try:
records = self.db.getRecordset(Feature, recordFilter={"code": featureCode})
if records:
cleanedRecord = {k: v for k, v in records[0].items() if not k.startswith("_")}
cleanedRecord = dict(records[0])
return Feature(**cleanedRecord)
return None
except Exception as e:
@ -2679,7 +2670,7 @@ class AppObjects:
records = self.db.getRecordset(FeatureInstance, recordFilter=recordFilter)
result = []
for record in records:
cleanedRecord = {k: v for k, v in record.items() if not k.startswith("_")}
cleanedRecord = dict(record)
result.append(FeatureInstance(**cleanedRecord))
return result
except Exception as e:
@ -2703,7 +2694,7 @@ class AppObjects:
try:
records = self.db.getRecordset(UserNotification, recordFilter={"id": notificationId})
if records:
cleanedRecord = {k: v for k, v in records[0].items() if not k.startswith("_")}
cleanedRecord = dict(records[0])
return UserNotification(**cleanedRecord)
return None
except Exception as e:
@ -2734,10 +2725,10 @@ class AppObjects:
records = self.db.getRecordset(UserNotification, recordFilter=recordFilter)
result = []
for record in records:
cleanedRecord = {k: v for k, v in record.items() if not k.startswith("_")}
cleanedRecord = dict(record)
result.append(UserNotification(**cleanedRecord))
# Sort by createdAt descending
result.sort(key=lambda x: x.createdAt or 0, reverse=True)
# Sort by sysCreatedAt descending
result.sort(key=lambda x: x.sysCreatedAt or 0, reverse=True)
if limit:
result = result[:limit]
return result
@ -2762,7 +2753,7 @@ class AppObjects:
try:
records = self.db.getRecordset(AccessRule, recordFilter={"id": ruleId})
if records:
cleanedRecord = {k: v for k, v in records[0].items() if not k.startswith("_")}
cleanedRecord = dict(records[0])
return AccessRule(**cleanedRecord)
return None
except Exception as e:
@ -2783,7 +2774,7 @@ class AppObjects:
records = self.db.getRecordset(AccessRule, recordFilter={"roleId": roleId})
result = []
for record in records:
cleanedRecord = {k: v for k, v in record.items() if not k.startswith("_")}
cleanedRecord = dict(record)
result.append(AccessRule(**cleanedRecord))
return result
except Exception as e:
@ -2804,7 +2795,7 @@ class AppObjects:
records = self.db.getRecordset(Role, recordFilter={"featureInstanceId": featureInstanceId})
result = []
for record in records:
cleanedRecord = {k: v for k, v in record.items() if not k.startswith("_")}
cleanedRecord = dict(record)
result.append(Role(**cleanedRecord))
return result
except Exception as e:
@ -2829,7 +2820,7 @@ class AppObjects:
records = self.db.getRecordset(Role, recordFilter=recordFilter)
result = []
for record in records:
cleanedRecord = {k: v for k, v in record.items() if not k.startswith("_")}
cleanedRecord = dict(record)
result.append(Role(**cleanedRecord))
return result
except Exception as e:
@ -3028,7 +3019,7 @@ class AppObjects:
)
result = []
for token_dict in tokens:
cleanedRecord = {k: v for k, v in token_dict.items() if not k.startswith("_")}
cleanedRecord = dict(token_dict)
result.append(Token(**cleanedRecord))
return result
except Exception as e:
@ -3049,7 +3040,7 @@ class AppObjects:
)
result = []
for token_dict in tokens:
cleanedRecord = {k: v for k, v in token_dict.items() if not k.startswith("_")}
cleanedRecord = dict(token_dict)
result.append(Token(**cleanedRecord))
return result
except Exception as e:
@ -3363,7 +3354,7 @@ class AppObjects:
# Filter out database-specific fields
filteredRules = []
for rule in rules:
cleanedRule = {k: v for k, v in rule.items() if not k.startswith("_")}
cleanedRule = dict(rule)
filteredRules.append(cleanedRule)
# If no pagination requested, return all items
@ -3547,7 +3538,7 @@ class AppObjects:
Role,
recordFilter={"mandateId": mandateId, "featureInstanceId": None}
)
return [Role(**{k: v for k, v in r.items() if not k.startswith("_")}) for r in roles]
return [Role(**dict(r)) for r in roles]
except Exception as e:
logger.error(f"Error getting roles for mandate {mandateId}: {e}")
return []
@ -3568,7 +3559,7 @@ class AppObjects:
items = []
for record in result["items"]:
cleanedRole = {k: v for k, v in record.items() if not k.startswith("_")}
cleanedRole = dict(record)
items.append(Role(**cleanedRole))
if pagination is None:

View file

@ -674,7 +674,7 @@ class BillingObjects:
if startDate or endDate:
filtered = []
for t in results:
createdAt = t.get("_createdAt")
createdAt = t.get("sysCreatedAt")
if createdAt:
tDate = createdAt.date() if isinstance(createdAt, datetime) else createdAt
if startDate and tDate < startDate:
@ -684,7 +684,7 @@ class BillingObjects:
filtered.append(t)
results = filtered
results.sort(key=lambda x: x.get("_createdAt", ""), reverse=True)
results.sort(key=lambda x: x.get("sysCreatedAt", ""), reverse=True)
return results[offset:offset + limit]
except Exception as e:
@ -739,7 +739,7 @@ class BillingObjects:
transactions = self.getTransactions(account["id"], limit=limit)
allTransactions.extend(transactions)
allTransactions.sort(key=lambda x: x.get("_createdAt", ""), reverse=True)
allTransactions.sort(key=lambda x: x.get("sysCreatedAt", ""), reverse=True)
return allTransactions[:limit]
# =========================================================================
@ -1244,7 +1244,7 @@ class BillingObjects:
except Exception as e:
logger.error(f"Error getting transactions for user: {e}")
allTransactions.sort(key=lambda x: x.get("_createdAt", ""), reverse=True)
allTransactions.sort(key=lambda x: x.get("sysCreatedAt", ""), reverse=True)
return allTransactions[:limit]
# =========================================================================
@ -1361,7 +1361,7 @@ class BillingObjects:
logger.error(f"Error getting mandate transactions: {e}")
# Sort by creation date descending and limit
allTransactions.sort(key=lambda x: x.get("_createdAt", ""), reverse=True)
allTransactions.sort(key=lambda x: x.get("sysCreatedAt", ""), reverse=True)
return allTransactions[:limit]
# =========================================================================
@ -1549,5 +1549,5 @@ class BillingObjects:
logger.error(f"Error getting user transactions for mandates: {e}")
# Sort by creation date descending and limit
allTransactions.sort(key=lambda x: x.get("_createdAt", ""), reverse=True)
allTransactions.sort(key=lambda x: x.get("sysCreatedAt", ""), reverse=True)
return allTransactions[:limit]

View file

@ -251,9 +251,8 @@ class ChatObjects:
objectFields[fieldName] = value
else:
# Field not in model - treat as scalar if simple, otherwise filter out
# BUT: always include metadata fields (_createdBy, _createdAt, etc.) as they're handled by connector
# Underscore-prefixed keys (e.g. UI meta) pass through; sys* live on PowerOnModel subclasses
if fieldName.startswith("_"):
# Metadata fields should be passed through to connector
simpleFields[fieldName] = value
elif isinstance(value, (str, int, float, bool, type(None))):
simpleFields[fieldName] = value
@ -885,7 +884,7 @@ class ChatObjects:
"role": msg.get("role", "assistant"),
"status": msg.get("status", "step"),
"sequenceNr": msg.get("sequenceNr", 0),
"publishedAt": msg.get("publishedAt") or msg.get("_createdAt") or msg.get("timestamp") or 0,
"publishedAt": msg.get("publishedAt") or msg.get("sysCreatedAt") or msg.get("timestamp") or 0,
"success": msg.get("success"),
"actionId": msg.get("actionId"),
"actionMethod": msg.get("actionMethod"),
@ -1268,7 +1267,7 @@ class ChatObjects:
# CASCADE DELETE: Delete all related data first
# 1. Delete message documents (but NOT the files themselves)
# Bypass RBAC -- workflow access already verified, child records may have different _createdBy
# Bypass RBAC -- workflow access already verified, child records may have different sysCreatedBy
existing_docs = self.db.getRecordset(ChatDocument, recordFilter={"messageId": messageId})
for doc in existing_docs:
self.db.recordDelete(ChatDocument, doc["id"])
@ -1296,7 +1295,7 @@ class ChatObjects:
# Get documents for this message from normalized table
# Bypass RBAC -- workflow access already verified, child records may have different _createdBy
# Bypass RBAC -- workflow access already verified, child records may have different sysCreatedBy
documents = self.db.getRecordset(ChatDocument, recordFilter={"messageId": messageId})
if not documents:

View file

@ -175,12 +175,7 @@ class ComponentObjects:
# Complex objects that should be filtered out
objectFields[fieldName] = value
else:
# Field not in model - treat as scalar if simple, otherwise filter out
# BUT: always include metadata fields (_createdBy, _createdAt, etc.) as they're handled by connector
if fieldName.startswith("_"):
# Metadata fields should be passed through to connector
simpleFields[fieldName] = value
elif isinstance(value, (str, int, float, bool, type(None))):
if isinstance(value, (str, int, float, bool, type(None))):
simpleFields[fieldName] = value
else:
objectFields[fieldName] = value
@ -609,7 +604,7 @@ class ComponentObjects:
"""
isSysAdmin = self._isSysAdmin()
for prompt in prompts:
isOwner = prompt.get("_createdBy") == self.userId
isOwner = prompt.get("sysCreatedBy") == self.userId
prompt["_permissions"] = {
"canUpdate": isOwner or isSysAdmin,
"canDelete": isOwner or isSysAdmin
@ -621,13 +616,13 @@ class ComponentObjects:
Visibility rules:
- SysAdmin: ALL prompts
- Regular user: own prompts (_createdBy) + system prompts (isSystem=True)
- Regular user: own prompts (sysCreatedBy) + system prompts (isSystem=True)
"""
if self._isSysAdmin():
return self.db.getRecordset(Prompt)
# Get own prompts
ownPrompts = self.db.getRecordset(Prompt, recordFilter={"_createdBy": self.userId})
ownPrompts = self.db.getRecordset(Prompt, recordFilter={"sysCreatedBy": self.userId})
# Get system prompts
systemPrompts = self.db.getRecordset(Prompt, recordFilter={"isSystem": True})
@ -716,7 +711,7 @@ class ComponentObjects:
# Visibility check for non-SysAdmin: must be owner or system prompt
if not self._isSysAdmin():
isOwner = prompt.get("_createdBy") == self.userId
isOwner = prompt.get("sysCreatedBy") == self.userId
isSystem = prompt.get("isSystem", False)
if not isOwner and not isSystem:
return None
@ -747,7 +742,7 @@ class ComponentObjects:
raise ValueError(f"Prompt {promptId} not found")
# Permission check: owner or SysAdmin
isOwner = (getattr(prompt, '_createdBy', None) == self.userId)
isOwner = (getattr(prompt, 'sysCreatedBy', None) == self.userId)
if not self._isSysAdmin() and not isOwner:
raise PermissionError(f"No permission to update prompt {promptId}")
@ -784,7 +779,7 @@ class ComponentObjects:
return False
# Permission check: owner or SysAdmin
isOwner = (getattr(prompt, '_createdBy', None) == self.userId)
isOwner = (getattr(prompt, 'sysCreatedBy', None) == self.userId)
if not self._isSysAdmin() and not isOwner:
raise PermissionError(f"No permission to delete prompt {promptId}")
@ -798,7 +793,7 @@ class ComponentObjects:
def checkForDuplicateFile(self, fileHash: str, fileName: str) -> Optional[FileItem]:
"""Checks if a file with the same hash AND fileName already exists for the current user.
Duplicate = same user (_createdBy) + same fileHash + same fileName.
Duplicate = same user (sysCreatedBy) + same fileHash + same fileName.
Same hash with different name is allowed (intentional copy by user).
Uses direct DB query (not RBAC) because files are isolated per user.
"""
@ -809,7 +804,7 @@ class ComponentObjects:
matchingFiles = self.db.getRecordset(
FileItem,
recordFilter={
"_createdBy": self.userId,
"sysCreatedBy": self.userId,
"fileHash": fileHash,
"fileName": fileName
}
@ -908,7 +903,7 @@ class ComponentObjects:
def _getFilesByCurrentUser(self, recordFilter: Dict[str, Any] = None) -> List[Dict[str, Any]]:
"""Files are always user-scoped. Returns only files owned by the current user,
regardless of role (including SysAdmin). This bypasses RBAC intentionally."""
filterDict = {"_createdBy": self.userId}
filterDict = {"sysCreatedBy": self.userId}
if recordFilter:
filterDict.update(recordFilter)
return self.db.getRecordset(FileItem, recordFilter=filterDict)
@ -927,7 +922,7 @@ class ComponentObjects:
If pagination is provided: PaginatedResult with items and metadata
"""
# User-scoping filter: every user only sees their own files (bypasses RBAC SysAdmin override)
recordFilter = {"_createdBy": self.userId}
recordFilter = {"sysCreatedBy": self.userId}
def _convertFileItems(files):
fileItems = []
@ -974,7 +969,7 @@ class ComponentObjects:
def getFile(self, fileId: str) -> Optional[FileItem]:
"""Returns a file by ID if it belongs to the current user (user-scoped)."""
# Files are always user-scoped: filter by _createdBy (bypasses RBAC SysAdmin override)
# Files are always user-scoped: filter by sysCreatedBy (bypasses RBAC SysAdmin override)
filteredFiles = self._getFilesByCurrentUser(recordFilter={"id": fileId})
if not filteredFiles:
@ -1151,7 +1146,7 @@ class ComponentObjects:
self.db._ensure_connection()
with self.db.connection.cursor() as cursor:
cursor.execute(
'SELECT "id" FROM "FileItem" WHERE "id" = ANY(%s) AND "_createdBy" = %s',
'SELECT "id" FROM "FileItem" WHERE "id" = ANY(%s) AND "sysCreatedBy" = %s',
(uniqueIds, self.userId or ""),
)
accessibleIds = [row["id"] for row in cursor.fetchall()]
@ -1162,7 +1157,7 @@ class ComponentObjects:
cursor.execute('DELETE FROM "FileData" WHERE "id" = ANY(%s)', (accessibleIds,))
cursor.execute(
'DELETE FROM "FileItem" WHERE "id" = ANY(%s) AND "_createdBy" = %s',
'DELETE FROM "FileItem" WHERE "id" = ANY(%s) AND "sysCreatedBy" = %s',
(accessibleIds, self.userId or ""),
)
deletedFiles = cursor.rowcount
@ -1207,12 +1202,12 @@ class ComponentObjects:
def getFolder(self, folderId: str) -> Optional[Dict[str, Any]]:
"""Returns a folder by ID if it belongs to the current user."""
folders = self.db.getRecordset(FileFolder, recordFilter={"id": folderId, "_createdBy": self.userId or ""})
folders = self.db.getRecordset(FileFolder, recordFilter={"id": folderId, "sysCreatedBy": self.userId or ""})
return folders[0] if folders else None
def listFolders(self, parentId: Optional[str] = None) -> List[Dict[str, Any]]:
"""List folders for current user, optionally filtered by parentId."""
recordFilter = {"_createdBy": self.userId or ""}
recordFilter = {"sysCreatedBy": self.userId or ""}
if parentId is not None:
recordFilter["parentId"] = parentId
return self.db.getRecordset(FileFolder, recordFilter=recordFilter)
@ -1261,7 +1256,7 @@ class ComponentObjects:
self.db._ensure_connection()
with self.db.connection.cursor() as cursor:
cursor.execute(
'SELECT "id" FROM "FileItem" WHERE "id" = ANY(%s) AND "_createdBy" = %s',
'SELECT "id" FROM "FileItem" WHERE "id" = ANY(%s) AND "sysCreatedBy" = %s',
(uniqueIds, self.userId or ""),
)
accessibleIds = [row["id"] for row in cursor.fetchall()]
@ -1270,8 +1265,8 @@ class ComponentObjects:
raise FileNotFoundError(f"Files not found or not accessible: {missingIds}")
cursor.execute(
'UPDATE "FileItem" SET "folderId" = %s, "_modifiedAt" = %s, "_modifiedBy" = %s '
'WHERE "id" = ANY(%s) AND "_createdBy" = %s',
'UPDATE "FileItem" SET "folderId" = %s, "sysModifiedAt" = %s, "sysModifiedBy" = %s '
'WHERE "id" = ANY(%s) AND "sysCreatedBy" = %s',
(targetFolderId, getUtcTimestamp(), self.userId or "", accessibleIds, self.userId or ""),
)
movedFiles = cursor.rowcount
@ -1300,7 +1295,7 @@ class ComponentObjects:
existingInTarget = self.db.getRecordset(
FileFolder,
recordFilter={"parentId": targetParentId or "", "_createdBy": self.userId or ""},
recordFilter={"parentId": targetParentId or "", "sysCreatedBy": self.userId or ""},
)
existingNames = {f.get("name"): f.get("id") for f in existingInTarget}
movingNames: Dict[str, str] = {}
@ -1321,8 +1316,8 @@ class ComponentObjects:
self.db._ensure_connection()
with self.db.connection.cursor() as cursor:
cursor.execute(
'UPDATE "FileFolder" SET "parentId" = %s, "_modifiedAt" = %s, "_modifiedBy" = %s '
'WHERE "id" = ANY(%s) AND "_createdBy" = %s',
'UPDATE "FileFolder" SET "parentId" = %s, "sysModifiedAt" = %s, "sysModifiedBy" = %s '
'WHERE "id" = ANY(%s) AND "sysCreatedBy" = %s',
(targetParentId, getUtcTimestamp(), self.userId or "", uniqueIds, self.userId or ""),
)
movedFolders = cursor.rowcount
@ -1340,7 +1335,7 @@ class ComponentObjects:
if not folder:
raise FileNotFoundError(f"Folder {folderId} not found")
childFolders = self.db.getRecordset(FileFolder, recordFilter={"parentId": folderId, "_createdBy": self.userId or ""})
childFolders = self.db.getRecordset(FileFolder, recordFilter={"parentId": folderId, "sysCreatedBy": self.userId or ""})
childFiles = self._getFilesByCurrentUser(recordFilter={"folderId": folderId})
if not recursive and (childFolders or childFiles):
@ -1389,7 +1384,7 @@ class ComponentObjects:
self.db._ensure_connection()
with self.db.connection.cursor() as cursor:
cursor.execute(
'SELECT "id" FROM "FileFolder" WHERE "id" = ANY(%s) AND "_createdBy" = %s',
'SELECT "id" FROM "FileFolder" WHERE "id" = ANY(%s) AND "sysCreatedBy" = %s',
(uniqueIds, self.userId or ""),
)
rootAccessibleIds = [row["id"] for row in cursor.fetchall()]
@ -1402,12 +1397,12 @@ class ComponentObjects:
WITH RECURSIVE folder_tree AS (
SELECT "id"
FROM "FileFolder"
WHERE "id" = ANY(%s) AND "_createdBy" = %s
WHERE "id" = ANY(%s) AND "sysCreatedBy" = %s
UNION ALL
SELECT child."id"
FROM "FileFolder" child
INNER JOIN folder_tree ft ON child."parentId" = ft."id"
WHERE child."_createdBy" = %s
WHERE child."sysCreatedBy" = %s
)
SELECT DISTINCT "id" FROM folder_tree
""",
@ -1416,7 +1411,7 @@ class ComponentObjects:
allFolderIds = [row["id"] for row in cursor.fetchall()]
cursor.execute(
'SELECT "id" FROM "FileItem" WHERE "folderId" = ANY(%s) AND "_createdBy" = %s',
'SELECT "id" FROM "FileItem" WHERE "folderId" = ANY(%s) AND "sysCreatedBy" = %s',
(allFolderIds, self.userId or ""),
)
allFileIds = [row["id"] for row in cursor.fetchall()]
@ -1424,7 +1419,7 @@ class ComponentObjects:
if allFileIds:
cursor.execute('DELETE FROM "FileData" WHERE "id" = ANY(%s)', (allFileIds,))
cursor.execute(
'DELETE FROM "FileItem" WHERE "id" = ANY(%s) AND "_createdBy" = %s',
'DELETE FROM "FileItem" WHERE "id" = ANY(%s) AND "sysCreatedBy" = %s',
(allFileIds, self.userId or ""),
)
deletedFiles = cursor.rowcount
@ -1432,7 +1427,7 @@ class ComponentObjects:
deletedFiles = 0
cursor.execute(
'DELETE FROM "FileFolder" WHERE "id" = ANY(%s) AND "_createdBy" = %s',
'DELETE FROM "FileFolder" WHERE "id" = ANY(%s) AND "sysCreatedBy" = %s',
(allFolderIds, self.userId or ""),
)
deletedFolders = cursor.rowcount

View file

@ -57,7 +57,7 @@ class FeatureInterface:
records = self.db.getRecordset(Feature, recordFilter={"code": featureCode})
if not records:
return None
cleanedRecord = {k: v for k, v in records[0].items() if not k.startswith("_")}
cleanedRecord = dict(records[0])
return Feature(**cleanedRecord)
except Exception as e:
logger.error(f"Error getting feature {featureCode}: {e}")
@ -74,7 +74,7 @@ class FeatureInterface:
records = self.db.getRecordset(Feature)
result = []
for record in records:
cleanedRecord = {k: v for k, v in record.items() if not k.startswith("_")}
cleanedRecord = dict(record)
result.append(Feature(**cleanedRecord))
return result
except Exception as e:
@ -120,7 +120,7 @@ class FeatureInterface:
records = self.db.getRecordset(FeatureInstance, recordFilter={"id": instanceId})
if not records:
return None
cleanedRecord = {k: v for k, v in records[0].items() if not k.startswith("_")}
cleanedRecord = dict(records[0])
return FeatureInstance(**cleanedRecord)
except Exception as e:
logger.error(f"Error getting feature instance {instanceId}: {e}")
@ -144,7 +144,7 @@ class FeatureInterface:
records = self.db.getRecordset(FeatureInstance, recordFilter=recordFilter)
result = []
for record in records:
cleanedRecord = {k: v for k, v in record.items() if not k.startswith("_")}
cleanedRecord = dict(record)
result.append(FeatureInstance(**cleanedRecord))
return result
except Exception as e:
@ -199,7 +199,7 @@ class FeatureInterface:
if copyTemplateRoles:
self._copyTemplateRoles(featureCode, mandateId, instanceId)
cleanedRecord = {k: v for k, v in createdInstance.items() if not k.startswith("_")}
cleanedRecord = dict(createdInstance)
return FeatureInstance(**cleanedRecord)
except Exception as e:
@ -435,7 +435,7 @@ class FeatureInterface:
updated = self.db.recordModify(FeatureInstance, instanceId, filteredData)
if updated:
cleanedRecord = {k: v for k, v in updated.items() if not k.startswith("_")}
cleanedRecord = dict(updated)
return FeatureInstance(**cleanedRecord)
return None
except Exception as e:
@ -484,7 +484,7 @@ class FeatureInterface:
records = self.db.getRecordset(Role, recordFilter=recordFilter)
result = []
for record in records:
cleanedRecord = {k: v for k, v in record.items() if not k.startswith("_")}
cleanedRecord = dict(record)
result.append(Role(**cleanedRecord))
return result
except Exception as e:

View file

@ -17,7 +17,7 @@ Data Namespace Structure:
GROUP-Berechtigung:
- data.uam.*: GROUP filtert nach Mandant (via UserMandate)
- data.chat.*, data.files.*, data.automation.*: GROUP = MY (benutzer-eigen); bei gesetztem featureInstanceId zusätzlich _createdBy
- data.chat.*, data.files.*, data.automation.*: GROUP = MY (benutzer-eigen); bei gesetztem featureInstanceId zusätzlich sysCreatedBy
- data.feature.*: GROUP filtert nach mandateId/featureInstanceId
"""
@ -146,7 +146,7 @@ def getRecordsetWithRBAC(
mandateId: Explicit mandate context (from request header). Required for GROUP access.
featureInstanceId: Explicit feature instance context
enrichPermissions: If True, adds _permissions field to each record with row-level
permissions { canUpdate, canDelete } based on RBAC rules and _createdBy
permissions { canUpdate, canDelete } based on RBAC rules and sysCreatedBy
featureCode: Optional feature code for feature-specific tables (e.g., "trustee").
If None, table is treated as a system table.
@ -657,7 +657,7 @@ def buildRbacWhereClause(
# shared featureInstance (stale RBAC rules or merged roles). Same as MY.
namespaceAll = TABLE_NAMESPACE.get(table, "system")
if featureInstanceId and namespaceAll == "chat":
userIdFieldAll = "_createdBy"
userIdFieldAll = "sysCreatedBy"
if table == "UserInDB":
userIdFieldAll = "id"
elif table == "UserConnection":
@ -671,7 +671,7 @@ def buildRbacWhereClause(
return {"condition": " AND ".join(baseConditions), "values": baseValues}
return None
# My records - filter by _createdBy or userId field
# My records - filter by sysCreatedBy or userId field
if readLevel == AccessLevel.MY:
# Try common field names for creator
userIdField = None
@ -680,7 +680,7 @@ def buildRbacWhereClause(
elif table == "UserConnection":
userIdField = "userId"
else:
userIdField = "_createdBy"
userIdField = "sysCreatedBy"
conditions = list(baseConditions)
values = list(baseValues)
@ -707,7 +707,7 @@ def buildRbacWhereClause(
if featureInstanceId and readLevel == AccessLevel.GROUP:
conditions = list(baseConditions)
values = list(baseValues)
conditions.append('"_createdBy" = %s')
conditions.append('"sysCreatedBy" = %s')
values.append(currentUser.id)
return {"condition": " AND ".join(conditions), "values": values}
return {"condition": " AND ".join(baseConditions), "values": baseValues}
@ -829,7 +829,7 @@ def _enrichRecordsWithPermissions(
Logic:
- AccessLevel.ALL ('a'): User can update/delete all records
- AccessLevel.MY ('m'): User can only update/delete records where _createdBy == userId
- AccessLevel.MY ('m'): User can only update/delete records where sysCreatedBy == userId
- AccessLevel.GROUP ('g'): Same as MY for now (group-level ownership)
- AccessLevel.NONE ('n'): User cannot update/delete any records
@ -846,7 +846,7 @@ def _enrichRecordsWithPermissions(
for record in records:
recordCopy = dict(record)
createdBy = record.get("_createdBy")
createdBy = record.get("sysCreatedBy")
# Determine canUpdate
canUpdate = _checkRowPermission(permissions.update, userId, createdBy)
@ -873,7 +873,7 @@ def _checkRowPermission(
Args:
accessLevel: The permission level (ALL, MY, GROUP, NONE)
userId: Current user's ID
recordCreatedBy: The _createdBy value of the record
recordCreatedBy: The sysCreatedBy value of the record
Returns:
True if user has permission, False otherwise
@ -884,9 +884,9 @@ def _checkRowPermission(
if accessLevel == AccessLevel.ALL:
return True
# MY and GROUP: Check ownership via _createdBy
# MY and GROUP: Check ownership via sysCreatedBy
if accessLevel in (AccessLevel.MY, AccessLevel.GROUP):
# If record has no _createdBy, allow access (can't verify ownership)
# If record has no sysCreatedBy, allow access (can't verify ownership)
if not recordCreatedBy:
return True
# If no userId, can't verify - deny

View file

@ -80,7 +80,7 @@ def _migrateDataRecords(db, oldInstanceId: str, newInstanceId: str, userId: str)
cursor.execute(
f'UPDATE "{tableName}" '
f'SET "featureInstanceId" = %s '
f'WHERE "featureInstanceId" = %s AND "_createdBy" = %s',
f'WHERE "featureInstanceId" = %s AND "sysCreatedBy" = %s',
(newInstanceId, oldInstanceId, userId),
)
count = cursor.rowcount

View file

@ -112,12 +112,12 @@ def _buildEnrichedAutomationEvents(currentUser: User) -> List[Dict[str, Any]]:
if automation:
if isinstance(automation, dict):
job["name"] = automation.get("label", "")
job["createdBy"] = _resolveUsername(automation.get("_createdBy", ""))
job["createdBy"] = _resolveUsername(automation.get("sysCreatedBy", ""))
job["mandate"] = _resolveMandateLabel(automation.get("mandateId", ""))
job["featureInstance"] = _resolveFeatureLabel(automation.get("featureInstanceId", ""))
else:
job["name"] = getattr(automation, "label", "")
job["createdBy"] = _resolveUsername(getattr(automation, "_createdBy", ""))
job["createdBy"] = _resolveUsername(getattr(automation, "sysCreatedBy", ""))
job["mandate"] = _resolveMandateLabel(getattr(automation, "mandateId", ""))
job["featureInstance"] = _resolveFeatureLabel(getattr(automation, "featureInstanceId", ""))
else:

View file

@ -91,14 +91,14 @@ def _buildFlattenedExecutionLogs(currentUser: User) -> List[Dict[str, Any]]:
automationLabel = automation.get("label", "")
mandateId = automation.get("mandateId", "")
featureInstanceId = automation.get("featureInstanceId", "")
createdBy = automation.get("_createdBy", "")
createdBy = automation.get("sysCreatedBy", "")
logs = automation.get("executionLogs") or []
else:
automationId = getattr(automation, "id", "")
automationLabel = getattr(automation, "label", "")
mandateId = getattr(automation, "mandateId", "")
featureInstanceId = getattr(automation, "featureInstanceId", "")
createdBy = getattr(automation, "_createdBy", "")
createdBy = getattr(automation, "sysCreatedBy", "")
logs = getattr(automation, "executionLogs", None) or []
mandateName = _resolveMandateLabel(mandateId)

View file

@ -1477,7 +1477,7 @@ def cleanup_duplicate_access_rules(
for sig, rules in rulesBySignature.items():
if len(rules) > 1:
# Sort by creation time (keep oldest)
rules.sort(key=lambda r: r.get("_createdAt", 0))
rules.sort(key=lambda r: r.get("sysCreatedAt", 0))
keepRule = rules[0]
deleteRules = rules[1:]

View file

@ -564,7 +564,7 @@ def getTransactions(
aicoreProvider=t.get("aicoreProvider"),
aicoreModel=t.get("aicoreModel"),
createdByUserId=t.get("createdByUserId"),
createdAt=t.get("_createdAt"),
createdAt=t.get("sysCreatedAt"),
mandateId=t.get("mandateId"),
mandateName=t.get("mandateName")
))
@ -1421,7 +1421,7 @@ def _enrichTransactionRows(transactions) -> List[Dict[str, Any]]:
aicoreProvider=t.get("aicoreProvider"),
aicoreModel=t.get("aicoreModel"),
createdByUserId=t.get("createdByUserId"),
createdAt=t.get("_createdAt")
createdAt=t.get("sysCreatedAt")
)
result.append(row.model_dump())
@ -1465,7 +1465,7 @@ def _buildTransactionsList(ctx: RequestContext, targetMandateId: str) -> List[Di
aicoreProvider=t.get("aicoreProvider"),
aicoreModel=t.get("aicoreModel"),
createdByUserId=t.get("createdByUserId"),
createdAt=t.get("_createdAt")
createdAt=t.get("sysCreatedAt")
)
result.append(row.model_dump())
@ -1641,7 +1641,7 @@ def getMandateViewTransactions(
aicoreProvider=t.get("aicoreProvider"),
aicoreModel=t.get("aicoreModel"),
createdByUserId=t.get("createdByUserId"),
createdAt=t.get("_createdAt"),
createdAt=t.get("sysCreatedAt"),
mandateId=t.get("mandateId"),
mandateName=t.get("mandateName")
))
@ -1796,7 +1796,7 @@ def getUserViewStatistics(
skippedNotDebit = 0
for t in allTransactions:
createdAt = t.get("_createdAt")
createdAt = t.get("sysCreatedAt")
if not createdAt:
skippedNoDate += 1
continue
@ -1972,7 +1972,7 @@ def getUserViewTransactions(
"aicoreProvider": t.get("aicoreProvider"),
"aicoreModel": t.get("aicoreModel"),
"createdByUserId": t.get("createdByUserId"),
"createdAt": t.get("_createdAt"),
"createdAt": t.get("sysCreatedAt"),
"mandateId": t.get("mandateId"),
"mandateName": t.get("mandateName"),
"userId": t.get("userId"),
@ -2069,7 +2069,7 @@ def getUserViewTransactionsFilterValues(
"aicoreProvider": t.get("aicoreProvider"),
"aicoreModel": t.get("aicoreModel"),
"createdByUserId": t.get("createdByUserId"),
"createdAt": t.get("_createdAt"),
"createdAt": t.get("sysCreatedAt"),
"mandateId": t.get("mandateId"),
"mandateName": t.get("mandateName"),
"userId": t.get("userId"),

View file

@ -266,7 +266,7 @@ def get_file_filter_values(
pass
try:
recordFilter = {"_createdBy": managementInterface.userId}
recordFilter = {"sysCreatedBy": managementInterface.userId}
values = managementInterface.db.getDistinctColumnValues(
FileItem, column, crossFilterPagination, recordFilter
)

View file

@ -261,7 +261,7 @@ class RbacClass:
# No mandate context: load roles from ALL user's mandates.
# Required for user-owned namespaces (files, chat, automation) that
# are accessed without mandate context (e.g., /api/files/ endpoints).
# Data isolation is still enforced by _createdBy WHERE clause.
# Data isolation is still enforced by sysCreatedBy WHERE clause.
allUserMandates = self.dbApp.getRecordset(
UserMandate,
recordFilter={"userId": user.id, "enabled": True}

View file

@ -441,13 +441,13 @@ def _buildWorkflowHintItems(
import time as _time
now = _time.time()
others.sort(key=lambda w: w.get("_createdAt") or w.get("startedAt") or 0, reverse=True)
others.sort(key=lambda w: w.get("sysCreatedAt") or w.get("startedAt") or 0, reverse=True)
others = others[:10]
items = []
for wf in others:
name = wf.get("name") or "(unnamed)"
createdAt = wf.get("_createdAt") or wf.get("startedAt") or 0
createdAt = wf.get("sysCreatedAt") or wf.get("startedAt") or 0
ageSec = now - createdAt if createdAt else 0
if ageSec < 3600:
ageStr = f"{int(ageSec / 60)}m ago"
@ -3188,7 +3188,7 @@ def _registerCoreTools(registry: ToolRegistry, services):
allWorkflows = chatInterface.getWorkflows() or []
allWorkflows.sort(
key=lambda w: w.get("_createdAt") or w.get("startedAt") or 0,
key=lambda w: w.get("sysCreatedAt") or w.get("startedAt") or 0,
reverse=True,
)
allWorkflows = allWorkflows[:50]
@ -3197,7 +3197,7 @@ def _registerCoreTools(registry: ToolRegistry, services):
for wf in allWorkflows:
wfId = wf.get("id", "")
name = wf.get("name") or "(unnamed)"
createdAt = wf.get("_createdAt") or wf.get("startedAt") or 0
createdAt = wf.get("sysCreatedAt") or wf.get("startedAt") or 0
lastActivity = wf.get("lastActivity") or createdAt
msgs = chatInterface.getMessages(wfId) or []
@ -3275,7 +3275,7 @@ def _registerCoreTools(registry: ToolRegistry, services):
items.append({
"role": raw.get("role", ""),
"message": content,
"publishedAt": raw.get("publishedAt") or raw.get("_createdAt") or 0,
"publishedAt": raw.get("publishedAt") or raw.get("sysCreatedAt") or 0,
})
header = f"Workflow {targetWorkflowId}: {len(allMsgs)} total messages"

View file

@ -124,7 +124,7 @@ class KnowledgeService:
_fileScope = _get("scope")
if _fileScope:
index.scope = _fileScope
_fileCreatedBy = _get("_createdBy")
_fileCreatedBy = _get("sysCreatedBy")
if _fileCreatedBy:
index.userId = str(_fileCreatedBy)
except Exception:

View file

@ -74,6 +74,18 @@ def getModelLabels(modelName: str, language: str = "en") -> Dict[str, str]:
}
def _mergedAttributeLabels(modelClass: Type[BaseModel], userLanguage: str) -> Dict[str, str]:
"""Merge attribute labels from model MRO (base classes first, subclass overrides)."""
try:
baseIdx = modelClass.__mro__.index(BaseModel)
except ValueError:
return getModelLabels(modelClass.__name__, userLanguage)
merged: Dict[str, str] = {}
for cls in reversed(modelClass.__mro__[:baseIdx]):
merged.update(getModelLabels(cls.__name__, userLanguage))
return merged
def getModelLabel(modelName: str, language: str = "en") -> str:
"""
Get the label for a model in the specified language.
@ -106,7 +118,7 @@ def getModelAttributeDefinitions(modelClass: Type[BaseModel] = None, userLanguag
attributes = []
model_name = modelClass.__name__
labels = getModelLabels(model_name, userLanguage)
labels = _mergedAttributeLabels(modelClass, userLanguage)
model_label = getModelLabel(model_name, userLanguage)
# Pydantic v2 only

View file

@ -74,7 +74,7 @@ _INDEXES = [
# Invitation indexes
("Invitation", "idx_invitation_mandate", ["mandateId"]),
("Invitation", "idx_invitation_createdby", ["createdBy"]),
("Invitation", "idx_invitation_syscreatedby", ["sysCreatedBy"]),
]
# Unique indexes (separate list)

View file

@ -35,8 +35,8 @@ USER_COLUMNS = [
"createdBy",
"usedBy",
"revokedBy",
"_createdBy",
"_modifiedBy",
"sysCreatedBy",
"sysModifiedBy",
]
@ -284,12 +284,12 @@ def _anonymizeRecords(
# Build WHERE clause for primary key
whereClause = " AND ".join([f'"{pk}" = %s' for pk in pkColumns])
# Check if table has _modifiedAt column
# Check if table has sysModifiedAt column
columns = _getTableColumns(dbConnector, tableName)
hasModifiedAt = "_modifiedAt" in columns
hasModifiedAt = "sysModifiedAt" in columns
if hasModifiedAt:
query = f'UPDATE "{tableName}" SET "{columnName}" = %s, "_modifiedAt" = %s WHERE {whereClause}'
query = f'UPDATE "{tableName}" SET "{columnName}" = %s, "sysModifiedAt" = %s WHERE {whereClause}'
params = [anonymousValue, getUtcTimestamp()]
else:
query = f'UPDATE "{tableName}" SET "{columnName}" = %s WHERE {whereClause}'

View file

@ -76,7 +76,7 @@ async def executeAutomation(automationId: str, automation, creatorUser: User, se
Args:
automationId: ID of automation to execute
automation: Pre-loaded automation object (with system fields like _createdBy)
automation: Pre-loaded automation object (with system fields like sysCreatedBy)
creatorUser: The user who created the automation (workflow runs in this context)
services: Services instance (used for interfaceDbApp etc.)
@ -302,10 +302,10 @@ def createAutomationEventHandler(automationId: str, eventUser):
logger.warning(f"Automation {automationId} not found or not active, skipping execution")
return
# Get creator user ID from automation's _createdBy system field
creatorUserId = getattr(automation, "_createdBy", None)
# Get creator user ID from automation's sysCreatedBy system field
creatorUserId = getattr(automation, "sysCreatedBy", None)
if not creatorUserId:
logger.error(f"Automation {automationId} has no creator user (_createdBy missing)")
logger.error(f"Automation {automationId} has no creator user (sysCreatedBy missing)")
return
# Get creator user from database (using SysAdmin access)

View file

@ -24,7 +24,7 @@ Optionen:
Die Struktur-Datei wird automatisch als <dateiname>_structure.json erstellt
--pretty, -p JSON formatiert ausgeben (für bessere Lesbarkeit)
--exclude Komma-getrennte Liste von Tabellen, die ausgeschlossen werden sollen
--include-meta System-Metadaten (_createdAt, _modifiedAt, etc.) beibehalten
--include-meta System-Metadaten (sysCreatedAt, sysModifiedAt, etc.) beibehalten
--db Nur bestimmte Datenbank(en) exportieren (komma-getrennt)
"""
@ -245,7 +245,12 @@ def _getTableData(conn, tableName: str, includeMeta: bool = False) -> List[Dict[
# Optional: System-Metadaten entfernen
if not includeMeta:
metaFields = ["_createdAt", "_modifiedAt", "_createdBy", "_modifiedBy"]
metaFields = [
"sysCreatedAt",
"sysModifiedAt",
"sysCreatedBy",
"sysModifiedBy",
]
for field in metaFields:
record.pop(field, None)
@ -789,7 +794,7 @@ Beispiele:
parser.add_argument(
"--include-meta",
help="System-Metadaten (_createdAt, etc.) beibehalten",
help="System-Metadaten (sysCreatedAt, sysModifiedAt, sysCreatedBy, sysModifiedBy) beibehalten",
action="store_true"
)

View file

@ -50,7 +50,6 @@ class TestRbacDatabaseFiltering:
id="test_user_all",
username="testuser",
roleLabels=["sysadmin"],
mandateId="test_mandate_all"
)
whereClause = db.buildRbacWhereClause(permissions, user, "SomeTable")
@ -73,13 +72,12 @@ class TestRbacDatabaseFiltering:
id="test_user_my",
username="testuser",
roleLabels=["user"],
mandateId="test_mandate_my"
)
whereClause = db.buildRbacWhereClause(permissions, user, "SomeTable")
assert whereClause is not None
assert whereClause["condition"] == '"_createdBy" = %s'
assert whereClause["condition"] == '"sysCreatedBy" = %s'
assert whereClause["values"] == ["test_user_my"]
def testBuildRbacWhereClauseGroupAccess(self, db):
@ -93,17 +91,19 @@ class TestRbacDatabaseFiltering:
delete=AccessLevel.GROUP
)
mandate_id = "test_mandate_group"
user = User(
id="test_user_group",
username="testuser",
roleLabels=["admin"],
mandateId="test_mandate_group"
)
whereClause = db.buildRbacWhereClause(permissions, user, "SomeTable")
whereClause = db.buildRbacWhereClause(
permissions, user, "SomeTable", mandateId=mandate_id
)
assert whereClause is not None
assert whereClause["condition"] == '"mandateId" = %s'
assert whereClause["condition"] == '("mandateId" = %s OR "mandateId" IS NULL)'
assert whereClause["values"] == ["test_mandate_group"]
def testBuildRbacWhereClauseNoAccess(self, db):
@ -121,7 +121,6 @@ class TestRbacDatabaseFiltering:
id="test_user_none",
username="testuser",
roleLabels=["viewer"],
mandateId="test_mandate_none"
)
whereClause = db.buildRbacWhereClause(permissions, user, "SomeTable")
@ -145,7 +144,6 @@ class TestRbacDatabaseFiltering:
id="test_user_in_db",
username="testuser",
roleLabels=["user"],
mandateId="test_mandate_in_db"
)
whereClause = db.buildRbacWhereClause(permissions, user, "UserInDB")
@ -156,56 +154,84 @@ class TestRbacDatabaseFiltering:
assert whereClause["values"] == ["test_user_in_db"]
def testBuildRbacWhereClauseUserConnectionTable(self, db):
    """GROUP on UserConnection resolves member userIds via UserMandate (multi-tenant).

    Creates a Mandate, two UserInDB rows and their UserMandate memberships, then
    verifies that buildRbacWhereClause(..., mandateId=...) for the UserConnection
    table emits a ``"userId" IN (...)`` clause whose values are exactly the two
    member userIds. mandateId is passed explicitly, mirroring the production
    request context (User no longer carries mandateId directly).
    """
    from modules.datamodels.datamodelUam import UserInDB, Mandate
    from modules.datamodels.datamodelMembership import UserMandate

    testMandateId = "rbac_test_mandate_uc"
    user1Id = "rbac_test_user_uc1"
    user2Id = "rbac_test_user_uc2"
    # Collected so cleanup can delete exactly the memberships we created,
    # even if recordCreate rewrote the id.
    userMandateIds = []
    try:
        # Mandate fixture: GROUP resolution walks UserMandate -> Mandate.
        mandate = Mandate(
            id=testMandateId,
            name="RBAC test mandate",
            label="RBAC test",
        )
        mandatePayload = mandate.model_dump()
        mandatePayload["id"] = mandate.id
        db.recordCreate(Mandate, mandatePayload)

        # Two users that will be members of the same mandate.
        for uid, uname in (
            (user1Id, "rbac_uc_user1"),
            (user2Id, "rbac_uc_user2"),
        ):
            u = UserInDB(
                id=uid,
                username=uname,
                email=f"{uid}@example.com",
                hashedPassword="not-used",
            )
            payload = u.model_dump()
            payload["id"] = u.id
            db.recordCreate(UserInDB, payload)

        # Membership rows drive the GROUP clause for UserConnection.
        for uid in (user1Id, user2Id):
            um = UserMandate(userId=uid, mandateId=testMandateId, enabled=True)
            umPayload = um.model_dump()
            umPayload["id"] = um.id
            createdUm = db.recordCreate(UserMandate, umPayload)
            # Prefer the persisted id; fall back to the model's own id.
            if createdUm and createdUm.get("id"):
                userMandateIds.append(createdUm["id"])
            else:
                userMandateIds.append(um.id)

        permissions = UserPermissions(
            view=True,
            read=AccessLevel.GROUP,
            create=AccessLevel.GROUP,
            update=AccessLevel.GROUP,
            delete=AccessLevel.GROUP,
        )
        user = User(
            id=user1Id,
            username="rbac_uc_user1",
            roleLabels=["admin"],
        )
        # mandateId must be passed explicitly, same as production request context.
        whereClause = db.buildRbacWhereClause(
            permissions, user, "UserConnection", mandateId=testMandateId
        )

        assert whereClause is not None
        assert whereClause["condition"] != "1 = 0"
        assert "userId" in whereClause["condition"]
        assert "IN" in whereClause["condition"]
        # Exactly the mandate's members — no extras, no omissions.
        assert set(whereClause["values"]) == {user1Id, user2Id}
    finally:
        # Best-effort cleanup; each delete is guarded so one failure does not
        # leak the remaining fixtures.
        for umId in userMandateIds:
            try:
                db.recordDelete(UserMandate, umId)
            except Exception:
                pass
        for uid in (user1Id, user2Id):
            try:
                db.recordDelete(UserInDB, uid)
            except Exception:
                pass
        try:
            db.recordDelete(Mandate, testMandateId)
        except Exception:
            pass