datamodel strict fk logic in one place

This commit is contained in:
ValueOn AG 2026-04-26 18:11:42 +02:00
parent 8221a0da3e
commit 564a1200c6
80 changed files with 1808 additions and 1004 deletions

8
app.py
View file

@@ -294,6 +294,14 @@ except Exception as e:
async def lifespan(app: FastAPI): async def lifespan(app: FastAPI):
logger.info("Application is starting up") logger.info("Application is starting up")
# Validate FK metadata on all Pydantic models (fail-fast, no silent fallbacks)
from modules.shared.fkRegistry import validateFkTargets
fkErrors = validateFkTargets()
if fkErrors:
for err in fkErrors:
logger.error("FK metadata validation: %s", err)
raise SystemExit(f"FK metadata validation failed ({len(fkErrors)} error(s)) — fix datamodels before starting")
# AI connectors already pre-warmed at module-load via _eager_prewarm() in aicoreModelRegistry. # AI connectors already pre-warmed at module-load via _eager_prewarm() in aicoreModelRegistry.
# Bootstrap database if needed (creates initial users, mandates, roles, etc.) # Bootstrap database if needed (creates initial users, mandates, roles, etc.)

View file

@@ -9,6 +9,7 @@ import logging
import importlib import importlib
import os import os
import time import time
import threading
from typing import Dict, List, Optional, Any, Tuple from typing import Dict, List, Optional, Any, Tuple
from modules.datamodels.datamodelAi import AiModel from modules.datamodels.datamodelAi import AiModel
from .aicoreBase import BaseConnectorAi from .aicoreBase import BaseConnectorAi
@@ -31,6 +32,7 @@ class ModelRegistry:
self._connectors: Dict[str, BaseConnectorAi] = {} self._connectors: Dict[str, BaseConnectorAi] = {}
self._lastRefresh: Optional[float] = None self._lastRefresh: Optional[float] = None
self._refreshInterval: float = 300.0 # 5 minutes self._refreshInterval: float = 300.0 # 5 minutes
self._refreshLock = threading.Lock()
self._connectorsInitialized: bool = False self._connectorsInitialized: bool = False
self._discoveredConnectorsCache: Optional[List[BaseConnectorAi]] = None # Avoid re-instantiating on every discoverConnectors() call self._discoveredConnectorsCache: Optional[List[BaseConnectorAi]] = None # Avoid re-instantiating on every discoverConnectors() call
self._getAvailableModelsCache: Dict[Tuple[str, int], Tuple[List[AiModel], float]] = {} # (user_id, rbac_id) -> (models, ts) self._getAvailableModelsCache: Dict[Tuple[str, int], Tuple[List[AiModel], float]] = {} # (user_id, rbac_id) -> (models, ts)
@@ -47,26 +49,10 @@ class ModelRegistry:
self._connectors[connectorType] = connector self._connectors[connectorType] = connector
# Collect models from this connector
try: try:
models = connector.getCachedModels() models = connector.getCachedModels()
for model in models: for model in models:
# Validate displayName uniqueness self._addModel(model, connectorType)
if model.displayName in self._models:
existingModel = self._models[model.displayName]
errorMsg = f"Duplicate displayName '{model.displayName}' detected! Existing model: displayName='{existingModel.displayName}', name='{existingModel.name}' (connector: {existingModel.connectorType}), New model: displayName='{model.displayName}', name='{model.name}' (connector: {connectorType}). displayName must be unique."
logger.error(errorMsg)
raise ValueError(errorMsg)
# TODO TESTING: Override maxTokens if testing override is enabled
if TESTING_MAX_TOKENS_OVERRIDE is not None and model.maxTokens > TESTING_MAX_TOKENS_OVERRIDE:
originalMaxTokens = model.maxTokens
model.maxTokens = TESTING_MAX_TOKENS_OVERRIDE
logger.debug(f"TESTING: Overrode maxTokens for {model.displayName}: {originalMaxTokens} -> {TESTING_MAX_TOKENS_OVERRIDE}")
# Use displayName as the key (must be unique)
self._models[model.displayName] = model
logger.debug(f"Registered model: {model.displayName} (name: {model.name}) from {connectorType}")
except Exception as e: except Exception as e:
logger.error(f"Failed to register models from {connectorType}: {e}") logger.error(f"Failed to register models from {connectorType}: {e}")
raise raise

View file

@@ -49,6 +49,102 @@ class AiAnthropic(BaseConnectorAi):
def getModels(self) -> List[AiModel]: def getModels(self) -> List[AiModel]:
# Get all available Anthropic models. # Get all available Anthropic models.
return [ return [
AiModel(
name="claude-opus-4-7",
displayName="Anthropic Claude Opus 4.7",
connectorType="anthropic",
apiUrl="https://api.anthropic.com/v1/messages",
temperature=0.2,
maxTokens=128000,
contextLength=1000000,
costPer1kTokensInput=0.005, # $5/M tokens (Anthropic API, 2026-04)
costPer1kTokensOutput=0.025, # $25/M tokens
speedRating=5,
qualityRating=10,
functionCall=self.callAiBasic,
functionCallStream=self.callAiBasicStream,
priority=PriorityEnum.QUALITY,
processingMode=ProcessingModeEnum.DETAILED,
operationTypes=createOperationTypeRatings(
(OperationTypeEnum.PLAN, 10),
(OperationTypeEnum.DATA_ANALYSE, 9),
(OperationTypeEnum.DATA_GENERATE, 10),
(OperationTypeEnum.DATA_EXTRACT, 9),
(OperationTypeEnum.AGENT, 10),
(OperationTypeEnum.DATA_QUERY, 3),
),
version="claude-opus-4-7",
calculatepriceCHF=lambda processingTime, bytesSent, bytesReceived: (bytesSent / 4 / 1000) * 0.005 + (bytesReceived / 4 / 1000) * 0.025
),
AiModel(
name="claude-sonnet-4-6",
displayName="Anthropic Claude Sonnet 4.6",
connectorType="anthropic",
apiUrl="https://api.anthropic.com/v1/messages",
temperature=0.2,
maxTokens=64000,
contextLength=1000000,
costPer1kTokensInput=0.003, # $3/M tokens
costPer1kTokensOutput=0.015, # $15/M tokens
speedRating=7,
qualityRating=10,
functionCall=self.callAiBasic,
functionCallStream=self.callAiBasicStream,
priority=PriorityEnum.BALANCED,
processingMode=ProcessingModeEnum.ADVANCED,
operationTypes=createOperationTypeRatings(
(OperationTypeEnum.PLAN, 9),
(OperationTypeEnum.DATA_ANALYSE, 9),
(OperationTypeEnum.DATA_GENERATE, 9),
(OperationTypeEnum.DATA_EXTRACT, 8),
(OperationTypeEnum.AGENT, 9),
(OperationTypeEnum.DATA_QUERY, 9),
),
version="claude-sonnet-4-6",
calculatepriceCHF=lambda processingTime, bytesSent, bytesReceived: (bytesSent / 4 / 1000) * 0.003 + (bytesReceived / 4 / 1000) * 0.015
),
AiModel(
name="claude-opus-4-7",
displayName="Anthropic Claude Opus 4.7 Vision",
connectorType="anthropic",
apiUrl="https://api.anthropic.com/v1/messages",
temperature=0.2,
maxTokens=128000,
contextLength=1000000,
costPer1kTokensInput=0.005,
costPer1kTokensOutput=0.025,
speedRating=5,
qualityRating=10,
functionCall=self.callAiImage,
priority=PriorityEnum.QUALITY,
processingMode=ProcessingModeEnum.DETAILED,
operationTypes=createOperationTypeRatings(
(OperationTypeEnum.IMAGE_ANALYSE, 10)
),
version="claude-opus-4-7",
calculatepriceCHF=lambda processingTime, bytesSent, bytesReceived: (bytesSent / 4 / 1000) * 0.005 + (bytesReceived / 4 / 1000) * 0.025
),
AiModel(
name="claude-sonnet-4-6",
displayName="Anthropic Claude Sonnet 4.6 Vision",
connectorType="anthropic",
apiUrl="https://api.anthropic.com/v1/messages",
temperature=0.2,
maxTokens=64000,
contextLength=1000000,
costPer1kTokensInput=0.003,
costPer1kTokensOutput=0.015,
speedRating=6,
qualityRating=10,
functionCall=self.callAiImage,
priority=PriorityEnum.QUALITY,
processingMode=ProcessingModeEnum.DETAILED,
operationTypes=createOperationTypeRatings(
(OperationTypeEnum.IMAGE_ANALYSE, 10)
),
version="claude-sonnet-4-6",
calculatepriceCHF=lambda processingTime, bytesSent, bytesReceived: (bytesSent / 4 / 1000) * 0.003 + (bytesReceived / 4 / 1000) * 0.015
),
AiModel( AiModel(
name="claude-sonnet-4-5-20250929", name="claude-sonnet-4-5-20250929",
displayName="Anthropic Claude Sonnet 4.5", displayName="Anthropic Claude Sonnet 4.5",

View file

@@ -123,6 +123,135 @@ class AiOpenai(BaseConnectorAi):
version="gpt-4o", version="gpt-4o",
calculatepriceCHF=lambda processingTime, bytesSent, bytesReceived: (bytesSent / 4 / 1000) * 0.0025 + (bytesReceived / 4 / 1000) * 0.01 calculatepriceCHF=lambda processingTime, bytesSent, bytesReceived: (bytesSent / 4 / 1000) * 0.0025 + (bytesReceived / 4 / 1000) * 0.01
), ),
AiModel(
name="gpt-5.5",
displayName="OpenAI GPT-5.5",
connectorType="openai",
apiUrl="https://api.openai.com/v1/chat/completions",
temperature=0.2,
maxTokens=128000,
contextLength=1050000,
costPer1kTokensInput=0.005, # $5/M tokens (OpenAI API, 2026-04)
costPer1kTokensOutput=0.03, # $30/M tokens
speedRating=8,
qualityRating=10,
functionCall=self.callAiBasic,
functionCallStream=self.callAiBasicStream,
priority=PriorityEnum.QUALITY,
processingMode=ProcessingModeEnum.DETAILED,
operationTypes=createOperationTypeRatings(
(OperationTypeEnum.PLAN, 10),
(OperationTypeEnum.DATA_ANALYSE, 10),
(OperationTypeEnum.DATA_GENERATE, 10),
(OperationTypeEnum.DATA_EXTRACT, 8),
(OperationTypeEnum.AGENT, 10),
(OperationTypeEnum.DATA_QUERY, 8),
),
version="gpt-5.5",
calculatepriceCHF=lambda processingTime, bytesSent, bytesReceived: (bytesSent / 4 / 1000) * 0.005 + (bytesReceived / 4 / 1000) * 0.03
),
AiModel(
name="gpt-5.4",
displayName="OpenAI GPT-5.4",
connectorType="openai",
apiUrl="https://api.openai.com/v1/chat/completions",
temperature=0.2,
maxTokens=128000,
contextLength=1050000,
costPer1kTokensInput=0.0025, # $2.50/M tokens
costPer1kTokensOutput=0.015, # $15/M tokens
speedRating=8,
qualityRating=10,
functionCall=self.callAiBasic,
functionCallStream=self.callAiBasicStream,
priority=PriorityEnum.BALANCED,
processingMode=ProcessingModeEnum.ADVANCED,
operationTypes=createOperationTypeRatings(
(OperationTypeEnum.PLAN, 9),
(OperationTypeEnum.DATA_ANALYSE, 10),
(OperationTypeEnum.DATA_GENERATE, 10),
(OperationTypeEnum.DATA_EXTRACT, 8),
(OperationTypeEnum.AGENT, 9),
(OperationTypeEnum.DATA_QUERY, 8),
),
version="gpt-5.4",
calculatepriceCHF=lambda processingTime, bytesSent, bytesReceived: (bytesSent / 4 / 1000) * 0.0025 + (bytesReceived / 4 / 1000) * 0.015
),
AiModel(
name="gpt-5.4-mini",
displayName="OpenAI GPT-5.4 Mini",
connectorType="openai",
apiUrl="https://api.openai.com/v1/chat/completions",
temperature=0.2,
maxTokens=128000,
contextLength=400000,
costPer1kTokensInput=0.00075, # $0.75/M tokens
costPer1kTokensOutput=0.0045, # $4.50/M tokens
speedRating=9,
qualityRating=9,
functionCall=self.callAiBasic,
functionCallStream=self.callAiBasicStream,
priority=PriorityEnum.SPEED,
processingMode=ProcessingModeEnum.BASIC,
operationTypes=createOperationTypeRatings(
(OperationTypeEnum.PLAN, 8),
(OperationTypeEnum.DATA_ANALYSE, 9),
(OperationTypeEnum.DATA_GENERATE, 9),
(OperationTypeEnum.DATA_EXTRACT, 8),
(OperationTypeEnum.AGENT, 8),
(OperationTypeEnum.DATA_QUERY, 10),
),
version="gpt-5.4-mini",
calculatepriceCHF=lambda processingTime, bytesSent, bytesReceived: (bytesSent / 4 / 1000) * 0.00075 + (bytesReceived / 4 / 1000) * 0.0045
),
AiModel(
name="gpt-5.4-nano",
displayName="OpenAI GPT-5.4 Nano",
connectorType="openai",
apiUrl="https://api.openai.com/v1/chat/completions",
temperature=0.2,
maxTokens=128000,
contextLength=400000,
costPer1kTokensInput=0.0002, # $0.20/M tokens
costPer1kTokensOutput=0.00125, # $1.25/M tokens
speedRating=10,
qualityRating=7,
functionCall=self.callAiBasic,
functionCallStream=self.callAiBasicStream,
priority=PriorityEnum.COST,
processingMode=ProcessingModeEnum.BASIC,
operationTypes=createOperationTypeRatings(
(OperationTypeEnum.PLAN, 7),
(OperationTypeEnum.DATA_ANALYSE, 7),
(OperationTypeEnum.DATA_GENERATE, 8),
(OperationTypeEnum.DATA_EXTRACT, 9),
(OperationTypeEnum.AGENT, 7),
(OperationTypeEnum.DATA_QUERY, 10),
),
version="gpt-5.4-nano",
calculatepriceCHF=lambda processingTime, bytesSent, bytesReceived: (bytesSent / 4 / 1000) * 0.0002 + (bytesReceived / 4 / 1000) * 0.00125
),
AiModel(
name="gpt-5.5",
displayName="OpenAI GPT-5.5 Vision",
connectorType="openai",
apiUrl="https://api.openai.com/v1/chat/completions",
temperature=0.2,
maxTokens=128000,
contextLength=1050000,
costPer1kTokensInput=0.005,
costPer1kTokensOutput=0.03,
speedRating=6,
qualityRating=10,
functionCall=self.callAiImage,
priority=PriorityEnum.QUALITY,
processingMode=ProcessingModeEnum.DETAILED,
operationTypes=createOperationTypeRatings(
(OperationTypeEnum.IMAGE_ANALYSE, 10)
),
version="gpt-5.5",
calculatepriceCHF=lambda processingTime, bytesSent, bytesReceived: (bytesSent / 4 / 1000) * 0.005 + (bytesReceived / 4 / 1000) * 0.03
),
AiModel( AiModel(
name="text-embedding-3-small", name="text-embedding-3-small",
displayName="OpenAI Embedding Small", displayName="OpenAI Embedding Small",

View file

@@ -561,29 +561,48 @@ class DatabaseConnector:
f"Could not add column '{col}' to '{table}': {add_err}" f"Could not add column '{col}' to '{table}': {add_err}"
) )
# Targeted type-downgrade: if a model field has been # Column type migrations for existing tables.
# changed from a structured type (JSONB) to a plain # TEXT→DOUBLE PRECISION handles three value shapes:
# TEXT field, alter the column so writes don't fail. # 1. NULL / empty string → NULL
# JSONB -> TEXT is a safe, lossless cast (JSONB is # 2. ISO date(time) like "2025-01-22" or "2025-01-22T10:00:00+00" → epoch via EXTRACT
# rendered as its JSON-text representation; the # 3. Plain numeric string like "3.14" → direct cast
# corresponding Pydantic ``@field_validator`` is _TEXT_TO_DOUBLE = (
# responsible for re-decoding legacy data on read). 'DOUBLE PRECISION USING CASE'
' WHEN "{col}" IS NULL OR "{col}" = \'\' THEN NULL'
' WHEN "{col}" ~ \'^\\d{4}-\\d{2}-\\d{2}\''
' THEN EXTRACT(EPOCH FROM "{col}"::timestamptz)'
' ELSE NULLIF("{col}", \'\')::double precision'
' END'
)
_SAFE_TYPE_CHANGES = {
("jsonb", "TEXT"): "TEXT USING \"{col}\"::text",
("text", "DOUBLE PRECISION"): _TEXT_TO_DOUBLE,
("text", "INTEGER"): "INTEGER USING NULLIF(\"{col}\", '')::integer",
("timestamp without time zone", "DOUBLE PRECISION"): 'DOUBLE PRECISION USING EXTRACT(EPOCH FROM "{col}" AT TIME ZONE \'UTC\')',
("timestamp with time zone", "DOUBLE PRECISION"): 'DOUBLE PRECISION USING EXTRACT(EPOCH FROM "{col}")',
("date", "DOUBLE PRECISION"): 'DOUBLE PRECISION USING EXTRACT(EPOCH FROM "{col}"::timestamp AT TIME ZONE \'UTC\')',
}
for col in sorted(desired_columns & existing_columns): for col in sorted(desired_columns & existing_columns):
if col == "id": if col == "id":
continue continue
desired_sql = (model_fields.get(col) or "").upper() desired_sql = (model_fields.get(col) or "").upper()
currentType = existing_column_types.get(col, "") currentType = existing_column_types.get(col, "")
if desired_sql == "TEXT" and currentType == "jsonb": migration = _SAFE_TYPE_CHANGES.get((currentType, desired_sql))
if migration:
castExpr = migration.replace("{col}", col)
try: try:
cursor.execute('SAVEPOINT col_migrate')
cursor.execute( cursor.execute(
f'ALTER TABLE "{table}" ALTER COLUMN "{col}" TYPE TEXT USING "{col}"::text' f'ALTER TABLE "{table}" ALTER COLUMN "{col}" TYPE {castExpr}'
) )
cursor.execute('RELEASE SAVEPOINT col_migrate')
logger.info( logger.info(
f"Downgraded column '{col}' from JSONB to TEXT on '{table}'" f"Migrated column '{col}' from {currentType} to {desired_sql} on '{table}'"
) )
except Exception as alter_err: except Exception as alter_err:
cursor.execute('ROLLBACK TO SAVEPOINT col_migrate')
logger.warning( logger.warning(
f"Could not downgrade column '{col}' on '{table}': {alter_err}" f"Could not migrate column '{col}' on '{table}': {alter_err}"
) )
except Exception as ensure_err: except Exception as ensure_err:
logger.warning( logger.warning(
@@ -1096,8 +1115,15 @@ class DatabaseConnector:
values.append(f"%{v}") values.append(f"%{v}")
elif op in ("gt", "gte", "lt", "lte"): elif op in ("gt", "gte", "lt", "lte"):
sqlOp = {"gt": ">", "gte": ">=", "lt": "<", "lte": "<="}[op] sqlOp = {"gt": ">", "gte": ">=", "lt": "<", "lte": "<="}[op]
where_parts.append(f'"{key}"::TEXT {sqlOp} %s') if colType in ("INTEGER", "DOUBLE PRECISION"):
values.append(str(v)) try:
where_parts.append(f'"{key}"::double precision {sqlOp} %s')
values.append(float(v))
except (ValueError, TypeError):
continue
else:
where_parts.append(f'"{key}"::TEXT {sqlOp} %s')
values.append(str(v))
elif op == "between": elif op == "between":
fromVal = v.get("from", "") if isinstance(v, dict) else "" fromVal = v.get("from", "") if isinstance(v, dict) else ""
toVal = v.get("to", "") if isinstance(v, dict) else "" toVal = v.get("to", "") if isinstance(v, dict) else ""
@@ -1122,6 +1148,21 @@ class DatabaseConnector:
toTs = _dt.strptime(str(toVal), '%Y-%m-%d').replace(hour=23, minute=59, second=59, tzinfo=_tz.utc).timestamp() toTs = _dt.strptime(str(toVal), '%Y-%m-%d').replace(hour=23, minute=59, second=59, tzinfo=_tz.utc).timestamp()
where_parts.append(f'"{key}" <= %s') where_parts.append(f'"{key}" <= %s')
values.append(toTs) values.append(toTs)
elif isNumericCol:
try:
if fromVal and toVal:
where_parts.append(
f'"{key}"::double precision >= %s AND "{key}"::double precision <= %s'
)
values.extend([float(fromVal), float(toVal)])
elif fromVal:
where_parts.append(f'"{key}"::double precision >= %s')
values.append(float(fromVal))
elif toVal:
where_parts.append(f'"{key}"::double precision <= %s')
values.append(float(toVal))
except (ValueError, TypeError):
continue
else: else:
if fromVal and toVal: if fromVal and toVal:
where_parts.append(f'"{key}"::TEXT >= %s AND "{key}"::TEXT <= %s') where_parts.append(f'"{key}"::TEXT >= %s AND "{key}"::TEXT <= %s')

View file

@@ -125,7 +125,7 @@ class AiModel(BaseModel):
# Metadata # Metadata
version: Optional[str] = Field(default=None, description="Model version") version: Optional[str] = Field(default=None, description="Model version")
lastUpdated: Optional[str] = Field(default=None, description="Last update timestamp") lastUpdated: Optional[float] = Field(default=None, description="Last update timestamp (UTC unix)", json_schema_extra={"frontend_type": "timestamp"})
model_config = ConfigDict(arbitrary_types_allowed=True) # Allow Callable type model_config = ConfigDict(arbitrary_types_allowed=True) # Allow Callable type

View file

@@ -34,7 +34,7 @@ class AiAuditLogEntry(BaseModel):
userId: str = Field( userId: str = Field(
description="ID of the user who triggered the AI call", description="ID of the user who triggered the AI call",
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "User"}}, json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"}},
) )
username: Optional[str] = Field( username: Optional[str] = Field(
default=None, default=None,
@@ -43,17 +43,17 @@ class AiAuditLogEntry(BaseModel):
) )
mandateId: str = Field( mandateId: str = Field(
description="Mandate context of the call", description="Mandate context of the call",
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate"}}, json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}},
) )
featureInstanceId: Optional[str] = Field( featureInstanceId: Optional[str] = Field(
default=None, default=None,
description="Feature instance context", description="Feature instance context",
json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}}, json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}},
) )
featureCode: Optional[str] = Field( featureCode: Optional[str] = Field(
default=None, default=None,
description="Feature code (e.g. workspace, trustee)", description="Feature code (e.g. workspace, trustee)",
json_schema_extra={"label": "Feature", "fk_target": {"db": "poweron_app", "table": "Feature", "column": "code"}}, json_schema_extra={"label": "Feature", "fk_target": {"db": "poweron_app", "table": "Feature", "column": "code", "labelField": "code"}},
) )
instanceLabel: Optional[str] = Field( instanceLabel: Optional[str] = Field(
default=None, default=None,

View file

@@ -100,7 +100,7 @@ class AuditLogEntry(BaseModel):
timestamp: float = Field( timestamp: float = Field(
default_factory=getUtcTimestamp, default_factory=getUtcTimestamp,
description="UTC timestamp when the event occurred", description="UTC timestamp when the event occurred",
json_schema_extra={"label": "Zeitstempel", "frontend_type": "datetime", "frontend_readonly": True, "frontend_required": True} json_schema_extra={"label": "Zeitstempel", "frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": True}
) )
# Actor identification # Actor identification
@@ -111,7 +111,7 @@ class AuditLogEntry(BaseModel):
"frontend_type": "text", "frontend_type": "text",
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": True, "frontend_required": True,
"fk_target": {"db": "poweron_app", "table": "User"}, "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
}, },
) )
@@ -130,7 +130,7 @@ class AuditLogEntry(BaseModel):
"frontend_type": "text", "frontend_type": "text",
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": False, "frontend_required": False,
"fk_target": {"db": "poweron_app", "table": "Mandate"}, "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
}, },
) )
@@ -142,7 +142,7 @@ class AuditLogEntry(BaseModel):
"frontend_type": "text", "frontend_type": "text",
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": False, "frontend_required": False,
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"}, "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
}, },
) )

View file

@@ -64,7 +64,7 @@ class BackgroundJob(PowerOnModel):
description="Mandate scope (used for access checks). None for system-wide jobs.", description="Mandate scope (used for access checks). None for system-wide jobs.",
json_schema_extra={ json_schema_extra={
"label": "Mandanten-ID", "label": "Mandanten-ID",
"fk_target": {"db": "poweron_app", "table": "Mandate"}, "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
}, },
) )
featureInstanceId: Optional[str] = Field( featureInstanceId: Optional[str] = Field(
@@ -72,7 +72,7 @@ class BackgroundJob(PowerOnModel):
description="Feature instance scope (optional)", description="Feature instance scope (optional)",
json_schema_extra={ json_schema_extra={
"label": "Feature-Instanz", "label": "Feature-Instanz",
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"}, "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
}, },
) )
triggeredBy: Optional[str] = Field( triggeredBy: Optional[str] = Field(
@@ -113,18 +113,18 @@ class BackgroundJob(PowerOnModel):
json_schema_extra={"label": "Fehler"}, json_schema_extra={"label": "Fehler"},
) )
createdAt: datetime = Field( createdAt: float = Field(
default_factory=lambda: datetime.now(timezone.utc), default_factory=lambda: datetime.now(timezone.utc).timestamp(),
description="When the job was submitted", description="When the job was submitted (UTC unix)",
json_schema_extra={"label": "Eingereicht"}, json_schema_extra={"label": "Eingereicht", "frontend_type": "timestamp"},
) )
startedAt: Optional[datetime] = Field( startedAt: Optional[float] = Field(
None, None,
description="When the handler began running", description="When the handler began running (UTC unix)",
json_schema_extra={"label": "Gestartet"}, json_schema_extra={"label": "Gestartet", "frontend_type": "timestamp"},
) )
finishedAt: Optional[datetime] = Field( finishedAt: Optional[float] = Field(
None, None,
description="When the handler reached a terminal status", description="When the handler reached a terminal status (UTC unix)",
json_schema_extra={"label": "Beendet"}, json_schema_extra={"label": "Beendet", "frontend_type": "timestamp"},
) )

View file

@@ -46,9 +46,7 @@ class PowerOnModel(BaseModel):
"frontend_required": False, "frontend_required": False,
"frontend_visible": False, "frontend_visible": False,
"system": True, "system": True,
"fk_model": "User", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
"fk_label_field": "username",
"fk_target": {"db": "poweron_app", "table": "User"},
}, },
) )
sysModifiedAt: Optional[float] = Field( sysModifiedAt: Optional[float] = Field(
@@ -73,8 +71,6 @@ class PowerOnModel(BaseModel):
"frontend_required": False, "frontend_required": False,
"frontend_visible": False, "frontend_visible": False,
"system": True, "system": True,
"fk_model": "User", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
"fk_label_field": "username",
"fk_target": {"db": "poweron_app", "table": "User"},
}, },
) )

View file

@@ -49,12 +49,12 @@ class BillingAccount(PowerOnModel):
mandateId: str = Field( mandateId: str = Field(
..., ...,
description="Foreign key to Mandate", description="Foreign key to Mandate",
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate"}}, json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}},
) )
userId: Optional[str] = Field( userId: Optional[str] = Field(
None, None,
description="Foreign key to User (None = mandate pool account, set = user audit account)", description="Foreign key to User (None = mandate pool account, set = user audit account)",
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "User"}}, json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"}},
) )
balance: float = Field(default=0.0, description="Current balance in CHF", json_schema_extra={"label": "Guthaben (CHF)"}) balance: float = Field(default=0.0, description="Current balance in CHF", json_schema_extra={"label": "Guthaben (CHF)"})
warningThreshold: float = Field( warningThreshold: float = Field(
@@ -62,10 +62,10 @@ class BillingAccount(PowerOnModel):
description="Warning threshold in CHF", description="Warning threshold in CHF",
json_schema_extra={"label": "Warnschwelle (CHF)"}, json_schema_extra={"label": "Warnschwelle (CHF)"},
) )
lastWarningAt: Optional[datetime] = Field( lastWarningAt: Optional[float] = Field(
None, None,
description="Last warning sent timestamp", description="Last warning sent timestamp (UTC unix)",
json_schema_extra={"label": "Letzte Warnung"}, json_schema_extra={"label": "Letzte Warnung", "frontend_type": "timestamp"},
) )
enabled: bool = Field(default=True, description="Account is active", json_schema_extra={"label": "Aktiv"}) enabled: bool = Field(default=True, description="Account is active", json_schema_extra={"label": "Aktiv"})
@@ -81,7 +81,7 @@ class BillingTransaction(PowerOnModel):
accountId: str = Field( accountId: str = Field(
..., ...,
description="Foreign key to BillingAccount", description="Foreign key to BillingAccount",
json_schema_extra={"label": "Konto-ID", "fk_target": {"db": "poweron_billing", "table": "BillingAccount"}}, json_schema_extra={"label": "Konto-ID", "fk_target": {"db": "poweron_billing", "table": "BillingAccount", "labelField": None}},
) )
transactionType: TransactionTypeEnum = Field(..., description="Transaction type", json_schema_extra={"label": "Typ"}) transactionType: TransactionTypeEnum = Field(..., description="Transaction type", json_schema_extra={"label": "Typ"})
amount: float = Field(..., description="Amount in CHF (always positive)", json_schema_extra={"label": "Betrag (CHF)"}) amount: float = Field(..., description="Amount in CHF (always positive)", json_schema_extra={"label": "Betrag (CHF)"})
@@ -100,19 +100,19 @@ class BillingTransaction(PowerOnModel):
featureInstanceId: Optional[str] = Field( featureInstanceId: Optional[str] = Field(
None, None,
description="Feature instance ID", description="Feature instance ID",
json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}}, json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}},
) )
featureCode: Optional[str] = Field( featureCode: Optional[str] = Field(
None, None,
description="Feature code (e.g., automation)", description="Feature code (e.g., automation)",
json_schema_extra={"label": "Feature-Code", "fk_target": {"db": "poweron_app", "table": "Feature", "column": "code"}}, json_schema_extra={"label": "Feature-Code", "fk_target": {"db": "poweron_app", "table": "Feature", "column": "code", "labelField": "code"}},
) )
aicoreProvider: Optional[str] = Field(None, description="AICore provider (anthropic, openai, etc.)", json_schema_extra={"label": "AI-Anbieter"}) aicoreProvider: Optional[str] = Field(None, description="AICore provider (anthropic, openai, etc.)", json_schema_extra={"label": "AI-Anbieter"})
aicoreModel: Optional[str] = Field(None, description="AICore model name (e.g., claude-4-sonnet, gpt-4o)", json_schema_extra={"label": "AI-Modell"}) aicoreModel: Optional[str] = Field(None, description="AICore model name (e.g., claude-4-sonnet, gpt-4o)", json_schema_extra={"label": "AI-Modell"})
createdByUserId: Optional[str] = Field( createdByUserId: Optional[str] = Field(
None, None,
description="User who created/caused this transaction", description="User who created/caused this transaction",
json_schema_extra={"label": "Erstellt von Benutzer", "fk_target": {"db": "poweron_app", "table": "User"}}, json_schema_extra={"label": "Erstellt von Benutzer", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"}},
) )
# AI call metadata (for per-call analytics) # AI call metadata (for per-call analytics)
@@ -133,7 +133,7 @@ class BillingSettings(BaseModel):
mandateId: str = Field( mandateId: str = Field(
..., ...,
description="Foreign key to Mandate (UNIQUE)", description="Foreign key to Mandate (UNIQUE)",
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate"}}, json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}},
) )
warningThresholdPercent: float = Field( warningThresholdPercent: float = Field(
@@ -158,7 +158,7 @@ class BillingSettings(BaseModel):
) )
rechargeMaxPerMonth: int = Field(default=3, description="Max auto-recharges per month", json_schema_extra={"label": "Max. Nachladungen/Monat"}) rechargeMaxPerMonth: int = Field(default=3, description="Max auto-recharges per month", json_schema_extra={"label": "Max. Nachladungen/Monat"})
rechargesThisMonth: int = Field(default=0, description="Counter: auto-recharges used this month", json_schema_extra={"label": "Nachladungen diesen Monat"}) rechargesThisMonth: int = Field(default=0, description="Counter: auto-recharges used this month", json_schema_extra={"label": "Nachladungen diesen Monat"})
monthResetAt: Optional[datetime] = Field(None, description="When rechargesThisMonth was last reset", json_schema_extra={"label": "Monats-Reset"}) monthResetAt: Optional[float] = Field(None, description="When rechargesThisMonth was last reset (UTC unix)", json_schema_extra={"label": "Monats-Reset", "frontend_type": "timestamp"})
# Notifications # Notifications
notifyEmails: List[str] = Field( notifyEmails: List[str] = Field(
@@ -174,10 +174,10 @@ class BillingSettings(BaseModel):
description="Peak indexed data volume MB this billing period", description="Peak indexed data volume MB this billing period",
json_schema_extra={"label": "Speicher-Peak (MB)"}, json_schema_extra={"label": "Speicher-Peak (MB)"},
) )
storagePeriodStartAt: Optional[datetime] = Field( storagePeriodStartAt: Optional[float] = Field(
None, None,
description="Subscription billing period start used for storage reset", description="Subscription billing period start used for storage reset (UTC unix)",
json_schema_extra={"label": "Speicher-Periodenbeginn"}, json_schema_extra={"label": "Speicher-Periodenbeginn", "frontend_type": "timestamp"},
) )
storageBilledUpToMB: float = Field( storageBilledUpToMB: float = Field(
default=0.0, default=0.0,
@ -193,9 +193,10 @@ class StripeWebhookEvent(BaseModel):
description="Primary key", description="Primary key",
) )
event_id: str = Field(..., description="Stripe event ID (evt_xxx)") event_id: str = Field(..., description="Stripe event ID (evt_xxx)")
processed_at: datetime = Field( processed_at: float = Field(
default_factory=lambda: datetime.now(timezone.utc), default_factory=lambda: datetime.now(timezone.utc).timestamp(),
description="When the event was processed", description="When the event was processed (UTC unix)",
json_schema_extra={"frontend_type": "timestamp"},
) )
@ -210,10 +211,14 @@ class UsageStatistics(BaseModel):
accountId: str = Field( accountId: str = Field(
..., ...,
description="Foreign key to BillingAccount", description="Foreign key to BillingAccount",
json_schema_extra={"label": "Konto-ID", "fk_target": {"db": "poweron_billing", "table": "BillingAccount"}}, json_schema_extra={"label": "Konto-ID", "fk_target": {"db": "poweron_billing", "table": "BillingAccount", "labelField": None}},
) )
periodType: PeriodTypeEnum = Field(..., description="Period type", json_schema_extra={"label": "Periodentyp"}) periodType: PeriodTypeEnum = Field(..., description="Period type", json_schema_extra={"label": "Periodentyp"})
periodStart: date = Field(..., description="Period start date", json_schema_extra={"label": "Periodenbeginn"}) periodStart: date = Field(
...,
description="Period start date",
json_schema_extra={"label": "Periodenbeginn", "frontend_type": "date"},
)
# Aggregated values # Aggregated values
totalCostCHF: float = Field(default=0.0, description="Total cost in CHF", json_schema_extra={"label": "Gesamtkosten (CHF)"}) totalCostCHF: float = Field(default=0.0, description="Total cost in CHF", json_schema_extra={"label": "Gesamtkosten (CHF)"})

View file

@ -16,12 +16,12 @@ class ChatLog(PowerOnModel):
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key", json_schema_extra={"label": "ID"}) id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key", json_schema_extra={"label": "ID"})
workflowId: str = Field( workflowId: str = Field(
description="Foreign key to workflow", description="Foreign key to workflow",
json_schema_extra={"label": "Workflow-ID", "fk_target": {"db": "poweron_chat", "table": "ChatWorkflow"}}, json_schema_extra={"label": "Workflow-ID", "fk_target": {"db": "poweron_chat", "table": "ChatWorkflow", "labelField": "name"}},
) )
message: str = Field(description="Log message", json_schema_extra={"label": "Nachricht"}) message: str = Field(description="Log message", json_schema_extra={"label": "Nachricht"})
type: str = Field(description="Log type (info, warning, error, etc.)", json_schema_extra={"label": "Typ"}) type: str = Field(description="Log type (info, warning, error, etc.)", json_schema_extra={"label": "Typ"})
timestamp: float = Field(default_factory=getUtcTimestamp, timestamp: float = Field(default_factory=getUtcTimestamp,
description="When the log entry was created (UTC timestamp in seconds)", json_schema_extra={"label": "Zeitstempel"}) description="When the log entry was created (UTC timestamp in seconds)", json_schema_extra={"label": "Zeitstempel", "frontend_type": "timestamp"})
status: Optional[str] = Field(None, description="Status of the log entry", json_schema_extra={"label": "Status"}) status: Optional[str] = Field(None, description="Status of the log entry", json_schema_extra={"label": "Status"})
progress: Optional[float] = Field(None, description="Progress indicator (0.0 to 1.0)", json_schema_extra={"label": "Fortschritt"}) progress: Optional[float] = Field(None, description="Progress indicator (0.0 to 1.0)", json_schema_extra={"label": "Fortschritt"})
performance: Optional[Dict[str, Any]] = Field(None, description="Performance metrics", json_schema_extra={"label": "Leistung"}) performance: Optional[Dict[str, Any]] = Field(None, description="Performance metrics", json_schema_extra={"label": "Leistung"})
@ -37,11 +37,11 @@ class ChatDocument(PowerOnModel):
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key", json_schema_extra={"label": "ID"}) id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key", json_schema_extra={"label": "ID"})
messageId: str = Field( messageId: str = Field(
description="Foreign key to message", description="Foreign key to message",
json_schema_extra={"label": "Nachrichten-ID", "fk_target": {"db": "poweron_chat", "table": "ChatMessage"}}, json_schema_extra={"label": "Nachrichten-ID", "fk_target": {"db": "poweron_chat", "table": "ChatMessage", "labelField": None}},
) )
fileId: str = Field( fileId: str = Field(
description="Foreign key to file", description="Foreign key to file",
json_schema_extra={"label": "Datei-ID", "fk_target": {"db": "poweron_management", "table": "FileItem"}}, json_schema_extra={"label": "Datei-ID", "fk_target": {"db": "poweron_management", "table": "FileItem", "labelField": "fileName"}},
) )
fileName: str = Field(description="Name of the file", json_schema_extra={"label": "Dateiname"}) fileName: str = Field(description="Name of the file", json_schema_extra={"label": "Dateiname"})
fileSize: int = Field(description="Size of the file", json_schema_extra={"label": "Dateigröße"}) fileSize: int = Field(description="Size of the file", json_schema_extra={"label": "Dateigröße"})
@ -81,12 +81,12 @@ class ChatMessage(PowerOnModel):
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key", json_schema_extra={"label": "ID"}) id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key", json_schema_extra={"label": "ID"})
workflowId: str = Field( workflowId: str = Field(
description="Foreign key to workflow", description="Foreign key to workflow",
json_schema_extra={"label": "Workflow-ID", "fk_target": {"db": "poweron_chat", "table": "ChatWorkflow"}}, json_schema_extra={"label": "Workflow-ID", "fk_target": {"db": "poweron_chat", "table": "ChatWorkflow", "labelField": "name"}},
) )
parentMessageId: Optional[str] = Field( parentMessageId: Optional[str] = Field(
None, None,
description="Parent message ID for threading", description="Parent message ID for threading",
json_schema_extra={"label": "Übergeordnete Nachrichten-ID", "fk_target": {"db": "poweron_chat", "table": "ChatMessage"}}, json_schema_extra={"label": "Übergeordnete Nachrichten-ID", "fk_target": {"db": "poweron_chat", "table": "ChatMessage", "labelField": None}},
) )
documents: List[ChatDocument] = Field(default_factory=list, description="Associated documents", json_schema_extra={"label": "Dokumente"}) documents: List[ChatDocument] = Field(default_factory=list, description="Associated documents", json_schema_extra={"label": "Dokumente"})
documentsLabel: Optional[str] = Field(None, description="Label for the set of documents", json_schema_extra={"label": "Dokumenten-Label"}) documentsLabel: Optional[str] = Field(None, description="Label for the set of documents", json_schema_extra={"label": "Dokumenten-Label"})
@ -97,7 +97,7 @@ class ChatMessage(PowerOnModel):
sequenceNr: Optional[int] = Field(default=0, sequenceNr: Optional[int] = Field(default=0,
description="Sequence number of the message (set automatically)", json_schema_extra={"label": "Sequenznummer"}) description="Sequence number of the message (set automatically)", json_schema_extra={"label": "Sequenznummer"})
publishedAt: Optional[float] = Field(default=None, publishedAt: Optional[float] = Field(default=None,
description="When the message was published (UTC timestamp in seconds)", json_schema_extra={"label": "Veröffentlicht am"}) description="When the message was published (UTC timestamp in seconds)", json_schema_extra={"label": "Veröffentlicht am", "frontend_type": "timestamp"})
success: Optional[bool] = Field(None, description="Whether the message processing was successful", json_schema_extra={"label": "Erfolg"}) success: Optional[bool] = Field(None, description="Whether the message processing was successful", json_schema_extra={"label": "Erfolg"})
actionId: Optional[str] = Field(None, description="ID of the action that produced this message", json_schema_extra={"label": "Aktions-ID"}) actionId: Optional[str] = Field(None, description="ID of the action that produced this message", json_schema_extra={"label": "Aktions-ID"})
actionMethod: Optional[str] = Field(None, description="Method of the action that produced this message", json_schema_extra={"label": "Aktionsmethode"}) actionMethod: Optional[str] = Field(None, description="Method of the action that produced this message", json_schema_extra={"label": "Aktionsmethode"})
@ -125,7 +125,7 @@ class ChatWorkflow(PowerOnModel):
"frontend_type": "text", "frontend_type": "text",
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": False, "frontend_required": False,
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"}, "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
}, },
) )
linkedWorkflowId: Optional[str] = Field( linkedWorkflowId: Optional[str] = Field(
@ -219,7 +219,7 @@ class UserInputRequest(BaseModel):
workflowId: Optional[str] = Field( workflowId: Optional[str] = Field(
None, None,
description="Optional ID of the workflow to continue", description="Optional ID of the workflow to continue",
json_schema_extra={"label": "Workflow-ID", "fk_target": {"db": "poweron_chat", "table": "ChatWorkflow"}}, json_schema_extra={"label": "Workflow-ID", "fk_target": {"db": "poweron_chat", "table": "ChatWorkflow", "labelField": "name"}},
) )
allowedProviders: Optional[List[str]] = Field(None, description="List of allowed AI providers (multiselect)", json_schema_extra={"label": "Erlaubte Anbieter"}) allowedProviders: Optional[List[str]] = Field(None, description="List of allowed AI providers (multiselect)", json_schema_extra={"label": "Erlaubte Anbieter"})
@ -281,8 +281,8 @@ class ObservationPreview(BaseModel):
# Extended metadata fields # Extended metadata fields
mimeType: Optional[str] = Field(default=None, description="MIME type", json_schema_extra={"label": "MIME-Typ"}) mimeType: Optional[str] = Field(default=None, description="MIME type", json_schema_extra={"label": "MIME-Typ"})
size: Optional[str] = Field(default=None, description="File size", json_schema_extra={"label": "Größe"}) size: Optional[str] = Field(default=None, description="File size", json_schema_extra={"label": "Größe"})
created: Optional[str] = Field(default=None, description="Creation timestamp", json_schema_extra={"label": "Erstellt"}) created: Optional[float] = Field(default=None, description="Creation timestamp (UTC unix)", json_schema_extra={"label": "Erstellt", "frontend_type": "timestamp"})
modified: Optional[str] = Field(default=None, description="Modification timestamp", json_schema_extra={"label": "Geändert"}) modified: Optional[float] = Field(default=None, description="Modification timestamp (UTC unix)", json_schema_extra={"label": "Geändert", "frontend_type": "timestamp"})
typeGroup: Optional[str] = Field(default=None, description="Document type group", json_schema_extra={"label": "Typgruppe"}) typeGroup: Optional[str] = Field(default=None, description="Document type group", json_schema_extra={"label": "Typgruppe"})
documentId: Optional[str] = Field(default=None, description="Document ID", json_schema_extra={"label": "Dokument-ID"}) documentId: Optional[str] = Field(default=None, description="Document ID", json_schema_extra={"label": "Dokument-ID"})
reference: Optional[str] = Field(default=None, description="Document reference", json_schema_extra={"label": "Referenz"}) reference: Optional[str] = Field(default=None, description="Document reference", json_schema_extra={"label": "Referenz"})
@ -332,7 +332,7 @@ class ActionItem(BaseModel):
retryCount: int = Field(default=0, description="Number of retries attempted", json_schema_extra={"label": "Wiederholungen"}) retryCount: int = Field(default=0, description="Number of retries attempted", json_schema_extra={"label": "Wiederholungen"})
retryMax: int = Field(default=3, description="Maximum number of retries", json_schema_extra={"label": "Max. Wiederholungen"}) retryMax: int = Field(default=3, description="Maximum number of retries", json_schema_extra={"label": "Max. Wiederholungen"})
processingTime: Optional[float] = Field(None, description="Processing time in seconds", json_schema_extra={"label": "Bearbeitungszeit"}) processingTime: Optional[float] = Field(None, description="Processing time in seconds", json_schema_extra={"label": "Bearbeitungszeit"})
timestamp: float = Field(..., description="When the action was executed (UTC timestamp in seconds)", json_schema_extra={"label": "Zeitstempel"}) timestamp: float = Field(..., description="When the action was executed (UTC timestamp in seconds)", json_schema_extra={"label": "Zeitstempel", "frontend_type": "timestamp"})
result: Optional[str] = Field(None, description="Result of the action", json_schema_extra={"label": "Ergebnis"}) result: Optional[str] = Field(None, description="Result of the action", json_schema_extra={"label": "Ergebnis"})
def setSuccess(self, result: str = None) -> None: def setSuccess(self, result: str = None) -> None:
@ -361,13 +361,13 @@ class TaskItem(BaseModel):
workflowId: str = Field( workflowId: str = Field(
..., ...,
description="Workflow ID", description="Workflow ID",
json_schema_extra={"label": "Workflow-ID", "fk_target": {"db": "poweron_chat", "table": "ChatWorkflow"}}, json_schema_extra={"label": "Workflow-ID", "fk_target": {"db": "poweron_chat", "table": "ChatWorkflow", "labelField": "name"}},
) )
userInput: str = Field(..., description="User input that triggered the task", json_schema_extra={"label": "Benutzereingabe"}) userInput: str = Field(..., description="User input that triggered the task", json_schema_extra={"label": "Benutzereingabe"})
status: TaskStatus = Field(default=TaskStatus.PENDING, description="Task status", json_schema_extra={"label": "Status"}) status: TaskStatus = Field(default=TaskStatus.PENDING, description="Task status", json_schema_extra={"label": "Status"})
error: Optional[str] = Field(None, description="Error message if task failed", json_schema_extra={"label": "Fehler"}) error: Optional[str] = Field(None, description="Error message if task failed", json_schema_extra={"label": "Fehler"})
startedAt: Optional[float] = Field(None, description="When the task started (UTC timestamp in seconds)", json_schema_extra={"label": "Gestartet am"}) startedAt: Optional[float] = Field(None, description="When the task started (UTC timestamp in seconds)", json_schema_extra={"label": "Gestartet am", "frontend_type": "timestamp"})
finishedAt: Optional[float] = Field(None, description="When the task finished (UTC timestamp in seconds)", json_schema_extra={"label": "Beendet am"}) finishedAt: Optional[float] = Field(None, description="When the task finished (UTC timestamp in seconds)", json_schema_extra={"label": "Beendet am", "frontend_type": "timestamp"})
actionList: List[ActionItem] = Field(default_factory=list, description="List of actions to execute", json_schema_extra={"label": "Aktionen"}) actionList: List[ActionItem] = Field(default_factory=list, description="List of actions to execute", json_schema_extra={"label": "Aktionen"})
retryCount: int = Field(default=0, description="Number of retries attempted", json_schema_extra={"label": "Wiederholungen"}) retryCount: int = Field(default=0, description="Number of retries attempted", json_schema_extra={"label": "Wiederholungen"})
retryMax: int = Field(default=3, description="Maximum number of retries", json_schema_extra={"label": "Max. Wiederholungen"}) retryMax: int = Field(default=3, description="Maximum number of retries", json_schema_extra={"label": "Max. Wiederholungen"})
@ -402,7 +402,7 @@ class TaskHandover(BaseModel):
improvements: List[str] = Field(default_factory=list, description="Improvement suggestions", json_schema_extra={"label": "Verbesserungen"}) improvements: List[str] = Field(default_factory=list, description="Improvement suggestions", json_schema_extra={"label": "Verbesserungen"})
workflowSummary: Optional[str] = Field(None, description="Summarized workflow context", json_schema_extra={"label": "Workflow-Zusammenfassung"}) workflowSummary: Optional[str] = Field(None, description="Summarized workflow context", json_schema_extra={"label": "Workflow-Zusammenfassung"})
messageHistory: List[str] = Field(default_factory=list, description="Key message summaries", json_schema_extra={"label": "Nachrichtenverlauf"}) messageHistory: List[str] = Field(default_factory=list, description="Key message summaries", json_schema_extra={"label": "Nachrichtenverlauf"})
timestamp: float = Field(..., description="When the handover was created (UTC timestamp in seconds)", json_schema_extra={"label": "Zeitstempel"}) timestamp: float = Field(..., description="When the handover was created (UTC timestamp in seconds)", json_schema_extra={"label": "Zeitstempel", "frontend_type": "timestamp"})
handoverType: str = Field(default="task", description="Type of handover: task, phase, or workflow", json_schema_extra={"label": "Übergabetyp"}) handoverType: str = Field(default="task", description="Type of handover: task, phase, or workflow", json_schema_extra={"label": "Übergabetyp"})
class TaskContext(BaseModel): class TaskContext(BaseModel):

View file

@ -34,7 +34,7 @@ class ContentObject(BaseModel):
id: str = Field(default_factory=lambda: str(uuid.uuid4())) id: str = Field(default_factory=lambda: str(uuid.uuid4()))
fileId: str = Field( fileId: str = Field(
description="FK to the physical file", description="FK to the physical file",
json_schema_extra={"fk_target": {"db": "poweron_management", "table": "FileItem"}}, json_schema_extra={"fk_target": {"db": "poweron_management", "table": "FileItem", "labelField": "fileName"}},
) )
contentType: str = Field(description="text, image, videostream, audiostream, other") contentType: str = Field(description="text, image, videostream, audiostream, other")
data: str = Field(default="", description="Content data (text, base64, URL)") data: str = Field(default="", description="Content data (text, base64, URL)")

View file

@ -23,7 +23,7 @@ class DataSource(PowerOnModel):
) )
connectionId: str = Field( connectionId: str = Field(
description="FK to UserConnection", description="FK to UserConnection",
json_schema_extra={"label": "Verbindungs-ID", "fk_target": {"db": "poweron_app", "table": "UserConnection"}}, json_schema_extra={"label": "Verbindungs-ID", "fk_target": {"db": "poweron_app", "table": "UserConnection", "labelField": "externalUsername"}},
) )
sourceType: str = Field( sourceType: str = Field(
description="sharepointFolder, googleDriveFolder, outlookFolder, ftpFolder, clickupList (path under /team/...)", description="sharepointFolder, googleDriveFolder, outlookFolder, ftpFolder, clickupList (path under /team/...)",
@ -45,17 +45,17 @@ class DataSource(PowerOnModel):
featureInstanceId: Optional[str] = Field( featureInstanceId: Optional[str] = Field(
default=None, default=None,
description="Scoped to feature instance", description="Scoped to feature instance",
json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}}, json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}},
) )
mandateId: Optional[str] = Field( mandateId: Optional[str] = Field(
default=None, default=None,
description="Mandate scope", description="Mandate scope",
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate"}}, json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}},
) )
userId: str = Field( userId: str = Field(
default="", default="",
description="Owner user ID", description="Owner user ID",
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "User"}}, json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"}},
) )
autoSync: bool = Field( autoSync: bool = Field(
default=False, default=False,
@ -65,7 +65,7 @@ class DataSource(PowerOnModel):
lastSynced: Optional[float] = Field( lastSynced: Optional[float] = Field(
default=None, default=None,
description="Last sync timestamp", description="Last sync timestamp",
json_schema_extra={"label": "Letzter Sync"}, json_schema_extra={"label": "Letzter Sync", "frontend_type": "timestamp"},
) )
scope: str = Field( scope: str = Field(
default="personal", default="personal",
@ -91,5 +91,9 @@ class ExternalEntry(BaseModel):
isFolder: bool = Field(default=False, description="True if directory/folder") isFolder: bool = Field(default=False, description="True if directory/folder")
size: Optional[int] = Field(default=None, description="File size in bytes") size: Optional[int] = Field(default=None, description="File size in bytes")
mimeType: Optional[str] = Field(default=None, description="MIME type (files only)") mimeType: Optional[str] = Field(default=None, description="MIME type (files only)")
lastModified: Optional[float] = Field(default=None, description="Last modification timestamp") lastModified: Optional[float] = Field(
default=None,
description="Last modification timestamp",
json_schema_extra={"frontend_type": "timestamp"},
)
metadata: Dict[str, Any] = Field(default_factory=dict, description="Provider-specific metadata") metadata: Dict[str, Any] = Field(default_factory=dict, description="Provider-specific metadata")

View file

@ -23,11 +23,11 @@ class FeatureDataSource(PowerOnModel):
) )
featureInstanceId: str = Field( featureInstanceId: str = Field(
description="FK to FeatureInstance", description="FK to FeatureInstance",
json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}}, json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}},
) )
featureCode: str = Field( featureCode: str = Field(
description="Feature code (e.g. trustee, commcoach)", description="Feature code (e.g. trustee, commcoach)",
json_schema_extra={"label": "Feature", "fk_target": {"db": "poweron_app", "table": "Feature", "column": "code"}}, json_schema_extra={"label": "Feature", "fk_target": {"db": "poweron_app", "table": "Feature", "column": "code", "labelField": "code"}},
) )
tableName: str = Field( tableName: str = Field(
description="Table name from DATA_OBJECTS meta (e.g. TrusteePosition)", description="Table name from DATA_OBJECTS meta (e.g. TrusteePosition)",
@ -44,16 +44,16 @@ class FeatureDataSource(PowerOnModel):
mandateId: str = Field( mandateId: str = Field(
default="", default="",
description="Mandate scope", description="Mandate scope",
json_schema_extra={"label": "Mandant", "fk_target": {"db": "poweron_app", "table": "Mandate"}}, json_schema_extra={"label": "Mandant", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}},
) )
userId: str = Field( userId: str = Field(
default="", default="",
description="Owner user ID", description="Owner user ID",
json_schema_extra={"label": "Benutzer", "fk_target": {"db": "poweron_app", "table": "User"}}, json_schema_extra={"label": "Benutzer", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"}},
) )
workspaceInstanceId: str = Field( workspaceInstanceId: str = Field(
description="Workspace feature instance where this source is used", description="Workspace feature instance where this source is used",
json_schema_extra={"label": "Workspace", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}}, json_schema_extra={"label": "Workspace", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}},
) )
scope: str = Field( scope: str = Field(
default="personal", default="personal",

View file

@ -43,7 +43,7 @@ class FeatureInstance(PowerOnModel):
"frontend_type": "select", "frontend_type": "select",
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": True, "frontend_required": True,
"fk_target": {"db": "poweron_app", "table": "Feature", "column": "code"}, "fk_target": {"db": "poweron_app", "table": "Feature", "column": "code", "labelField": "code"},
}, },
) )
mandateId: str = Field( mandateId: str = Field(
@ -53,7 +53,7 @@ class FeatureInstance(PowerOnModel):
"frontend_type": "text", "frontend_type": "text",
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": True, "frontend_required": True,
"fk_target": {"db": "poweron_app", "table": "Mandate"}, "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
}, },
) )
label: str = Field( label: str = Field(

View file

@ -29,7 +29,7 @@ class FileFolder(PowerOnModel):
"frontend_type": "text", "frontend_type": "text",
"frontend_readonly": False, "frontend_readonly": False,
"frontend_required": False, "frontend_required": False,
"fk_target": {"db": "poweron_management", "table": "FileFolder"}, "fk_target": {"db": "poweron_management", "table": "FileFolder", "labelField": "name"},
}, },
) )
mandateId: Optional[str] = Field( mandateId: Optional[str] = Field(
@ -40,7 +40,7 @@ class FileFolder(PowerOnModel):
"frontend_type": "text", "frontend_type": "text",
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": False, "frontend_required": False,
"fk_target": {"db": "poweron_app", "table": "Mandate"}, "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
}, },
) )
featureInstanceId: Optional[str] = Field( featureInstanceId: Optional[str] = Field(
@ -51,7 +51,7 @@ class FileFolder(PowerOnModel):
"frontend_type": "text", "frontend_type": "text",
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": False, "frontend_required": False,
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"}, "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
}, },
) )
scope: str = Field( scope: str = Field(

View file

@ -30,9 +30,7 @@ class FileItem(PowerOnModel):
"frontend_type": "text", "frontend_type": "text",
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": False, "frontend_required": False,
"fk_model": "Mandate", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
"fk_label_field": "label",
"fk_target": {"db": "poweron_app", "table": "Mandate"},
}, },
) )
featureInstanceId: Optional[str] = Field( featureInstanceId: Optional[str] = Field(
@ -43,9 +41,7 @@ class FileItem(PowerOnModel):
"frontend_type": "text", "frontend_type": "text",
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": False, "frontend_required": False,
"fk_model": "FeatureInstance", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
"fk_label_field": "label",
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
}, },
) )
mimeType: str = Field( mimeType: str = Field(
@ -80,7 +76,7 @@ class FileItem(PowerOnModel):
"frontend_type": "text", "frontend_type": "text",
"frontend_readonly": False, "frontend_readonly": False,
"frontend_required": False, "frontend_required": False,
"fk_target": {"db": "poweron_management", "table": "FileFolder"}, "fk_target": {"db": "poweron_management", "table": "FileFolder", "labelField": "name"},
}, },
) )
description: Optional[str] = Field( description: Optional[str] = Field(

View file

@ -37,7 +37,7 @@ class Invitation(PowerOnModel):
"frontend_type": "text", "frontend_type": "text",
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": True, "frontend_required": True,
"fk_target": {"db": "poweron_app", "table": "Mandate"}, "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
}, },
) )
featureInstanceId: Optional[str] = Field( featureInstanceId: Optional[str] = Field(
@ -48,7 +48,7 @@ class Invitation(PowerOnModel):
"frontend_type": "text", "frontend_type": "text",
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": False, "frontend_required": False,
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"}, "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
}, },
) )
roleIds: List[str] = Field( roleIds: List[str] = Field(
@ -80,7 +80,7 @@ class Invitation(PowerOnModel):
"frontend_type": "text", "frontend_type": "text",
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": False, "frontend_required": False,
"fk_target": {"db": "poweron_app", "table": "User"}, "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
}, },
) )
usedAt: Optional[float] = Field( usedAt: Optional[float] = Field(

View file

@ -30,17 +30,17 @@ class FileContentIndex(PowerOnModel):
) )
userId: str = Field( userId: str = Field(
description="Owner user ID", description="Owner user ID",
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "User"}}, json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"}},
) )
featureInstanceId: str = Field( featureInstanceId: str = Field(
default="", default="",
description="Feature instance scope", description="Feature instance scope",
json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}}, json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}},
) )
mandateId: str = Field( mandateId: str = Field(
default="", default="",
description="Mandate scope", description="Mandate scope",
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate"}}, json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}},
) )
fileName: str = Field( fileName: str = Field(
description="Original file name", description="Original file name",
@ -78,7 +78,7 @@ class FileContentIndex(PowerOnModel):
extractedAt: float = Field( extractedAt: float = Field(
default_factory=getUtcTimestamp, default_factory=getUtcTimestamp,
description="Extraction timestamp", description="Extraction timestamp",
json_schema_extra={"label": "Extrahiert am"}, json_schema_extra={"label": "Extrahiert am", "frontend_type": "timestamp"},
) )
status: str = Field( status: str = Field(
default="pending", default="pending",
@ -116,16 +116,16 @@ class ContentChunk(PowerOnModel):
) )
fileId: str = Field( fileId: str = Field(
description="FK to the source file", description="FK to the source file",
json_schema_extra={"label": "Datei-ID", "fk_target": {"db": "poweron_management", "table": "FileItem"}}, json_schema_extra={"label": "Datei-ID", "fk_target": {"db": "poweron_management", "table": "FileItem", "labelField": "fileName"}},
) )
userId: str = Field( userId: str = Field(
description="Owner user ID", description="Owner user ID",
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "User"}}, json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"}},
) )
featureInstanceId: str = Field( featureInstanceId: str = Field(
default="", default="",
description="Feature instance scope", description="Feature instance scope",
json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}}, json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}},
) )
contentType: str = Field( contentType: str = Field(
description="Content type: text, image, videostream, audiostream, other", description="Content type: text, image, videostream, audiostream, other",
@ -214,16 +214,16 @@ class WorkflowMemory(PowerOnModel):
) )
workflowId: str = Field( workflowId: str = Field(
description="FK to the workflow", description="FK to the workflow",
json_schema_extra={"label": "Workflow-ID", "fk_target": {"db": "poweron_chat", "table": "ChatWorkflow"}}, json_schema_extra={"label": "Workflow-ID", "fk_target": {"db": "poweron_chat", "table": "ChatWorkflow", "labelField": "name"}},
) )
userId: str = Field( userId: str = Field(
description="Owner user ID", description="Owner user ID",
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "User"}}, json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"}},
) )
featureInstanceId: str = Field( featureInstanceId: str = Field(
default="", default="",
description="Feature instance scope", description="Feature instance scope",
json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}}, json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}},
) )
key: str = Field( key: str = Field(
description="Key identifier (e.g. 'entity:companyName')", description="Key identifier (e.g. 'entity:companyName')",

View file

@ -31,9 +31,7 @@ class UserMandate(PowerOnModel):
"frontend_type": "select", "frontend_type": "select",
"frontend_readonly": False, "frontend_readonly": False,
"frontend_required": True, "frontend_required": True,
"fk_model": "User", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
"fk_label_field": "username",
"fk_target": {"db": "poweron_app", "table": "User"},
}, },
) )
mandateId: str = Field( mandateId: str = Field(
@ -43,9 +41,7 @@ class UserMandate(PowerOnModel):
"frontend_type": "select", "frontend_type": "select",
"frontend_readonly": False, "frontend_readonly": False,
"frontend_required": True, "frontend_required": True,
"fk_model": "Mandate", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
"fk_label_field": "label",
"fk_target": {"db": "poweron_app", "table": "Mandate"},
}, },
) )
enabled: bool = Field( enabled: bool = Field(
@ -73,9 +69,7 @@ class FeatureAccess(PowerOnModel):
"frontend_type": "select", "frontend_type": "select",
"frontend_readonly": False, "frontend_readonly": False,
"frontend_required": True, "frontend_required": True,
"fk_model": "User", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
"fk_label_field": "username",
"fk_target": {"db": "poweron_app", "table": "User"},
}, },
) )
featureInstanceId: str = Field( featureInstanceId: str = Field(
@ -85,9 +79,7 @@ class FeatureAccess(PowerOnModel):
"frontend_type": "select", "frontend_type": "select",
"frontend_readonly": False, "frontend_readonly": False,
"frontend_required": True, "frontend_required": True,
"fk_model": "FeatureInstance", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
"fk_label_field": "label",
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
}, },
) )
enabled: bool = Field( enabled: bool = Field(
@ -115,7 +107,7 @@ class UserMandateRole(PowerOnModel):
"frontend_type": "text", "frontend_type": "text",
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": True, "frontend_required": True,
"fk_target": {"db": "poweron_app", "table": "UserMandate"}, "fk_target": {"db": "poweron_app", "table": "UserMandate", "labelField": None},
}, },
) )
roleId: str = Field( roleId: str = Field(
@ -125,9 +117,7 @@ class UserMandateRole(PowerOnModel):
"frontend_type": "select", "frontend_type": "select",
"frontend_readonly": False, "frontend_readonly": False,
"frontend_required": True, "frontend_required": True,
"fk_model": "Role", "fk_target": {"db": "poweron_app", "table": "Role", "labelField": "roleLabel"},
"fk_label_field": "roleLabel",
"fk_target": {"db": "poweron_app", "table": "Role"},
}, },
) )
@ -150,7 +140,7 @@ class FeatureAccessRole(PowerOnModel):
"frontend_type": "text", "frontend_type": "text",
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": True, "frontend_required": True,
"fk_target": {"db": "poweron_app", "table": "FeatureAccess"}, "fk_target": {"db": "poweron_app", "table": "FeatureAccess", "labelField": None},
}, },
) )
roleId: str = Field( roleId: str = Field(
@ -160,8 +150,6 @@ class FeatureAccessRole(PowerOnModel):
"frontend_type": "select", "frontend_type": "select",
"frontend_readonly": False, "frontend_readonly": False,
"frontend_required": True, "frontend_required": True,
"fk_model": "Role", "fk_target": {"db": "poweron_app", "table": "Role", "labelField": "roleLabel"},
"fk_label_field": "roleLabel",
"fk_target": {"db": "poweron_app", "table": "Role"},
}, },
) )

View file

@ -64,7 +64,7 @@ class MessagingSubscription(PowerOnModel):
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": False, "frontend_required": False,
"label": "Mandanten-ID", "label": "Mandanten-ID",
"fk_target": {"db": "poweron_app", "table": "Mandate"}, "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
}, },
) )
featureInstanceId: str = Field( featureInstanceId: str = Field(
@ -74,7 +74,7 @@ class MessagingSubscription(PowerOnModel):
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": False, "frontend_required": False,
"label": "Feature-Instanz-ID", "label": "Feature-Instanz-ID",
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"}, "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
}, },
) )
description: Optional[str] = Field( description: Optional[str] = Field(
@ -131,7 +131,7 @@ class MessagingSubscriptionRegistration(BaseModel):
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": False, "frontend_required": False,
"label": "Mandanten-ID", "label": "Mandanten-ID",
"fk_target": {"db": "poweron_app", "table": "Mandate"}, "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
}, },
) )
featureInstanceId: str = Field( featureInstanceId: str = Field(
@ -141,7 +141,7 @@ class MessagingSubscriptionRegistration(BaseModel):
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": False, "frontend_required": False,
"label": "Feature-Instanz-ID", "label": "Feature-Instanz-ID",
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"}, "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
}, },
) )
subscriptionId: str = Field( subscriptionId: str = Field(
@ -160,7 +160,7 @@ class MessagingSubscriptionRegistration(BaseModel):
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": False, "frontend_required": False,
"label": "Benutzer-ID", "label": "Benutzer-ID",
"fk_target": {"db": "poweron_app", "table": "User"}, "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
}, },
) )
channel: MessagingChannel = Field( channel: MessagingChannel = Field(
@ -249,7 +249,7 @@ class MessagingDelivery(BaseModel):
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": False, "frontend_required": False,
"label": "Benutzer-ID", "label": "Benutzer-ID",
"fk_target": {"db": "poweron_app", "table": "User"}, "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
}, },
) )
channel: MessagingChannel = Field( channel: MessagingChannel = Field(
@ -296,7 +296,7 @@ class MessagingDelivery(BaseModel):
default=None, default=None,
description="When the delivery was sent (UTC timestamp in seconds)", description="When the delivery was sent (UTC timestamp in seconds)",
json_schema_extra={ json_schema_extra={
"frontend_type": "datetime", "frontend_type": "timestamp",
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": False, "frontend_required": False,
"label": "Gesendet am", "label": "Gesendet am",

View file

@ -65,7 +65,7 @@ class UserNotification(PowerOnModel):
"frontend_type": "text", "frontend_type": "text",
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": True, "frontend_required": True,
"fk_target": {"db": "poweron_app", "table": "User"}, "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
}, },
) )

View file

@ -63,9 +63,7 @@ class Role(PowerOnModel):
"frontend_readonly": True, "frontend_readonly": True,
"frontend_visible": True, "frontend_visible": True,
"frontend_required": False, "frontend_required": False,
"fk_model": "Mandate", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
"fk_label_field": "label",
"fk_target": {"db": "poweron_app", "table": "Mandate"},
}, },
) )
featureInstanceId: Optional[str] = Field( featureInstanceId: Optional[str] = Field(
@ -77,9 +75,7 @@ class Role(PowerOnModel):
"frontend_readonly": True, "frontend_readonly": True,
"frontend_visible": True, "frontend_visible": True,
"frontend_required": False, "frontend_required": False,
"fk_model": "FeatureInstance", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
"fk_label_field": "label",
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
}, },
) )
featureCode: Optional[str] = Field( featureCode: Optional[str] = Field(
@ -115,9 +111,7 @@ class AccessRule(PowerOnModel):
"frontend_type": "select", "frontend_type": "select",
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": True, "frontend_required": True,
"fk_model": "Role", "fk_target": {"db": "poweron_app", "table": "Role", "labelField": "roleLabel"},
"fk_label_field": "roleLabel",
"fk_target": {"db": "poweron_app", "table": "Role"},
}, },
) )
context: AccessRuleContext = Field( context: AccessRuleContext = Field(

View file

@ -47,7 +47,7 @@ class Token(PowerOnModel):
) )
userId: str = Field( userId: str = Field(
..., ...,
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "User"}}, json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"}},
) )
authority: AuthAuthority = Field( authority: AuthAuthority = Field(
..., ...,
@ -56,7 +56,7 @@ class Token(PowerOnModel):
connectionId: Optional[str] = Field( connectionId: Optional[str] = Field(
None, None,
description="ID of the connection this token belongs to", description="ID of the connection this token belongs to",
json_schema_extra={"label": "Verbindungs-ID", "fk_target": {"db": "poweron_app", "table": "UserConnection"}}, json_schema_extra={"label": "Verbindungs-ID", "fk_target": {"db": "poweron_app", "table": "UserConnection", "labelField": "externalUsername"}},
) )
tokenPurpose: Optional[TokenPurpose] = Field( tokenPurpose: Optional[TokenPurpose] = Field(
default=None, default=None,
@ -73,7 +73,7 @@ class Token(PowerOnModel):
) )
expiresAt: float = Field( expiresAt: float = Field(
description="When the token expires (UTC timestamp in seconds)", description="When the token expires (UTC timestamp in seconds)",
json_schema_extra={"label": "Laeuft ab am"}, json_schema_extra={"label": "Laeuft ab am", "frontend_type": "timestamp"},
) )
tokenRefresh: Optional[str] = Field( tokenRefresh: Optional[str] = Field(
default=None, default=None,
@ -87,12 +87,12 @@ class Token(PowerOnModel):
revokedAt: Optional[float] = Field( revokedAt: Optional[float] = Field(
None, None,
description="When the token was revoked (UTC timestamp in seconds)", description="When the token was revoked (UTC timestamp in seconds)",
json_schema_extra={"label": "Widerrufen am"}, json_schema_extra={"label": "Widerrufen am", "frontend_type": "timestamp"},
) )
revokedBy: Optional[str] = Field( revokedBy: Optional[str] = Field(
None, None,
description="User ID who revoked the token (admin/self)", description="User ID who revoked the token (admin/self)",
json_schema_extra={"label": "Widerrufen von", "fk_target": {"db": "poweron_app", "table": "User"}}, json_schema_extra={"label": "Widerrufen von", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"}},
) )
reason: Optional[str] = Field( reason: Optional[str] = Field(
None, None,
@ -139,7 +139,7 @@ class AuthEvent(PowerOnModel):
"frontend_type": "text", "frontend_type": "text",
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": True, "frontend_required": True,
"fk_target": {"db": "poweron_app", "table": "User"}, "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
}, },
) )
eventType: str = Field( eventType: str = Field(
@ -149,7 +149,7 @@ class AuthEvent(PowerOnModel):
timestamp: float = Field( timestamp: float = Field(
default_factory=getUtcTimestamp, default_factory=getUtcTimestamp,
description="Unix timestamp when the event occurred", description="Unix timestamp when the event occurred",
json_schema_extra={"label": "Zeitstempel", "frontend_type": "datetime", "frontend_readonly": True, "frontend_required": True}, json_schema_extra={"label": "Zeitstempel", "frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": True},
) )
ipAddress: Optional[str] = Field( ipAddress: Optional[str] = Field(
default=None, default=None,

View file

@ -207,7 +207,7 @@ class MandateSubscription(PowerOnModel):
mandateId: str = Field( mandateId: str = Field(
..., ...,
description="Foreign key to Mandate", description="Foreign key to Mandate",
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate"}}, json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}},
) )
planKey: str = Field( planKey: str = Field(
..., ...,
@ -226,35 +226,35 @@ class MandateSubscription(PowerOnModel):
json_schema_extra={"label": "Wiederkehrend"}, json_schema_extra={"label": "Wiederkehrend"},
) )
startedAt: datetime = Field( startedAt: float = Field(
default_factory=lambda: datetime.now(timezone.utc), default_factory=lambda: datetime.now(timezone.utc).timestamp(),
description="Record creation timestamp", description="Record creation timestamp (UTC unix)",
json_schema_extra={"label": "Gestartet"}, json_schema_extra={"label": "Gestartet", "frontend_type": "timestamp"},
) )
effectiveFrom: Optional[datetime] = Field( effectiveFrom: Optional[float] = Field(
None, None,
description="When this subscription becomes operative. None = immediate. Set for SCHEDULED subs.", description="When this subscription becomes operative (UTC unix). None = immediate.",
json_schema_extra={"label": "Wirksam ab"}, json_schema_extra={"label": "Wirksam ab", "frontend_type": "timestamp"},
) )
endedAt: Optional[datetime] = Field( endedAt: Optional[float] = Field(
None, None,
description="When subscription ended (terminal)", description="When subscription ended (UTC unix)",
json_schema_extra={"label": "Beendet"}, json_schema_extra={"label": "Beendet", "frontend_type": "timestamp"},
) )
currentPeriodStart: Optional[datetime] = Field( currentPeriodStart: Optional[float] = Field(
None, None,
description="Current billing period start (synced from Stripe)", description="Current billing period start (UTC unix, synced from Stripe)",
json_schema_extra={"label": "Periodenbeginn"}, json_schema_extra={"label": "Periodenbeginn", "frontend_type": "timestamp"},
) )
currentPeriodEnd: Optional[datetime] = Field( currentPeriodEnd: Optional[float] = Field(
None, None,
description="Current billing period end (synced from Stripe)", description="Current billing period end (UTC unix, synced from Stripe)",
json_schema_extra={"label": "Periodenende"}, json_schema_extra={"label": "Periodenende", "frontend_type": "timestamp"},
) )
trialEndsAt: Optional[datetime] = Field( trialEndsAt: Optional[float] = Field(
None, None,
description="Trial expiry timestamp", description="Trial expiry timestamp (UTC unix)",
json_schema_extra={"label": "Trial endet"}, json_schema_extra={"label": "Trial endet", "frontend_type": "timestamp"},
) )
snapshotPricePerUserCHF: float = Field( snapshotPricePerUserCHF: float = Field(

View file

@ -397,9 +397,7 @@ class UserConnection(PowerOnModel):
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": False, "frontend_required": False,
"label": "Benutzer-ID", "label": "Benutzer-ID",
"fk_model": "User", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
"fk_label_field": "username",
"fk_target": {"db": "poweron_app", "table": "User"},
}, },
) )
authority: AuthAuthority = Field( authority: AuthAuthority = Field(
@ -648,7 +646,7 @@ class UserInDB(User):
resetTokenExpires: Optional[float] = Field( resetTokenExpires: Optional[float] = Field(
None, None,
description="Reset token expiration (UTC timestamp in seconds)", description="Reset token expiration (UTC timestamp in seconds)",
json_schema_extra={"label": "Token läuft ab"}, json_schema_extra={"label": "Token läuft ab", "frontend_type": "timestamp"},
) )
@ -689,12 +687,12 @@ class UserVoicePreferences(PowerOnModel):
) )
userId: str = Field( userId: str = Field(
description="User ID", description="User ID",
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "User"}}, json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"}},
) )
mandateId: Optional[str] = Field( mandateId: Optional[str] = Field(
default=None, default=None,
description="Mandate scope (None = global for user)", description="Mandate scope (None = global for user)",
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate"}}, json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}},
) )
sttLanguage: str = Field( sttLanguage: str = Field(
default="de-DE", default="de-DE",

View file

@ -14,8 +14,8 @@ from modules.datamodels.datamodelExtraction import ContentExtracted, ContentPart
class UdmMetadata(BaseModel): class UdmMetadata(BaseModel):
title: Optional[str] = None title: Optional[str] = None
author: Optional[str] = None author: Optional[str] = None
createdAt: Optional[str] = None createdAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
modifiedAt: Optional[str] = None modifiedAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
sourcePath: str = "" sourcePath: str = ""
tags: List[str] = Field(default_factory=list) tags: List[str] = Field(default_factory=list)
custom: Dict[str, Any] = Field(default_factory=dict) custom: Dict[str, Any] = Field(default_factory=dict)

View file

@ -27,9 +27,7 @@ class Prompt(PowerOnModel):
"frontend_type": "text", "frontend_type": "text",
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": False, "frontend_required": False,
"fk_model": "Mandate", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
"fk_label_field": "label",
"fk_target": {"db": "poweron_app", "table": "Mandate"},
}, },
) )
isSystem: bool = Field( isSystem: bool = Field(

View file

@ -0,0 +1,199 @@
# Copyright (c) 2025 Patrick Motsch
# All rights reserved.
"""
View models for the /api/attributes/ endpoint.
These extend base DB models with computed / enriched fields that the gateway
adds at response time (JOINs, aggregations, synthetics). They are NEVER used
for DB operations only for ``getModelAttributeDefinitions()`` so the frontend
can resolve column types via ``resolveColumnTypes`` without hardcoding.
Naming convention: ``{BaseModel}View``.
``getModelClasses()`` in ``attributeUtils.py`` auto-discovers every
``datamodel*.py`` under ``modules/datamodels/`` so placing them here is
sufficient for registration.
"""
from typing import Optional, List
from pydantic import Field
from modules.datamodels.datamodelBase import MODEL_REGISTRY, PowerOnModel
from modules.datamodels.datamodelMembership import UserMandate, FeatureAccess
from modules.datamodels.datamodelBilling import BillingTransaction
from modules.datamodels.datamodelSubscription import MandateSubscription
from modules.datamodels.datamodelUiLanguage import UiLanguageSet
from modules.features.neutralization.datamodelFeatureNeutralizer import DataNeutralizerAttributes
from modules.shared.i18nRegistry import i18nModel
# ============================================================================
# Punkt 1a: UserMandate + enriched user fields
# ============================================================================
@i18nModel("Benutzer-Mandant (Ansicht)")
class UserMandateView(UserMandate):
"""UserMandate erweitert um aufgeloeste Benutzerfelder und Rollenlabels."""
username: Optional[str] = Field(
default=None,
description="Username (resolved from userId)",
json_schema_extra={"label": "Benutzername", "frontend_type": "text", "frontend_readonly": True},
)
email: Optional[str] = Field(
default=None,
description="E-Mail address (resolved from userId)",
json_schema_extra={"label": "E-Mail", "frontend_type": "text", "frontend_readonly": True},
)
fullName: Optional[str] = Field(
default=None,
description="Full name (resolved from userId)",
json_schema_extra={"label": "Vollstaendiger Name", "frontend_type": "text", "frontend_readonly": True},
)
roleLabels: Optional[List[str]] = Field(
default=None,
description="Role labels (resolved from junction table)",
json_schema_extra={"label": "Rollen", "frontend_type": "text", "frontend_readonly": True},
)
# ============================================================================
# Punkt 1b: FeatureAccess + enriched user fields
# ============================================================================
@i18nModel("Feature-Zugang (Ansicht)")
class FeatureAccessView(FeatureAccess):
"""FeatureAccess erweitert um aufgeloeste Benutzerfelder und Rollenlabels."""
username: Optional[str] = Field(
default=None,
description="Username (resolved from userId)",
json_schema_extra={"label": "Benutzername", "frontend_type": "text", "frontend_readonly": True},
)
email: Optional[str] = Field(
default=None,
description="E-Mail address (resolved from userId)",
json_schema_extra={"label": "E-Mail", "frontend_type": "text", "frontend_readonly": True},
)
fullName: Optional[str] = Field(
default=None,
description="Full name (resolved from userId)",
json_schema_extra={"label": "Vollstaendiger Name", "frontend_type": "text", "frontend_readonly": True},
)
roleLabels: Optional[List[str]] = Field(
default=None,
description="Role labels (resolved from junction table)",
json_schema_extra={"label": "Rollen", "frontend_type": "text", "frontend_readonly": True},
)
# ============================================================================
# Punkt 1d: BillingTransaction + enriched mandate/user names
# ============================================================================
@i18nModel("Transaktion (Ansicht)")
class BillingTransactionView(BillingTransaction):
"""BillingTransaction erweitert um aufgeloeste Mandanten-/Benutzernamen."""
mandateName: Optional[str] = Field(
default=None,
description="Mandate name (resolved from accountId/mandateId)",
json_schema_extra={"label": "Mandant", "frontend_type": "text", "frontend_readonly": True},
)
userName: Optional[str] = Field(
default=None,
description="User name (resolved from createdByUserId)",
json_schema_extra={"label": "Benutzer", "frontend_type": "text", "frontend_readonly": True},
)
# ============================================================================
# Punkt 3a: MandateSubscription + aggregated fields
# ============================================================================
@i18nModel("Abonnement (Ansicht)")
class MandateSubscriptionView(MandateSubscription):
"""MandateSubscription erweitert um aggregierte Laufzeitwerte."""
mandateName: Optional[str] = Field(
default=None,
description="Mandate name (resolved from mandateId)",
json_schema_extra={"label": "Mandant", "frontend_type": "text", "frontend_readonly": True},
)
planTitle: Optional[str] = Field(
default=None,
description="Plan title (resolved from planKey)",
json_schema_extra={"label": "Plan", "frontend_type": "text", "frontend_readonly": True},
)
activeUsers: Optional[int] = Field(
default=None,
description="Number of active users in the mandate",
json_schema_extra={"label": "Benutzer", "frontend_type": "number", "frontend_readonly": True},
)
activeInstances: Optional[int] = Field(
default=None,
description="Number of active feature instances in the mandate",
json_schema_extra={"label": "Module", "frontend_type": "number", "frontend_readonly": True},
)
monthlyRevenueCHF: Optional[float] = Field(
default=None,
description="Calculated monthly revenue in CHF",
json_schema_extra={"label": "Umsatz pro Monat", "frontend_type": "number", "frontend_readonly": True},
)
# ============================================================================
# Punkt 3b: UiLanguageSet + computed counts
# ============================================================================
@i18nModel("Sprachset (Ansicht)")
class UiLanguageSetView(UiLanguageSet):
"""UiLanguageSet erweitert um berechnete Uebersetzungszaehler."""
uiCount: Optional[int] = Field(
default=None,
description="Number of UI translation entries",
json_schema_extra={"label": "UI", "frontend_type": "number", "frontend_readonly": True},
)
gatewayCount: Optional[int] = Field(
default=None,
description="Number of gateway/API translation entries",
json_schema_extra={"label": "API", "frontend_type": "number", "frontend_readonly": True},
)
entriesCount: Optional[int] = Field(
default=None,
description="Total number of translation entries",
json_schema_extra={"label": "Gesamt", "frontend_type": "number", "frontend_readonly": True},
)
# ============================================================================
# Punkt 1c: DataNeutralizerAttributes + enriched fields
#
# DataNeutralizerAttributes extends BaseModel (not PowerOnModel), so its
# subclass does NOT auto-register in MODEL_REGISTRY. We register manually.
# ============================================================================
@i18nModel("Neutralisierungs-Zuordnung (Ansicht)")
class DataNeutralizerAttributesView(DataNeutralizerAttributes):
"""DataNeutralizerAttributes erweitert um synthetische/aufgeloeste Felder."""
placeholder: Optional[str] = Field(
default=None,
description="Synthetic placeholder string [patternType.id]",
json_schema_extra={"label": "Platzhalter", "frontend_type": "text", "frontend_readonly": True},
)
username: Optional[str] = Field(
default=None,
description="Username (resolved from userId)",
json_schema_extra={"label": "Benutzer", "frontend_type": "text", "frontend_readonly": True},
)
instanceLabel: Optional[str] = Field(
default=None,
description="Feature instance label (resolved from featureInstanceId)",
json_schema_extra={"label": "Feature-Instanz", "frontend_type": "text", "frontend_readonly": True},
)
# Manual registration for non-PowerOnModel view
MODEL_REGISTRY["DataNeutralizerAttributesView"] = DataNeutralizerAttributesView # type: ignore[assignment]

View file

@ -503,11 +503,12 @@ class PwgDemo2026(_BaseDemoConfig):
if monthlyRent <= 0: if monthlyRent <= 0:
continue continue
for month in range(1, 13): for month in range(1, 13):
bookingDate = f"{year}-{month:02d}-01" from datetime import datetime as _dtCls, timezone as _tzCls
bookingTs = _dtCls(year, month, 1, tzinfo=_tzCls.utc).timestamp()
entryRef = f"PWG-{tenant.get('contactNumber')}-{year}{month:02d}" entryRef = f"PWG-{tenant.get('contactNumber')}-{year}{month:02d}"
entry = TrusteeDataJournalEntry( entry = TrusteeDataJournalEntry(
externalId=entryRef, externalId=entryRef,
bookingDate=bookingDate, bookingDate=bookingTs,
reference=entryRef, reference=entryRef,
description=f"Mietzins {month:02d}/{year} {name}", description=f"Mietzins {month:02d}/{year} {name}",
currency="CHF", currency="CHF",

View file

@ -35,17 +35,6 @@ from modules.features.chatbot.mainChatbot import getEventManager
from modules.shared.i18nRegistry import apiRouteContext from modules.shared.i18nRegistry import apiRouteContext
routeApiMsg = apiRouteContext("routeFeatureChatbot") routeApiMsg = apiRouteContext("routeFeatureChatbot")
# Pre-warm AI connectors when this router loads (before first request).
# Ensures connectors are ready; avoids 48 s delay on first chatbot message.
try:
import modules.aicore.aicoreModelRegistry # noqa: F401
from modules.aicore.aicoreModelRegistry import modelRegistry
modelRegistry.ensureConnectorsRegistered()
modelRegistry.refreshModels(force=True)
logging.getLogger(__name__).info("Chatbot router: AI connectors pre-warmed")
except Exception as e:
logging.getLogger(__name__).warning(f"Chatbot AI pre-warm failed: {e}")
# Configure logger # Configure logger
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)

View file

@ -90,7 +90,7 @@ class CoachingContext(PowerOnModel):
metadata: Optional[str] = Field(default=None, description="JSON object with flexible metadata") metadata: Optional[str] = Field(default=None, description="JSON object with flexible metadata")
sessionCount: int = Field(default=0) sessionCount: int = Field(default=0)
taskCount: int = Field(default=0) taskCount: int = Field(default=0)
lastSessionAt: Optional[str] = Field(default=None) lastSessionAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
rollingOverview: Optional[str] = Field(default=None, description="AI summary of older sessions for long context history") rollingOverview: Optional[str] = Field(default=None, description="AI summary of older sessions for long context history")
rollingOverviewUpToSessionCount: Optional[int] = Field(default=None, description="Session count covered by rollingOverview") rollingOverviewUpToSessionCount: Optional[int] = Field(default=None, description="Session count covered by rollingOverview")
@ -113,8 +113,8 @@ class CoachingSession(PowerOnModel):
messageCount: int = Field(default=0) messageCount: int = Field(default=0)
competenceScore: Optional[float] = Field(default=None, ge=0.0, le=100.0) competenceScore: Optional[float] = Field(default=None, ge=0.0, le=100.0)
emailSent: bool = Field(default=False) emailSent: bool = Field(default=False)
startedAt: Optional[str] = Field(default=None) startedAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
endedAt: Optional[str] = Field(default=None) endedAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
class CoachingMessage(PowerOnModel): class CoachingMessage(PowerOnModel):
@ -141,8 +141,8 @@ class CoachingTask(PowerOnModel):
description: Optional[str] = Field(default=None) description: Optional[str] = Field(default=None)
status: CoachingTaskStatus = Field(default=CoachingTaskStatus.OPEN) status: CoachingTaskStatus = Field(default=CoachingTaskStatus.OPEN)
priority: CoachingTaskPriority = Field(default=CoachingTaskPriority.MEDIUM) priority: CoachingTaskPriority = Field(default=CoachingTaskPriority.MEDIUM)
dueDate: Optional[str] = Field(default=None) dueDate: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "date"})
completedAt: Optional[str] = Field(default=None) completedAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
class CoachingScore(PowerOnModel): class CoachingScore(PowerOnModel):
@ -171,7 +171,7 @@ class CoachingUserProfile(PowerOnModel):
longestStreak: int = Field(default=0) longestStreak: int = Field(default=0)
totalSessions: int = Field(default=0) totalSessions: int = Field(default=0)
totalMinutes: int = Field(default=0) totalMinutes: int = Field(default=0)
lastSessionAt: Optional[str] = Field(default=None) lastSessionAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
# ============================================================================ # ============================================================================
@ -204,7 +204,7 @@ class CoachingBadge(PowerOnModel):
mandateId: str = Field(description="Mandate ID") mandateId: str = Field(description="Mandate ID")
instanceId: str = Field(description="Feature instance ID") instanceId: str = Field(description="Feature instance ID")
badgeKey: str = Field(description="Badge identifier, e.g. 'streak_7'") badgeKey: str = Field(description="Badge identifier, e.g. 'streak_7'")
awardedAt: Optional[str] = Field(default=None) awardedAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
# ============================================================================ # ============================================================================
@ -238,14 +238,14 @@ class CreateTaskRequest(BaseModel):
title: str title: str
description: Optional[str] = None description: Optional[str] = None
priority: Optional[CoachingTaskPriority] = CoachingTaskPriority.MEDIUM priority: Optional[CoachingTaskPriority] = CoachingTaskPriority.MEDIUM
dueDate: Optional[str] = None dueDate: Optional[float] = None
class UpdateTaskRequest(BaseModel): class UpdateTaskRequest(BaseModel):
title: Optional[str] = None title: Optional[str] = None
description: Optional[str] = None description: Optional[str] = None
priority: Optional[CoachingTaskPriority] = None priority: Optional[CoachingTaskPriority] = None
dueDate: Optional[str] = None dueDate: Optional[float] = None
class UpdateTaskStatusRequest(BaseModel): class UpdateTaskStatusRequest(BaseModel):

View file

@ -12,7 +12,7 @@ from typing import Dict, Any, List, Optional
from modules.datamodels.datamodelUam import User from modules.datamodels.datamodelUam import User
from modules.connectors.connectorDbPostgre import DatabaseConnector from modules.connectors.connectorDbPostgre import DatabaseConnector
from modules.shared.dbRegistry import registerDatabase from modules.shared.dbRegistry import registerDatabase
from modules.shared.timeUtils import getIsoTimestamp from modules.shared.timeUtils import getIsoTimestamp, getUtcTimestamp
from modules.shared.configuration import APP_CONFIG from modules.shared.configuration import APP_CONFIG
from modules.shared.i18nRegistry import resolveText, t from modules.shared.i18nRegistry import resolveText, t
@ -112,7 +112,7 @@ class CommcoachObjects:
CoachingSession, CoachingSession,
recordFilter={"contextId": contextId, "userId": userId}, recordFilter={"contextId": contextId, "userId": userId},
) )
records.sort(key=lambda r: r.get("startedAt") or r.get("createdAt") or "", reverse=True) records.sort(key=lambda r: r.get("startedAt") or 0, reverse=True)
return records return records
def getSession(self, sessionId: str) -> Optional[Dict[str, Any]]: def getSession(self, sessionId: str) -> Optional[Dict[str, Any]]:
@ -129,7 +129,7 @@ class CommcoachObjects:
def createSession(self, data: Dict[str, Any]) -> Dict[str, Any]: def createSession(self, data: Dict[str, Any]) -> Dict[str, Any]:
data["createdAt"] = getIsoTimestamp() data["createdAt"] = getIsoTimestamp()
data["updatedAt"] = getIsoTimestamp() data["updatedAt"] = getIsoTimestamp()
data["startedAt"] = getIsoTimestamp() data["startedAt"] = getUtcTimestamp()
return self.db.recordCreate(CoachingSession, data) return self.db.recordCreate(CoachingSession, data)
def updateSession(self, sessionId: str, updates: Dict[str, Any]) -> Optional[Dict[str, Any]]: def updateSession(self, sessionId: str, updates: Dict[str, Any]) -> Optional[Dict[str, Any]]:
@ -281,7 +281,7 @@ class CommcoachObjects:
def getBadges(self, userId: str, instanceId: str) -> List[Dict[str, Any]]: def getBadges(self, userId: str, instanceId: str) -> List[Dict[str, Any]]:
from .datamodelCommcoach import CoachingBadge from .datamodelCommcoach import CoachingBadge
records = self.db.getRecordset(CoachingBadge, recordFilter={"userId": userId, "instanceId": instanceId}) records = self.db.getRecordset(CoachingBadge, recordFilter={"userId": userId, "instanceId": instanceId})
records.sort(key=lambda r: r.get("awardedAt") or "", reverse=True) records.sort(key=lambda r: r.get("awardedAt") or 0, reverse=True)
return records return records
def hasBadge(self, userId: str, instanceId: str, badgeKey: str) -> bool: def hasBadge(self, userId: str, instanceId: str, badgeKey: str) -> bool:
@ -291,7 +291,7 @@ class CommcoachObjects:
def awardBadge(self, data: Dict[str, Any]) -> Dict[str, Any]: def awardBadge(self, data: Dict[str, Any]) -> Dict[str, Any]:
from .datamodelCommcoach import CoachingBadge from .datamodelCommcoach import CoachingBadge
data["awardedAt"] = getIsoTimestamp() data["awardedAt"] = getUtcTimestamp()
data["createdAt"] = getIsoTimestamp() data["createdAt"] = getIsoTimestamp()
return self.db.recordCreate(CoachingBadge, data) return self.db.recordCreate(CoachingBadge, data)

View file

@ -471,10 +471,10 @@ async def cancelSession(
raise HTTPException(status_code=404, detail=routeApiMsg("Session not found")) raise HTTPException(status_code=404, detail=routeApiMsg("Session not found"))
_validateOwnership(session, context) _validateOwnership(session, context)
from modules.shared.timeUtils import getIsoTimestamp from modules.shared.timeUtils import getUtcTimestamp
interface.updateSession(sessionId, { interface.updateSession(sessionId, {
"status": CoachingSessionStatus.CANCELLED.value, "status": CoachingSessionStatus.CANCELLED.value,
"endedAt": getIsoTimestamp(), "endedAt": getUtcTimestamp(),
}) })
return {"cancelled": True} return {"cancelled": True}
@ -768,8 +768,8 @@ async def updateTaskStatus(
updates = {"status": body.status.value} updates = {"status": body.status.value}
if body.status == CoachingTaskStatus.DONE: if body.status == CoachingTaskStatus.DONE:
from modules.shared.timeUtils import getIsoTimestamp from modules.shared.timeUtils import getUtcTimestamp
updates["completedAt"] = getIsoTimestamp() updates["completedAt"] = getUtcTimestamp()
updated = interface.updateTask(taskId, updates) updated = interface.updateTask(taskId, updates)
return {"task": updated} return {"task": updated}

View file

@ -14,7 +14,7 @@ from typing import Optional, Dict, Any, List
from modules.datamodels.datamodelUam import User from modules.datamodels.datamodelUam import User
from modules.datamodels.datamodelAi import AiCallRequest, AiCallOptions, OperationTypeEnum, PriorityEnum from modules.datamodels.datamodelAi import AiCallRequest, AiCallOptions, OperationTypeEnum, PriorityEnum
from modules.shared.timeUtils import getIsoTimestamp from modules.shared.timeUtils import getIsoTimestamp, getUtcTimestamp
from .datamodelCommcoach import ( from .datamodelCommcoach import (
CoachingMessage, CoachingMessageRole, CoachingMessageContentType, CoachingMessage, CoachingMessageRole, CoachingMessageContentType,
@ -1107,7 +1107,7 @@ class CommcoachService:
if len(messages) < 2: if len(messages) < 2:
interface.updateSession(sessionId, { interface.updateSession(sessionId, {
"status": CoachingSessionStatus.COMPLETED.value, "status": CoachingSessionStatus.COMPLETED.value,
"endedAt": getIsoTimestamp(), "endedAt": getUtcTimestamp(),
"compressedHistorySummary": None, "compressedHistorySummary": None,
"compressedHistoryUpToMessageCount": None, "compressedHistoryUpToMessageCount": None,
}) })
@ -1252,21 +1252,18 @@ class CommcoachService:
logger.warning(f"Coaching session indexing failed (non-blocking): {e}") logger.warning(f"Coaching session indexing failed (non-blocking): {e}")
# Calculate duration # Calculate duration
startedAt = session.get("startedAt", "") startedAt = session.get("startedAt")
durationSeconds = 0 durationSeconds = 0
if startedAt: if startedAt:
try: from datetime import datetime, timezone
from datetime import datetime start = datetime.fromtimestamp(startedAt, tz=timezone.utc)
start = datetime.fromisoformat(startedAt.replace("Z", "+00:00")) end = datetime.now(timezone.utc)
end = datetime.now(start.tzinfo) if start.tzinfo else datetime.now() durationSeconds = int((end - start).total_seconds())
durationSeconds = int((end - start).total_seconds())
except Exception:
pass
# Update session - clear compressed history so it never leaks into new sessions # Update session - clear compressed history so it never leaks into new sessions
sessionUpdates = { sessionUpdates = {
"status": CoachingSessionStatus.COMPLETED.value, "status": CoachingSessionStatus.COMPLETED.value,
"endedAt": getIsoTimestamp(), "endedAt": getUtcTimestamp(),
"summary": summary, "summary": summary,
"durationSeconds": durationSeconds, "durationSeconds": durationSeconds,
"messageCount": len(messages), "messageCount": len(messages),
@ -1285,7 +1282,7 @@ class CommcoachService:
completedCount = len([s for s in allSessions if s.get("status") == CoachingSessionStatus.COMPLETED.value]) completedCount = len([s for s in allSessions if s.get("status") == CoachingSessionStatus.COMPLETED.value])
interface.updateContext(contextId, { interface.updateContext(contextId, {
"sessionCount": completedCount, "sessionCount": completedCount,
"lastSessionAt": getIsoTimestamp(), "lastSessionAt": getUtcTimestamp(),
}) })
# Update user profile streak # Update user profile streak
@ -1324,26 +1321,23 @@ class CommcoachService:
if not profile: if not profile:
profile = interface.getOrCreateProfile(self.userId, self.mandateId, self.instanceId) profile = interface.getOrCreateProfile(self.userId, self.mandateId, self.instanceId)
from datetime import datetime, timedelta from datetime import datetime, timezone
lastSessionAt = profile.get("lastSessionAt") lastSessionAt = profile.get("lastSessionAt")
currentStreak = profile.get("streakDays", 0) currentStreak = profile.get("streakDays", 0)
longestStreak = profile.get("longestStreak", 0) longestStreak = profile.get("longestStreak", 0)
totalSessions = profile.get("totalSessions", 0) totalSessions = profile.get("totalSessions", 0)
today = datetime.now().date() today = datetime.now(timezone.utc).date()
isConsecutive = False isConsecutive = False
if lastSessionAt: if lastSessionAt:
try: lastDate = datetime.fromtimestamp(lastSessionAt, tz=timezone.utc).date()
lastDate = datetime.fromisoformat(lastSessionAt.replace("Z", "+00:00")).date() diff = (today - lastDate).days
diff = (today - lastDate).days if diff == 1:
if diff == 1: isConsecutive = True
isConsecutive = True elif diff == 0:
elif diff == 0: isConsecutive = True
isConsecutive = True # Same day, maintain streak
except Exception:
pass
newStreak = (currentStreak + 1) if isConsecutive else 1 newStreak = (currentStreak + 1) if isConsecutive else 1
newLongest = max(longestStreak, newStreak) newLongest = max(longestStreak, newStreak)
@ -1352,7 +1346,7 @@ class CommcoachService:
"streakDays": newStreak, "streakDays": newStreak,
"longestStreak": newLongest, "longestStreak": newLongest,
"totalSessions": totalSessions + 1, "totalSessions": totalSessions + 1,
"lastSessionAt": getIsoTimestamp(), "lastSessionAt": getUtcTimestamp(),
}) })
except Exception as e: except Exception as e:
logger.warning(f"Failed to update streak: {e}") logger.warning(f"Failed to update streak: {e}")
@ -1418,14 +1412,13 @@ class CommcoachService:
completedSessions = [s for s in allSessions if s.get("status") == CoachingSessionStatus.COMPLETED.value] completedSessions = [s for s in allSessions if s.get("status") == CoachingSessionStatus.COMPLETED.value]
for s in completedSessions: for s in completedSessions:
startedAt = s.get("startedAt") or s.get("createdAt") or "" startedAt = s.get("startedAt")
if startedAt: if startedAt:
try: from datetime import datetime, timezone
from datetime import datetime dt = datetime.fromtimestamp(startedAt, tz=timezone.utc)
dt = datetime.fromisoformat(str(startedAt).replace("Z", "+00:00")) s["date"] = dt.strftime("%d.%m.%Y")
s["date"] = dt.strftime("%d.%m.%Y") else:
except Exception: s["date"] = ""
s["date"] = ""
result = { result = {
"intent": intent, "intent": intent,

View file

@ -206,14 +206,11 @@ Tool-Nutzung:
if retrievedSession: if retrievedSession:
dateStr = "" dateStr = ""
startedAt = retrievedSession.get("startedAt") or retrievedSession.get("createdAt") startedAt = retrievedSession.get("startedAt")
if startedAt: if startedAt:
try: from datetime import datetime, timezone
from datetime import datetime dt = datetime.fromtimestamp(startedAt, tz=timezone.utc)
dt = datetime.fromisoformat(str(startedAt).replace("Z", "+00:00")) dateStr = dt.strftime("%d.%m.%Y")
dateStr = dt.strftime("%d.%m.%Y")
except Exception:
pass
prompt += f"\n\nVom Benutzer angefragte Session ({dateStr}):" prompt += f"\n\nVom Benutzer angefragte Session ({dateStr}):"
prompt += f"\n{retrievedSession.get('summary', '')[:500]}" prompt += f"\n{retrievedSession.get('summary', '')[:500]}"

View file

@ -7,7 +7,7 @@ Intent detection, retrieval strategies, and context assembly for intelligent ses
import re import re
import logging import logging
from datetime import datetime from datetime import datetime, timezone
from typing import Optional, Dict, Any, List, Tuple from typing import Optional, Dict, Any, List, Tuple
from enum import Enum from enum import Enum
@ -106,18 +106,15 @@ def findSessionByDate(
for s in sessions: for s in sessions:
if s.get("status") != "completed": if s.get("status") != "completed":
continue continue
startedAt = s.get("startedAt") or s.get("endedAt") or s.get("createdAt") startedAt = s.get("startedAt") or s.get("endedAt")
if not startedAt: if not startedAt:
continue continue
try: dt = datetime.fromtimestamp(startedAt, tz=timezone.utc)
dt = datetime.fromisoformat(startedAt.replace("Z", "+00:00")) sessionDate = dt.date()
sessionDate = dt.date() diff = abs((sessionDate - targetDateOnly).days)
diff = abs((sessionDate - targetDateOnly).days) if bestDiff is None or diff < bestDiff:
if bestDiff is None or diff < bestDiff: bestDiff = diff
bestDiff = diff bestMatch = s
bestMatch = s
except Exception:
continue
return bestMatch return bestMatch
@ -231,17 +228,14 @@ def buildSessionSummariesForPrompt(
and s.get("summary") and s.get("summary")
and s.get("id") != excludeSessionId and s.get("id") != excludeSessionId
] ]
completed.sort(key=lambda x: x.get("startedAt") or x.get("createdAt") or "", reverse=True) completed.sort(key=lambda x: x.get("startedAt") or 0, reverse=True)
result = [] result = []
for s in completed[:limit]: for s in completed[:limit]:
startedAt = s.get("startedAt") or s.get("createdAt") or "" startedAt = s.get("startedAt")
dateStr = "" dateStr = ""
if startedAt: if startedAt:
try: dt = datetime.fromtimestamp(startedAt, tz=timezone.utc)
dt = datetime.fromisoformat(startedAt.replace("Z", "+00:00")) dateStr = dt.strftime("%d.%m.%Y")
dateStr = dt.strftime("%d.%m.%Y")
except Exception:
pass
result.append({ result.append({
"summary": s.get("summary", ""), "summary": s.get("summary", ""),
"date": dateStr, "date": dateStr,

View file

@ -8,7 +8,7 @@ Generates Markdown and PDF exports for dossiers and sessions.
import logging import logging
import json import json
from typing import Dict, Any, List, Optional from typing import Dict, Any, List, Optional
from datetime import datetime from datetime import datetime, timezone
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -49,7 +49,7 @@ def buildDossierMarkdown(context: Dict[str, Any], sessions: List[Dict[str, Any]]
lines.append(f"- {text}") lines.append(f"- {text}")
completedSessions = [s for s in sessions if s.get("status") == "completed"] completedSessions = [s for s in sessions if s.get("status") == "completed"]
completedSessions.sort(key=lambda s: s.get("startedAt") or s.get("createdAt") or "") completedSessions.sort(key=lambda s: s.get("startedAt") or 0)
if completedSessions: if completedSessions:
lines += ["", "## Sessions", ""] lines += ["", "## Sessions", ""]
for i, s in enumerate(completedSessions, 1): for i, s in enumerate(completedSessions, 1):
@ -227,14 +227,14 @@ def _mdToXml(text: str) -> str:
def _formatDate(isoStr: Optional[str]) -> str: def _formatDate(val) -> str:
if not isoStr: if not val:
return datetime.now().strftime("%d.%m.%Y") return datetime.now(timezone.utc).strftime("%d.%m.%Y")
try: if isinstance(val, (int, float)):
dt = datetime.fromisoformat(str(isoStr).replace("Z", "+00:00")) dt = datetime.fromtimestamp(float(val), tz=timezone.utc)
return dt.strftime("%d.%m.%Y") return dt.strftime("%d.%m.%Y")
except Exception: dt = datetime.fromisoformat(str(val).replace("Z", "+00:00"))
return isoStr return dt.strftime("%d.%m.%Y")
def _parseJson(value, fallback): def _parseJson(value, fallback):

View file

@ -68,9 +68,7 @@ class AutoWorkflow(PowerOnModel):
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": False, "frontend_required": False,
"label": "Mandanten-ID", "label": "Mandanten-ID",
"fk_label_field": "label", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
"fk_model": "Mandate",
"fk_target": {"db": "poweron_app", "table": "Mandate"},
}, },
) )
featureInstanceId: str = Field( featureInstanceId: str = Field(
@ -80,9 +78,7 @@ class AutoWorkflow(PowerOnModel):
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": False, "frontend_required": False,
"label": "Feature-Instanz-ID", "label": "Feature-Instanz-ID",
"fk_label_field": "label", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
"fk_model": "FeatureInstance",
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
}, },
) )
label: str = Field( label: str = Field(
@ -112,7 +108,7 @@ class AutoWorkflow(PowerOnModel):
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": False, "frontend_required": False,
"label": "Vorlagen-Quelle", "label": "Vorlagen-Quelle",
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoWorkflow"}, "fk_target": {"db": "poweron_graphicaleditor", "table": "AutoWorkflow", "labelField": "label"},
}, },
) )
templateScope: Optional[str] = Field( templateScope: Optional[str] = Field(
@ -133,7 +129,7 @@ class AutoWorkflow(PowerOnModel):
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": False, "frontend_required": False,
"label": "Aktuelle Version", "label": "Aktuelle Version",
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoVersion"}, "fk_target": {"db": "poweron_graphicaleditor", "table": "AutoVersion", "labelField": "versionNumber"},
}, },
) )
active: bool = Field( active: bool = Field(
@ -182,7 +178,7 @@ class AutoVersion(PowerOnModel):
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": True, "frontend_required": True,
"label": "Workflow-ID", "label": "Workflow-ID",
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoWorkflow"}, "fk_target": {"db": "poweron_graphicaleditor", "table": "AutoWorkflow", "labelField": "label"},
}, },
) )
versionNumber: int = Field( versionNumber: int = Field(
@ -208,7 +204,7 @@ class AutoVersion(PowerOnModel):
publishedAt: Optional[float] = Field( publishedAt: Optional[float] = Field(
default=None, default=None,
description="Timestamp when version was published", description="Timestamp when version was published",
json_schema_extra={"frontend_type": "datetime", "frontend_readonly": True, "frontend_required": False, "label": "Veröffentlicht am"}, json_schema_extra={"frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False, "label": "Veröffentlicht am"},
) )
publishedBy: Optional[str] = Field( publishedBy: Optional[str] = Field(
default=None, default=None,
@ -218,9 +214,7 @@ class AutoVersion(PowerOnModel):
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": False, "frontend_required": False,
"label": "Veröffentlicht von", "label": "Veröffentlicht von",
"fk_model": "User", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
"fk_label_field": "username",
"fk_target": {"db": "poweron_app", "table": "User"},
}, },
) )
@ -243,7 +237,7 @@ class AutoRun(PowerOnModel):
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": True, "frontend_required": True,
"label": "Workflow-ID", "label": "Workflow-ID",
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoWorkflow"}, "fk_target": {"db": "poweron_graphicaleditor", "table": "AutoWorkflow", "labelField": "label"},
}, },
) )
label: Optional[str] = Field( label: Optional[str] = Field(
@ -259,9 +253,7 @@ class AutoRun(PowerOnModel):
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": False, "frontend_required": False,
"label": "Mandanten-ID", "label": "Mandanten-ID",
"fk_label_field": "label", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
"fk_model": "Mandate",
"fk_target": {"db": "poweron_app", "table": "Mandate"},
}, },
) )
ownerId: Optional[str] = Field( ownerId: Optional[str] = Field(
@ -272,9 +264,7 @@ class AutoRun(PowerOnModel):
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": False, "frontend_required": False,
"label": "Auslöser", "label": "Auslöser",
"fk_model": "User", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
"fk_label_field": "username",
"fk_target": {"db": "poweron_app", "table": "User"},
}, },
) )
versionId: Optional[str] = Field( versionId: Optional[str] = Field(
@ -285,7 +275,7 @@ class AutoRun(PowerOnModel):
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": False, "frontend_required": False,
"label": "Versions-ID", "label": "Versions-ID",
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoVersion"}, "fk_target": {"db": "poweron_graphicaleditor", "table": "AutoVersion", "labelField": "versionNumber"},
}, },
) )
status: str = Field( status: str = Field(
@ -301,12 +291,12 @@ class AutoRun(PowerOnModel):
startedAt: Optional[float] = Field( startedAt: Optional[float] = Field(
default=None, default=None,
description="Run start timestamp", description="Run start timestamp",
json_schema_extra={"frontend_type": "datetime", "frontend_readonly": True, "frontend_required": False, "label": "Gestartet am"}, json_schema_extra={"frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False, "label": "Gestartet am"},
) )
completedAt: Optional[float] = Field( completedAt: Optional[float] = Field(
default=None, default=None,
description="Run completion timestamp", description="Run completion timestamp",
json_schema_extra={"frontend_type": "datetime", "frontend_readonly": True, "frontend_required": False, "label": "Abgeschlossen am"}, json_schema_extra={"frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False, "label": "Abgeschlossen am"},
) )
nodeOutputs: Dict[str, Any] = Field( nodeOutputs: Dict[str, Any] = Field(
default_factory=dict, default_factory=dict,
@ -358,7 +348,7 @@ class AutoStepLog(PowerOnModel):
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": True, "frontend_required": True,
"label": "Lauf-ID", "label": "Lauf-ID",
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoRun"}, "fk_target": {"db": "poweron_graphicaleditor", "table": "AutoRun", "labelField": "label"},
}, },
) )
nodeId: str = Field( nodeId: str = Field(
@ -392,12 +382,12 @@ class AutoStepLog(PowerOnModel):
startedAt: Optional[float] = Field( startedAt: Optional[float] = Field(
default=None, default=None,
description="Step start timestamp", description="Step start timestamp",
json_schema_extra={"frontend_type": "datetime", "frontend_readonly": True, "frontend_required": False, "label": "Gestartet am"}, json_schema_extra={"frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False, "label": "Gestartet am"},
) )
completedAt: Optional[float] = Field( completedAt: Optional[float] = Field(
default=None, default=None,
description="Step completion timestamp", description="Step completion timestamp",
json_schema_extra={"frontend_type": "datetime", "frontend_readonly": True, "frontend_required": False, "label": "Abgeschlossen am"}, json_schema_extra={"frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False, "label": "Abgeschlossen am"},
) )
durationMs: Optional[int] = Field( durationMs: Optional[int] = Field(
default=None, default=None,
@ -434,7 +424,7 @@ class AutoTask(PowerOnModel):
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": True, "frontend_required": True,
"label": "Lauf-ID", "label": "Lauf-ID",
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoRun"}, "fk_target": {"db": "poweron_graphicaleditor", "table": "AutoRun", "labelField": "label"},
}, },
) )
workflowId: str = Field( workflowId: str = Field(
@ -444,7 +434,7 @@ class AutoTask(PowerOnModel):
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": True, "frontend_required": True,
"label": "Workflow-ID", "label": "Workflow-ID",
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoWorkflow"}, "fk_target": {"db": "poweron_graphicaleditor", "table": "AutoWorkflow", "labelField": "label"},
}, },
) )
nodeId: str = Field( nodeId: str = Field(
@ -468,7 +458,7 @@ class AutoTask(PowerOnModel):
"frontend_readonly": False, "frontend_readonly": False,
"frontend_required": False, "frontend_required": False,
"label": "Zugewiesen an", "label": "Zugewiesen an",
"fk_target": {"db": "poweron_app", "table": "User"}, "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
}, },
) )
status: str = Field( status: str = Field(
@ -484,7 +474,7 @@ class AutoTask(PowerOnModel):
expiresAt: Optional[float] = Field( expiresAt: Optional[float] = Field(
default=None, default=None,
description="Expiration timestamp for the task", description="Expiration timestamp for the task",
json_schema_extra={"frontend_type": "datetime", "frontend_required": False, "label": "Läuft ab am"}, json_schema_extra={"frontend_type": "timestamp", "frontend_required": False, "label": "Läuft ab am"},
) )

View file

@ -32,7 +32,7 @@ class DataNeutraliserConfig(PowerOnModel):
"frontend_type": "text", "frontend_type": "text",
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": True, "frontend_required": True,
"fk_target": {"db": "poweron_app", "table": "Mandate"}, "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
}, },
) )
featureInstanceId: str = Field( featureInstanceId: str = Field(
@ -42,7 +42,7 @@ class DataNeutraliserConfig(PowerOnModel):
"frontend_type": "text", "frontend_type": "text",
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": True, "frontend_required": True,
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"}, "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
}, },
) )
userId: str = Field( userId: str = Field(
@ -52,7 +52,7 @@ class DataNeutraliserConfig(PowerOnModel):
"frontend_type": "text", "frontend_type": "text",
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": True, "frontend_required": True,
"fk_target": {"db": "poweron_app", "table": "User"}, "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
}, },
) )
enabled: bool = Field( enabled: bool = Field(
@ -107,7 +107,7 @@ class DataNeutralizerAttributes(BaseModel):
"frontend_type": "text", "frontend_type": "text",
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": True, "frontend_required": True,
"fk_target": {"db": "poweron_app", "table": "Mandate"}, "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
}, },
) )
featureInstanceId: str = Field( featureInstanceId: str = Field(
@ -117,7 +117,7 @@ class DataNeutralizerAttributes(BaseModel):
"frontend_type": "text", "frontend_type": "text",
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": True, "frontend_required": True,
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"}, "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
}, },
) )
userId: str = Field( userId: str = Field(
@ -127,7 +127,7 @@ class DataNeutralizerAttributes(BaseModel):
"frontend_type": "text", "frontend_type": "text",
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": True, "frontend_required": True,
"fk_target": {"db": "poweron_app", "table": "User"}, "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
}, },
) )
originalText: str = Field( originalText: str = Field(
@ -142,7 +142,7 @@ class DataNeutralizerAttributes(BaseModel):
"frontend_type": "text", "frontend_type": "text",
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": False, "frontend_required": False,
"fk_target": {"db": "poweron_management", "table": "FileItem"}, "fk_target": {"db": "poweron_management", "table": "FileItem", "labelField": "fileName"},
}, },
) )
patternType: str = Field( patternType: str = Field(
@ -160,16 +160,16 @@ class DataNeutralizationSnapshot(BaseModel):
) )
mandateId: str = Field( mandateId: str = Field(
description="Mandate scope", description="Mandate scope",
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate"}}, json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}},
) )
featureInstanceId: str = Field( featureInstanceId: str = Field(
default="", default="",
description="Feature instance scope", description="Feature instance scope",
json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}}, json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}},
) )
userId: str = Field( userId: str = Field(
description="User who triggered neutralization", description="User who triggered neutralization",
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "User"}}, json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"}},
) )
sourceLabel: str = Field( sourceLabel: str = Field(
description="Human label, e.g. 'Prompt', 'Kontext', 'Nachricht 3'", description="Human label, e.g. 'Prompt', 'Kontext', 'Nachricht 3'",

View file

@ -288,7 +288,7 @@ class Kanton(PowerOnModel):
"frontend_type": "text", "frontend_type": "text",
"frontend_readonly": False, "frontend_readonly": False,
"frontend_required": False, "frontend_required": False,
"fk_target": {"db": "poweron_realestate", "table": "Land"}, "fk_target": {"db": "poweron_realestate", "table": "Land", "labelField": "label"},
}, },
) )
abk: Optional[str] = Field( abk: Optional[str] = Field(
@ -348,7 +348,7 @@ class Gemeinde(BaseModel):
"frontend_type": "text", "frontend_type": "text",
"frontend_readonly": False, "frontend_readonly": False,
"frontend_required": False, "frontend_required": False,
"fk_target": {"db": "poweron_realestate", "table": "Kanton"}, "fk_target": {"db": "poweron_realestate", "table": "Kanton", "labelField": "label"},
}, },
) )
plz: Optional[str] = Field( plz: Optional[str] = Field(
@ -398,7 +398,7 @@ class Parzelle(PowerOnModel):
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": False, "frontend_required": False,
"label": "Mandats-ID", "label": "Mandats-ID",
"fk_target": {"db": "poweron_app", "table": "Mandate"}, "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
}, },
) )
featureInstanceId: str = Field( featureInstanceId: str = Field(
@ -408,7 +408,7 @@ class Parzelle(PowerOnModel):
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": False, "frontend_required": False,
"label": "Feature-Instanz-ID", "label": "Feature-Instanz-ID",
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"}, "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
}, },
) )
@ -472,7 +472,7 @@ class Parzelle(PowerOnModel):
"frontend_type": "text", "frontend_type": "text",
"frontend_readonly": False, "frontend_readonly": False,
"frontend_required": False, "frontend_required": False,
"fk_target": {"db": "poweron_realestate", "table": "Gemeinde"}, "fk_target": {"db": "poweron_realestate", "table": "Gemeinde", "labelField": "label"},
}, },
) )
@ -638,7 +638,7 @@ class Projekt(PowerOnModel):
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": False, "frontend_required": False,
"label": "Mandats-ID", "label": "Mandats-ID",
"fk_target": {"db": "poweron_app", "table": "Mandate"}, "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
}, },
) )
featureInstanceId: str = Field( featureInstanceId: str = Field(
@ -648,7 +648,7 @@ class Projekt(PowerOnModel):
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": False, "frontend_required": False,
"label": "Feature-Instanz-ID", "label": "Feature-Instanz-ID",
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"}, "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
}, },
) )
label: str = Field( label: str = Field(

View file

@ -228,31 +228,27 @@ def get_projects(
recordFilter = {"featureInstanceId": instanceId} recordFilter = {"featureInstanceId": instanceId}
if mode in ("filterValues", "ids"): if mode in ("filterValues", "ids"):
from modules.routes.routeHelpers import handleFilterValuesInMemory, handleIdsInMemory from modules.routes.routeHelpers import handleFilterValuesInMemory, handleIdsInMemory, enrichRowsWithFkLabels
items = interface.getProjekte(recordFilter=recordFilter) items = interface.getProjekte(recordFilter=recordFilter)
itemDicts = [i.model_dump() if hasattr(i, 'model_dump') else i for i in items] itemDicts = [i.model_dump() if hasattr(i, 'model_dump') else i for i in items]
if mode == "filterValues": if mode == "filterValues":
if not column: if not column:
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues") raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
enrichRowsWithFkLabels(itemDicts, Projekt)
return handleFilterValuesInMemory(itemDicts, column, pagination) return handleFilterValuesInMemory(itemDicts, column, pagination)
return handleIdsInMemory(itemDicts, pagination) return handleIdsInMemory(itemDicts, pagination)
items = interface.getProjekte(recordFilter=recordFilter) items = interface.getProjekte(recordFilter=recordFilter)
paginationParams = _parsePagination(pagination) paginationParams = _parsePagination(pagination)
if paginationParams: if paginationParams:
if paginationParams.sort: from modules.routes.routeHelpers import applyFiltersAndSort
for sort_field in reversed(paginationParams.sort): itemDicts = [i.model_dump() if hasattr(i, 'model_dump') else i for i in items]
field_name = sort_field.field filtered = applyFiltersAndSort(itemDicts, paginationParams)
direction = sort_field.direction.lower() total_items = len(filtered)
items.sort(
key=lambda x: getattr(x, field_name, None),
reverse=(direction == "desc")
)
total_items = len(items)
total_pages = (total_items + paginationParams.pageSize - 1) // paginationParams.pageSize total_pages = (total_items + paginationParams.pageSize - 1) // paginationParams.pageSize
start_idx = (paginationParams.page - 1) * paginationParams.pageSize start_idx = (paginationParams.page - 1) * paginationParams.pageSize
end_idx = start_idx + paginationParams.pageSize end_idx = start_idx + paginationParams.pageSize
paginated_items = items[start_idx:end_idx] paginated_items = filtered[start_idx:end_idx]
return PaginatedResponse( return PaginatedResponse(
items=paginated_items, items=paginated_items,
pagination=PaginationMetadata( pagination=PaginationMetadata(
@ -373,31 +369,27 @@ def get_parcels(
recordFilter = {"featureInstanceId": instanceId} recordFilter = {"featureInstanceId": instanceId}
if mode in ("filterValues", "ids"): if mode in ("filterValues", "ids"):
from modules.routes.routeHelpers import handleFilterValuesInMemory, handleIdsInMemory from modules.routes.routeHelpers import handleFilterValuesInMemory, handleIdsInMemory, enrichRowsWithFkLabels
items = interface.getParzellen(recordFilter=recordFilter) items = interface.getParzellen(recordFilter=recordFilter)
itemDicts = [i.model_dump() if hasattr(i, 'model_dump') else i for i in items] itemDicts = [i.model_dump() if hasattr(i, 'model_dump') else i for i in items]
if mode == "filterValues": if mode == "filterValues":
if not column: if not column:
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues") raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
enrichRowsWithFkLabels(itemDicts, Parzelle)
return handleFilterValuesInMemory(itemDicts, column, pagination) return handleFilterValuesInMemory(itemDicts, column, pagination)
return handleIdsInMemory(itemDicts, pagination) return handleIdsInMemory(itemDicts, pagination)
items = interface.getParzellen(recordFilter=recordFilter) items = interface.getParzellen(recordFilter=recordFilter)
paginationParams = _parsePagination(pagination) paginationParams = _parsePagination(pagination)
if paginationParams: if paginationParams:
if paginationParams.sort: from modules.routes.routeHelpers import applyFiltersAndSort
for sort_field in reversed(paginationParams.sort): itemDicts = [i.model_dump() if hasattr(i, 'model_dump') else i for i in items]
field_name = sort_field.field filtered = applyFiltersAndSort(itemDicts, paginationParams)
direction = sort_field.direction.lower() total_items = len(filtered)
items.sort(
key=lambda x: getattr(x, field_name, None),
reverse=(direction == "desc")
)
total_items = len(items)
total_pages = (total_items + paginationParams.pageSize - 1) // paginationParams.pageSize total_pages = (total_items + paginationParams.pageSize - 1) // paginationParams.pageSize
start_idx = (paginationParams.page - 1) * paginationParams.pageSize start_idx = (paginationParams.page - 1) * paginationParams.pageSize
end_idx = start_idx + paginationParams.pageSize end_idx = start_idx + paginationParams.pageSize
paginated_items = items[start_idx:end_idx] paginated_items = filtered[start_idx:end_idx]
return PaginatedResponse( return PaginatedResponse(
items=paginated_items, items=paginated_items,
pagination=PaginationMetadata( pagination=PaginationMetadata(

View file

@ -75,7 +75,7 @@ class RedmineInstanceConfig(PowerOnModel):
"frontend_type": "text", "frontend_type": "text",
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": True, "frontend_required": True,
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"}, "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
}, },
) )
mandateId: Optional[str] = Field( mandateId: Optional[str] = Field(
@ -86,7 +86,7 @@ class RedmineInstanceConfig(PowerOnModel):
"frontend_type": "text", "frontend_type": "text",
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": False, "frontend_required": False,
"fk_target": {"db": "poweron_app", "table": "Mandate"}, "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
}, },
) )
baseUrl: str = Field( baseUrl: str = Field(
@ -195,7 +195,7 @@ class RedmineTicketMirror(PowerOnModel):
featureInstanceId: str = Field( featureInstanceId: str = Field(
description="FK -> FeatureInstance.id", description="FK -> FeatureInstance.id",
json_schema_extra={"label": "Feature-Instanz", "frontend_type": "text", "frontend_readonly": True, "frontend_required": True, json_schema_extra={"label": "Feature-Instanz", "frontend_type": "text", "frontend_readonly": True, "frontend_required": True,
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"}}, "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}},
) )
mandateId: Optional[str] = Field( mandateId: Optional[str] = Field(
default=None, default=None,
@ -226,14 +226,14 @@ class RedmineTicketMirror(PowerOnModel):
closedOnTs: Optional[float] = Field( closedOnTs: Optional[float] = Field(
default=None, default=None,
description="Best-effort UTC epoch when the ticket transitioned to a closed status. Approximated as updatedOnTs for closed tickets at sync time; used by Stats to render the open-vs-total snapshot chart.", description="Best-effort UTC epoch when the ticket transitioned to a closed status. Approximated as updatedOnTs for closed tickets at sync time; used by Stats to render the open-vs-total snapshot chart.",
json_schema_extra={"label": "closedOn (epoch)", "frontend_type": "number", "frontend_readonly": True, "frontend_required": False, "frontend_hidden": True}, json_schema_extra={"label": "closedOn (epoch)", "frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False, "frontend_hidden": True},
) )
createdOn: Optional[str] = Field(default=None, json_schema_extra={"label": "Erstellt am (Redmine)", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False}) createdOn: Optional[str] = Field(default=None, json_schema_extra={"label": "Erstellt am (Redmine)", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
updatedOn: Optional[str] = Field(default=None, json_schema_extra={"label": "Geaendert am (Redmine)", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False}) updatedOn: Optional[str] = Field(default=None, json_schema_extra={"label": "Geaendert am (Redmine)", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
createdOnTs: Optional[float] = Field(default=None, description="UTC epoch parsed from createdOn (for SQL filtering)", createdOnTs: Optional[float] = Field(default=None, description="UTC epoch parsed from createdOn (for SQL filtering)",
json_schema_extra={"label": "createdOn (epoch)", "frontend_type": "number", "frontend_readonly": True, "frontend_required": False, "frontend_hidden": True}) json_schema_extra={"label": "createdOn (epoch)", "frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False, "frontend_hidden": True})
updatedOnTs: Optional[float] = Field(default=None, description="UTC epoch parsed from updatedOn (for SQL filtering)", updatedOnTs: Optional[float] = Field(default=None, description="UTC epoch parsed from updatedOn (for SQL filtering)",
json_schema_extra={"label": "updatedOn (epoch)", "frontend_type": "number", "frontend_readonly": True, "frontend_required": False, "frontend_hidden": True}) json_schema_extra={"label": "updatedOn (epoch)", "frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False, "frontend_hidden": True})
customFields: Optional[List[Dict[str, Any]]] = Field( customFields: Optional[List[Dict[str, Any]]] = Field(
default=None, default=None,
description="List of {id,name,value} as returned by Redmine; stored as JSON", description="List of {id,name,value} as returned by Redmine; stored as JSON",
@ -270,7 +270,7 @@ class RedmineRelationMirror(PowerOnModel):
featureInstanceId: str = Field( featureInstanceId: str = Field(
description="FK -> FeatureInstance.id", description="FK -> FeatureInstance.id",
json_schema_extra={"label": "Feature-Instanz", "frontend_type": "text", "frontend_readonly": True, "frontend_required": True, json_schema_extra={"label": "Feature-Instanz", "frontend_type": "text", "frontend_readonly": True, "frontend_required": True,
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"}}, "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}},
) )
redmineRelationId: int = Field( redmineRelationId: int = Field(
description="Redmine relation id (unique per feature instance)", description="Redmine relation id (unique per feature instance)",
@ -468,17 +468,17 @@ class RedmineSyncResultDto(BaseModel):
ticketsUpserted: int = 0 ticketsUpserted: int = 0
relationsUpserted: int = 0 relationsUpserted: int = 0
durationMs: int = 0 durationMs: int = 0
lastSyncAt: float lastSyncAt: float = Field(json_schema_extra={"frontend_type": "timestamp"})
error: Optional[str] = None error: Optional[str] = None
class RedmineSyncStatusDto(BaseModel): class RedmineSyncStatusDto(BaseModel):
instanceId: str instanceId: str
lastSyncAt: Optional[float] = None lastSyncAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
lastFullSyncAt: Optional[float] = None lastFullSyncAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
lastSyncDurationMs: Optional[int] = None lastSyncDurationMs: Optional[int] = None
lastSyncTicketCount: Optional[int] = None lastSyncTicketCount: Optional[int] = None
lastSyncErrorAt: Optional[float] = None lastSyncErrorAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
lastSyncErrorMessage: Optional[str] = None lastSyncErrorMessage: Optional[str] = None
mirroredTicketCount: int = 0 mirroredTicketCount: int = 0
mirroredRelationCount: int = 0 mirroredRelationCount: int = 0
@ -513,11 +513,11 @@ class RedmineConfigDto(BaseModel):
rootTrackerName: str = "Userstory" rootTrackerName: str = "Userstory"
defaultPeriodValue: Optional[Dict[str, Any]] = None defaultPeriodValue: Optional[Dict[str, Any]] = None
schemaCacheTtlSeconds: int = 24 * 60 * 60 schemaCacheTtlSeconds: int = 24 * 60 * 60
schemaCachedAt: Optional[float] = None schemaCachedAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
isActive: bool = True isActive: bool = True
lastConnectedAt: Optional[float] = None lastConnectedAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
lastSyncAt: Optional[float] = None lastSyncAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
lastFullSyncAt: Optional[float] = None lastFullSyncAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
lastSyncTicketCount: Optional[int] = None lastSyncTicketCount: Optional[int] = None
lastSyncErrorMessage: Optional[str] = None lastSyncErrorMessage: Optional[str] = None

View file

@ -91,8 +91,8 @@ class TeamsbotSession(PowerOnModel):
meetingLink: str = Field(description="Teams meeting join link") meetingLink: str = Field(description="Teams meeting join link")
botName: str = Field(default="AI Assistant", description="Display name of the bot in the meeting") botName: str = Field(default="AI Assistant", description="Display name of the bot in the meeting")
status: TeamsbotSessionStatus = Field(default=TeamsbotSessionStatus.PENDING, description="Current session status") status: TeamsbotSessionStatus = Field(default=TeamsbotSessionStatus.PENDING, description="Current session status")
startedAt: Optional[str] = Field(default=None, description="ISO timestamp when session started") startedAt: Optional[float] = Field(default=None, description="UTC unix timestamp when session started", json_schema_extra={"frontend_type": "timestamp"})
endedAt: Optional[str] = Field(default=None, description="ISO timestamp when session ended") endedAt: Optional[float] = Field(default=None, description="UTC unix timestamp when session ended", json_schema_extra={"frontend_type": "timestamp"})
startedByUserId: str = Field(description="User ID who started the session") startedByUserId: str = Field(description="User ID who started the session")
bridgeSessionId: Optional[str] = Field(default=None, description="Session ID on the .NET Media Bridge") bridgeSessionId: Optional[str] = Field(default=None, description="Session ID on the .NET Media Bridge")
meetingChatId: Optional[str] = Field(default=None, description="Teams meeting chat ID for Graph API messages") meetingChatId: Optional[str] = Field(default=None, description="Teams meeting chat ID for Graph API messages")
@ -109,7 +109,7 @@ class TeamsbotTranscript(PowerOnModel):
sessionId: str = Field(description="Session ID (FK)") sessionId: str = Field(description="Session ID (FK)")
speaker: Optional[str] = Field(default=None, description="Speaker name or identifier") speaker: Optional[str] = Field(default=None, description="Speaker name or identifier")
text: str = Field(description="Transcribed text") text: str = Field(description="Transcribed text")
timestamp: str = Field(description="ISO timestamp of the speech segment") timestamp: float = Field(description="UTC unix timestamp of the speech segment", json_schema_extra={"frontend_type": "timestamp"})
confidence: float = Field(default=0.0, ge=0.0, le=1.0, description="STT confidence score") confidence: float = Field(default=0.0, ge=0.0, le=1.0, description="STT confidence score")
language: Optional[str] = Field(default=None, description="Detected language code (e.g., de-DE)") language: Optional[str] = Field(default=None, description="Detected language code (e.g., de-DE)")
isFinal: bool = Field(default=True, description="Whether this is a final or interim result") isFinal: bool = Field(default=True, description="Whether this is a final or interim result")
@ -128,7 +128,7 @@ class TeamsbotBotResponse(PowerOnModel):
modelName: Optional[str] = Field(default=None, description="AI model used for this response") modelName: Optional[str] = Field(default=None, description="AI model used for this response")
processingTime: float = Field(default=0.0, description="Processing time in seconds") processingTime: float = Field(default=0.0, description="Processing time in seconds")
priceCHF: float = Field(default=0.0, description="Cost of this AI call in CHF") priceCHF: float = Field(default=0.0, description="Cost of this AI call in CHF")
timestamp: Optional[str] = Field(default=None, description="ISO timestamp of the response") timestamp: Optional[float] = Field(default=None, description="UTC unix timestamp of the response", json_schema_extra={"frontend_type": "timestamp"})
# ============================================================================ # ============================================================================
@ -315,8 +315,8 @@ class TeamsbotDirectorPrompt(PowerOnModel):
fileIds: List[str] = Field(default_factory=list, description="UDB-selected file/object IDs to attach as RAG context") fileIds: List[str] = Field(default_factory=list, description="UDB-selected file/object IDs to attach as RAG context")
status: TeamsbotDirectorPromptStatus = Field(default=TeamsbotDirectorPromptStatus.QUEUED, description="Lifecycle status") status: TeamsbotDirectorPromptStatus = Field(default=TeamsbotDirectorPromptStatus.QUEUED, description="Lifecycle status")
statusMessage: Optional[str] = Field(default=None, description="Optional error or status detail") statusMessage: Optional[str] = Field(default=None, description="Optional error or status detail")
createdAt: str = Field(default_factory=lambda: datetime.now(timezone.utc).isoformat(), description="ISO timestamp when created") createdAt: float = Field(default_factory=lambda: datetime.now(timezone.utc).timestamp(), description="UTC unix timestamp when created", json_schema_extra={"frontend_type": "timestamp"})
consumedAt: Optional[str] = Field(default=None, description="ISO timestamp when consumed (one-shot) or marked done") consumedAt: Optional[float] = Field(default=None, description="UTC unix timestamp when consumed (one-shot) or marked done", json_schema_extra={"frontend_type": "timestamp"})
agentRunId: Optional[str] = Field(default=None, description="Reference to the agent run that processed this prompt") agentRunId: Optional[str] = Field(default=None, description="Reference to the agent run that processed this prompt")
responseText: Optional[str] = Field(default=None, description="Final agent text delivered to the meeting") responseText: Optional[str] = Field(default=None, description="Final agent text delivered to the meeting")

View file

@ -87,7 +87,7 @@ class TeamsbotObjects:
if not includeEnded: if not includeEnded:
records = [r for r in records if r.get("status") != TeamsbotSessionStatus.ENDED.value] records = [r for r in records if r.get("status") != TeamsbotSessionStatus.ENDED.value]
# Sort by startedAt descending # Sort by startedAt descending
records.sort(key=lambda r: r.get("startedAt") or "", reverse=True) records.sort(key=lambda r: r.get("startedAt") or 0, reverse=True)
return records return records
def getActiveSessions(self, instanceId: str) -> List[Dict[str, Any]]: def getActiveSessions(self, instanceId: str) -> List[Dict[str, Any]]:
@ -133,7 +133,7 @@ class TeamsbotObjects:
TeamsbotTranscript, TeamsbotTranscript,
recordFilter={"sessionId": sessionId}, recordFilter={"sessionId": sessionId},
) )
records.sort(key=lambda r: r.get("timestamp") or "") records.sort(key=lambda r: r.get("timestamp") or 0)
if offset: if offset:
records = records[offset:] records = records[offset:]
if limit: if limit:
@ -146,7 +146,7 @@ class TeamsbotObjects:
TeamsbotTranscript, TeamsbotTranscript,
recordFilter={"sessionId": sessionId}, recordFilter={"sessionId": sessionId},
) )
records.sort(key=lambda r: r.get("timestamp") or "") records.sort(key=lambda r: r.get("timestamp") or 0)
return records[-count:] return records[-count:]
def createTranscript(self, transcriptData: Dict[str, Any]) -> Dict[str, Any]: def createTranscript(self, transcriptData: Dict[str, Any]) -> Dict[str, Any]:
@ -176,7 +176,7 @@ class TeamsbotObjects:
TeamsbotBotResponse, TeamsbotBotResponse,
recordFilter={"sessionId": sessionId}, recordFilter={"sessionId": sessionId},
) )
records.sort(key=lambda r: r.get("timestamp") or "") records.sort(key=lambda r: r.get("timestamp") or 0)
return records return records
def createBotResponse(self, responseData: Dict[str, Any]) -> Dict[str, Any]: def createBotResponse(self, responseData: Dict[str, Any]) -> Dict[str, Any]:
@ -293,7 +293,7 @@ class TeamsbotObjects:
if operatorUserId: if operatorUserId:
recordFilter["operatorUserId"] = operatorUserId recordFilter["operatorUserId"] = operatorUserId
records = self.db.getRecordset(TeamsbotDirectorPrompt, recordFilter=recordFilter) records = self.db.getRecordset(TeamsbotDirectorPrompt, recordFilter=recordFilter)
records.sort(key=lambda r: r.get("createdAt") or "") records.sort(key=lambda r: r.get("createdAt") or 0)
return records return records
def getActivePersistentPrompts(self, sessionId: str) -> List[Dict[str, Any]]: def getActivePersistentPrompts(self, sessionId: str) -> List[Dict[str, Any]]:
@ -310,7 +310,7 @@ class TeamsbotObjects:
TeamsbotDirectorPromptStatus.FAILED.value, TeamsbotDirectorPromptStatus.FAILED.value,
} }
active = [r for r in records if r.get("status") not in terminal] active = [r for r in records if r.get("status") not in terminal]
active.sort(key=lambda r: r.get("createdAt") or "") active.sort(key=lambda r: r.get("createdAt") or 0)
return active return active
def updateDirectorPrompt(self, promptId: str, updates: Dict[str, Any]) -> Optional[Dict[str, Any]]: def updateDirectorPrompt(self, promptId: str, updates: Dict[str, Any]) -> Optional[Dict[str, Any]]:

View file

@ -11,13 +11,14 @@ import re
import asyncio import asyncio
import time import time
import base64 import base64
from datetime import datetime, timezone
from typing import Optional, Dict, Any, List, Callable from typing import Optional, Dict, Any, List, Callable
from fastapi import WebSocket from fastapi import WebSocket
from modules.datamodels.datamodelUam import User from modules.datamodels.datamodelUam import User
from modules.datamodels.datamodelAi import AiCallRequest, AiCallOptions, OperationTypeEnum, PriorityEnum from modules.datamodels.datamodelAi import AiCallRequest, AiCallOptions, OperationTypeEnum, PriorityEnum
from modules.shared.timeUtils import getUtcTimestamp, getIsoTimestamp from modules.shared.timeUtils import getUtcTimestamp
from modules.serviceCenter import getService as _getServiceCenterService from modules.serviceCenter import getService as _getServiceCenterService
from modules.serviceCenter.context import ServiceCenterContext from modules.serviceCenter.context import ServiceCenterContext
@ -554,7 +555,7 @@ async def _emitSessionEvent(sessionId: str, eventType: str, data: Any):
Creates the queue on-demand so events are never silently dropped.""" Creates the queue on-demand so events are never silently dropped."""
if sessionId not in sessionEvents: if sessionId not in sessionEvents:
sessionEvents[sessionId] = asyncio.Queue() sessionEvents[sessionId] = asyncio.Queue()
await sessionEvents[sessionId].put({"type": eventType, "data": data, "timestamp": getIsoTimestamp()}) await sessionEvents[sessionId].put({"type": eventType, "data": data, "timestamp": getUtcTimestamp()})
def _normalizeGatewayHostForBotWs(host: str) -> str: def _normalizeGatewayHostForBotWs(host: str) -> str:
@ -780,7 +781,7 @@ class TeamsbotService:
interface.updateSession(sessionId, { interface.updateSession(sessionId, {
"status": TeamsbotSessionStatus.ENDED.value, "status": TeamsbotSessionStatus.ENDED.value,
"endedAt": getIsoTimestamp(), "endedAt": getUtcTimestamp(),
}) })
await _emitSessionEvent(sessionId, "statusChange", {"status": "ended"}) await _emitSessionEvent(sessionId, "statusChange", {"status": "ended"})
@ -794,7 +795,7 @@ class TeamsbotService:
interface.updateSession(sessionId, { interface.updateSession(sessionId, {
"status": TeamsbotSessionStatus.ERROR.value, "status": TeamsbotSessionStatus.ERROR.value,
"errorMessage": str(e), "errorMessage": str(e),
"endedAt": getIsoTimestamp(), "endedAt": getUtcTimestamp(),
}) })
# Cleanup event queue # Cleanup event queue
@ -855,7 +856,7 @@ class TeamsbotService:
try: try:
await _emitSessionEvent(sessionId, "botConnectionState", { await _emitSessionEvent(sessionId, "botConnectionState", {
"connected": True, "connected": True,
"timestamp": getIsoTimestamp(), "timestamp": getUtcTimestamp(),
}) })
except Exception: except Exception:
pass pass
@ -1029,7 +1030,7 @@ class TeamsbotService:
"status": f"playback_{status}", "status": f"playback_{status}",
"hasWebSocket": True, "hasWebSocket": True,
"message": ackMessage, "message": ackMessage,
"timestamp": playback.get("timestamp") or getIsoTimestamp(), "timestamp": playback.get("timestamp") or getUtcTimestamp(),
"format": playback.get("format"), "format": playback.get("format"),
"bytesBase64": playback.get("bytesBase64"), "bytesBase64": playback.get("bytesBase64"),
}) })
@ -1045,7 +1046,7 @@ class TeamsbotService:
"mfaType": mfaType, "mfaType": mfaType,
"displayNumber": displayNumber, "displayNumber": displayNumber,
"prompt": prompt, "prompt": prompt,
"timestamp": getIsoTimestamp(), "timestamp": getUtcTimestamp(),
}) })
from .routeFeatureTeamsbot import mfaCodeQueues, mfaWaitTasks from .routeFeatureTeamsbot import mfaCodeQueues, mfaWaitTasks
@ -1094,7 +1095,7 @@ class TeamsbotService:
"reason": reason, "reason": reason,
"message": errorData.get("message", "Chat message could not be sent"), "message": errorData.get("message", "Chat message could not be sent"),
"text": failedText, "text": failedText,
"timestamp": getIsoTimestamp(), "timestamp": getUtcTimestamp(),
}) })
elif msgType == "mfaResolved": elif msgType == "mfaResolved":
@ -1107,7 +1108,7 @@ class TeamsbotService:
mfaCodeQueues.pop(sessionId, None) mfaCodeQueues.pop(sessionId, None)
await _emitSessionEvent(sessionId, "mfaResolved", { await _emitSessionEvent(sessionId, "mfaResolved", {
"success": success, "success": success,
"timestamp": getIsoTimestamp(), "timestamp": getUtcTimestamp(),
}) })
except Exception as e: except Exception as e:
@ -1122,7 +1123,7 @@ class TeamsbotService:
try: try:
await _emitSessionEvent(sessionId, "botConnectionState", { await _emitSessionEvent(sessionId, "botConnectionState", {
"connected": False, "connected": False,
"timestamp": getIsoTimestamp(), "timestamp": getUtcTimestamp(),
}) })
except Exception: except Exception:
pass pass
@ -1156,9 +1157,9 @@ class TeamsbotService:
if errorMessage: if errorMessage:
updates["errorMessage"] = errorMessage updates["errorMessage"] = errorMessage
if dbStatus == TeamsbotSessionStatus.ACTIVE.value: if dbStatus == TeamsbotSessionStatus.ACTIVE.value:
updates["startedAt"] = getIsoTimestamp() updates["startedAt"] = getUtcTimestamp()
elif dbStatus in [TeamsbotSessionStatus.ENDED.value, TeamsbotSessionStatus.ERROR.value]: elif dbStatus in [TeamsbotSessionStatus.ENDED.value, TeamsbotSessionStatus.ERROR.value]:
updates["endedAt"] = getIsoTimestamp() updates["endedAt"] = getUtcTimestamp()
interface.updateSession(sessionId, updates) interface.updateSession(sessionId, updates)
await _emitSessionEvent(sessionId, "statusChange", {"status": status, "errorMessage": errorMessage}) await _emitSessionEvent(sessionId, "statusChange", {"status": status, "errorMessage": errorMessage})
@ -1350,7 +1351,7 @@ class TeamsbotService:
sessionId=sessionId, sessionId=sessionId,
speaker=speaker, speaker=speaker,
text=text, text=text,
timestamp=getIsoTimestamp(), timestamp=getUtcTimestamp(),
confidence=1.0, confidence=1.0,
language=self.config.language, language=self.config.language,
isFinal=True, isFinal=True,
@ -1363,7 +1364,7 @@ class TeamsbotService:
"speaker": speaker, "speaker": speaker,
"text": text, "text": text,
"confidence": 1.0, "confidence": 1.0,
"timestamp": getIsoTimestamp(), "timestamp": getUtcTimestamp(),
"isContinuation": False, "isContinuation": False,
"source": "chatHistory", "source": "chatHistory",
"isHistory": True, "isHistory": True,
@ -1407,7 +1408,7 @@ class TeamsbotService:
sessionId=sessionId, sessionId=sessionId,
speaker=speaker, speaker=speaker,
text=text, text=text,
timestamp=getIsoTimestamp(), timestamp=getUtcTimestamp(),
confidence=1.0, confidence=1.0,
language=self.config.language, language=self.config.language,
isFinal=isFinal, isFinal=isFinal,
@ -1450,7 +1451,7 @@ class TeamsbotService:
"speaker": speaker, "speaker": speaker,
"text": displayText, "text": displayText,
"confidence": 1.0, "confidence": 1.0,
"timestamp": getIsoTimestamp(), "timestamp": getUtcTimestamp(),
"isContinuation": isMerge, "isContinuation": isMerge,
"source": source, "source": source,
"speakerResolvedFromHint": ( "speakerResolvedFromHint": (
@ -1690,7 +1691,7 @@ class TeamsbotService:
await _emitSessionEvent(sessionId, "speechCancelled", { await _emitSessionEvent(sessionId, "speechCancelled", {
"reason": reason, "reason": reason,
"generation": gen, "generation": gen,
"timestamp": getIsoTimestamp(), "timestamp": getUtcTimestamp(),
}) })
except Exception: except Exception:
pass pass
@ -2079,7 +2080,7 @@ class TeamsbotService:
try: try:
await _emitSessionEvent(sessionId, "quickAck", { await _emitSessionEvent(sessionId, "quickAck", {
"text": ackText, "text": ackText,
"timestamp": getIsoTimestamp(), "timestamp": getUtcTimestamp(),
}) })
cancelHook = self._makeAnswerCancelHook() cancelHook = self._makeAnswerCancelHook()
async with self._meetingTtsLock: async with self._meetingTtsLock:
@ -2387,7 +2388,7 @@ class TeamsbotService:
"status": "requested", "status": "requested",
"hasWebSocket": websocket is not None, "hasWebSocket": websocket is not None,
"message": "TTS generation requested", "message": "TTS generation requested",
"timestamp": getIsoTimestamp(), "timestamp": getUtcTimestamp(),
}) })
logger.info( logger.info(
f"Session {sessionId}: TTS requested (websocket_available={websocket is not None})" f"Session {sessionId}: TTS requested (websocket_available={websocket is not None})"
@ -2400,7 +2401,7 @@ class TeamsbotService:
"status": "unavailable", "status": "unavailable",
"hasWebSocket": False, "hasWebSocket": False,
"message": "TTS skipped — bot websocket unavailable", "message": "TTS skipped — bot websocket unavailable",
"timestamp": getIsoTimestamp(), "timestamp": getUtcTimestamp(),
}) })
if not sendChat: if not sendChat:
sendChat = True sendChat = True
@ -2428,7 +2429,7 @@ class TeamsbotService:
"hasWebSocket": True, "hasWebSocket": True,
"chunks": ttsOutcome.get("chunks"), "chunks": ttsOutcome.get("chunks"),
"played": ttsOutcome.get("played"), "played": ttsOutcome.get("played"),
"timestamp": getIsoTimestamp(), "timestamp": getUtcTimestamp(),
}) })
else: else:
logger.warning( logger.warning(
@ -2440,7 +2441,7 @@ class TeamsbotService:
"chunks": ttsOutcome.get("chunks"), "chunks": ttsOutcome.get("chunks"),
"played": ttsOutcome.get("played"), "played": ttsOutcome.get("played"),
"message": ttsOutcome.get("error"), "message": ttsOutcome.get("error"),
"timestamp": getIsoTimestamp(), "timestamp": getUtcTimestamp(),
}) })
if not sendChat: if not sendChat:
sendChat = True # Fallback to chat if voice-only and TTS failed sendChat = True # Fallback to chat if voice-only and TTS failed
@ -2469,7 +2470,7 @@ class TeamsbotService:
modelName=response.modelName, modelName=response.modelName,
processingTime=response.processingTime, processingTime=response.processingTime,
priceCHF=response.priceCHF, priceCHF=response.priceCHF,
timestamp=getIsoTimestamp(), timestamp=getUtcTimestamp(),
).model_dump() ).model_dump()
createdResponse = interface.createBotResponse(botResponseData) createdResponse = interface.createBotResponse(botResponseData)
@ -2501,7 +2502,7 @@ class TeamsbotService:
sessionId=sessionId, sessionId=sessionId,
speaker=self.config.botName, speaker=self.config.botName,
text=storedText, text=storedText,
timestamp=getIsoTimestamp(), timestamp=getUtcTimestamp(),
confidence=1.0, confidence=1.0,
language=self.config.language, language=self.config.language,
isFinal=True, isFinal=True,
@ -2520,7 +2521,7 @@ class TeamsbotService:
"speaker": self.config.botName, "speaker": self.config.botName,
"text": storedText, "text": storedText,
"confidence": 1.0, "confidence": 1.0,
"timestamp": getIsoTimestamp(), "timestamp": getUtcTimestamp(),
"isContinuation": False, "isContinuation": False,
"source": "botResponse", "source": "botResponse",
"speakerResolvedFromHint": False, "speakerResolvedFromHint": False,
@ -2557,7 +2558,7 @@ class TeamsbotService:
modelName=response.modelName, modelName=response.modelName,
processingTime=response.processingTime, processingTime=response.processingTime,
priceCHF=response.priceCHF, priceCHF=response.priceCHF,
timestamp=getIsoTimestamp(), timestamp=getUtcTimestamp(),
).model_dump() ).model_dump()
createdResponse = interface.createBotResponse(botResponseData) createdResponse = interface.createBotResponse(botResponseData)
await _emitSessionEvent(sessionId, "botResponse", { await _emitSessionEvent(sessionId, "botResponse", {
@ -2707,7 +2708,7 @@ class TeamsbotService:
sessionId=sessionId, sessionId=sessionId,
speaker=self.config.botName, speaker=self.config.botName,
text=chatText, text=chatText,
timestamp=getIsoTimestamp(), timestamp=getUtcTimestamp(),
confidence=1.0, confidence=1.0,
language=self.config.language, language=self.config.language,
isFinal=True, isFinal=True,
@ -2732,7 +2733,7 @@ class TeamsbotService:
"speaker": self.config.botName, "speaker": self.config.botName,
"text": chatText, "text": chatText,
"confidence": 1.0, "confidence": 1.0,
"timestamp": getIsoTimestamp(), "timestamp": getUtcTimestamp(),
"isContinuation": False, "isContinuation": False,
"source": "chat", "source": "chat",
"speakerResolvedFromHint": False, "speakerResolvedFromHint": False,
@ -2749,13 +2750,15 @@ class TeamsbotService:
from . import interfaceFeatureTeamsbot as interfaceDb from . import interfaceFeatureTeamsbot as interfaceDb
interface = interfaceDb.getInterface(self.currentUser, self.mandateId, self.instanceId) interface = interfaceDb.getInterface(self.currentUser, self.mandateId, self.instanceId)
transcripts = interface.getTranscripts(sessionId) transcripts = interface.getTranscripts(sessionId)
fromDt = params.get("fromdatetime") or params.get("fromDateTime") fromDtRaw = params.get("fromdatetime") or params.get("fromDateTime")
toDt = params.get("todatetime") or params.get("toDateTime") toDtRaw = params.get("todatetime") or params.get("toDateTime")
fromTs = datetime.fromisoformat(fromDtRaw).replace(tzinfo=timezone.utc).timestamp() if fromDtRaw else None
toTs = datetime.fromisoformat(toDtRaw).replace(tzinfo=timezone.utc).timestamp() if toDtRaw else None
chatOnly = [t for t in transcripts if t.get("source") in ("chat", "chatHistory")] chatOnly = [t for t in transcripts if t.get("source") in ("chat", "chatHistory")]
if fromDt: if fromTs is not None:
chatOnly = [t for t in chatOnly if (t.get("timestamp") or "") >= fromDt] chatOnly = [t for t in chatOnly if (t.get("timestamp") or 0) >= fromTs]
if toDt: if toTs is not None:
chatOnly = [t for t in chatOnly if (t.get("timestamp") or "") <= toDt] chatOnly = [t for t in chatOnly if (t.get("timestamp") or 0) <= toTs]
summary = "\n".join(f"[{t.get('speaker', '?')}]: {t.get('text', '')}" for t in chatOnly[-20:]) summary = "\n".join(f"[{t.get('speaker', '?')}]: {t.get('text', '')}" for t in chatOnly[-20:])
if not summary: if not summary:
summary = "Keine Chat-Nachrichten im angegebenen Zeitraum." summary = "Keine Chat-Nachrichten im angegebenen Zeitraum."
@ -3002,7 +3005,7 @@ class TeamsbotService:
"text": (prompt.get("text") or "").strip(), "text": (prompt.get("text") or "").strip(),
"fileIds": list(prompt.get("fileIds") or []), "fileIds": list(prompt.get("fileIds") or []),
"note": (internalNote or meetingText or "").strip(), "note": (internalNote or meetingText or "").strip(),
"recordedAt": getIsoTimestamp(), "recordedAt": getUtcTimestamp(),
}) })
if len(self._recentDirectorBriefings) > _RECENT_DIRECTOR_BRIEFINGS_MAX: if len(self._recentDirectorBriefings) > _RECENT_DIRECTOR_BRIEFINGS_MAX:
self._recentDirectorBriefings = self._recentDirectorBriefings[ self._recentDirectorBriefings = self._recentDirectorBriefings[
@ -3066,7 +3069,7 @@ class TeamsbotService:
return False return False
interface.updateDirectorPrompt(promptId, { interface.updateDirectorPrompt(promptId, {
"status": TeamsbotDirectorPromptStatus.CONSUMED.value, "status": TeamsbotDirectorPromptStatus.CONSUMED.value,
"consumedAt": getIsoTimestamp(), "consumedAt": getUtcTimestamp(),
"statusMessage": "Removed by operator", "statusMessage": "Removed by operator",
}) })
self._activePersistentPrompts = [ self._activePersistentPrompts = [
@ -3187,7 +3190,7 @@ class TeamsbotService:
} }
if not isPersistent: if not isPersistent:
updates["status"] = TeamsbotDirectorPromptStatus.CONSUMED.value updates["status"] = TeamsbotDirectorPromptStatus.CONSUMED.value
updates["consumedAt"] = getIsoTimestamp() updates["consumedAt"] = getUtcTimestamp()
interface.updateDirectorPrompt(promptId, updates) interface.updateDirectorPrompt(promptId, updates)
await _emitSessionEvent(sessionId, "directorPrompt", { await _emitSessionEvent(sessionId, "directorPrompt", {
"id": promptId, "id": promptId,
@ -3300,7 +3303,7 @@ class TeamsbotService:
await _emitSessionEvent(sessionId, "agentRun", { await _emitSessionEvent(sessionId, "agentRun", {
"status": "interimNotice", "status": "interimNotice",
"message": text, "message": text,
"timestamp": getIsoTimestamp(), "timestamp": getUtcTimestamp(),
}) })
async def _runAgentForMeeting( async def _runAgentForMeeting(
@ -3352,7 +3355,7 @@ class TeamsbotService:
"source": sourceLabel, "source": sourceLabel,
"promptId": promptId, "promptId": promptId,
"status": "started", "status": "started",
"timestamp": getIsoTimestamp(), "timestamp": getUtcTimestamp(),
}) })
# Director prompts run silently by default — no spontaneous "moment please" # Director prompts run silently by default — no spontaneous "moment please"
@ -3577,7 +3580,7 @@ class TeamsbotService:
"chunks": ttsOutcome.get("chunks"), "chunks": ttsOutcome.get("chunks"),
"played": ttsOutcome.get("played"), "played": ttsOutcome.get("played"),
"error": ttsOutcome.get("error"), "error": ttsOutcome.get("error"),
"timestamp": getIsoTimestamp(), "timestamp": getUtcTimestamp(),
}) })
if not ttsOutcome.get("success"): if not ttsOutcome.get("success"):
logger.warning( logger.warning(
@ -3615,7 +3618,7 @@ class TeamsbotService:
modelName="agent", modelName="agent",
processingTime=0.0, processingTime=0.0,
priceCHF=0.0, priceCHF=0.0,
timestamp=getIsoTimestamp(), timestamp=getUtcTimestamp(),
).model_dump() ).model_dump()
createdResponse = interface.createBotResponse(botResponseData) createdResponse = interface.createBotResponse(botResponseData)
@ -3635,7 +3638,7 @@ class TeamsbotService:
sessionId=sessionId, sessionId=sessionId,
speaker=self.config.botName, speaker=self.config.botName,
text=text, text=text,
timestamp=getIsoTimestamp(), timestamp=getUtcTimestamp(),
confidence=1.0, confidence=1.0,
language=self.config.language, language=self.config.language,
isFinal=True, isFinal=True,
@ -3661,7 +3664,7 @@ class TeamsbotService:
"speaker": self.config.botName, "speaker": self.config.botName,
"text": text, "text": text,
"confidence": 1.0, "confidence": 1.0,
"timestamp": getIsoTimestamp(), "timestamp": getUtcTimestamp(),
"isContinuation": False, "isContinuation": False,
"source": "botResponse", "source": "botResponse",
"speakerResolvedFromHint": False, "speakerResolvedFromHint": False,
@ -3710,7 +3713,7 @@ class TeamsbotService:
modelName="agent", modelName="agent",
processingTime=0.0, processingTime=0.0,
priceCHF=0.0, priceCHF=0.0,
timestamp=getIsoTimestamp(), timestamp=getUtcTimestamp(),
).model_dump() ).model_dump()
createdResponse = interface.createBotResponse(botResponseData) createdResponse = interface.createBotResponse(botResponseData)
@ -3828,7 +3831,7 @@ class TeamsbotService:
"status": "requested", "status": "requested",
"hasWebSocket": True, "hasWebSocket": True,
"message": "Greeting TTS requested", "message": "Greeting TTS requested",
"timestamp": getIsoTimestamp(), "timestamp": getUtcTimestamp(),
}) })
cancelHook = self._makeAnswerCancelHook() cancelHook = self._makeAnswerCancelHook()
async with self._meetingTtsLock: async with self._meetingTtsLock:
@ -3851,7 +3854,7 @@ class TeamsbotService:
"hasWebSocket": True, "hasWebSocket": True,
"chunks": ttsOutcome.get("chunks"), "chunks": ttsOutcome.get("chunks"),
"played": ttsOutcome.get("played"), "played": ttsOutcome.get("played"),
"timestamp": getIsoTimestamp(), "timestamp": getUtcTimestamp(),
}) })
else: else:
logger.warning( logger.warning(
@ -3861,7 +3864,7 @@ class TeamsbotService:
"status": "failed", "status": "failed",
"hasWebSocket": True, "hasWebSocket": True,
"message": ttsOutcome.get("error"), "message": ttsOutcome.get("error"),
"timestamp": getIsoTimestamp(), "timestamp": getUtcTimestamp(),
}) })
if sendToChat: if sendToChat:
@ -3881,7 +3884,7 @@ class TeamsbotService:
sessionId=sessionId, sessionId=sessionId,
speaker=self.config.botName, speaker=self.config.botName,
text=greetingText, text=greetingText,
timestamp=getIsoTimestamp(), timestamp=getUtcTimestamp(),
confidence=1.0, confidence=1.0,
language=greetingLang, language=greetingLang,
isFinal=True, isFinal=True,
@ -3905,14 +3908,14 @@ class TeamsbotService:
"responseType": TeamsbotResponseType.AUDIO.value, "responseType": TeamsbotResponseType.AUDIO.value,
"detectedIntent": "greeting", "detectedIntent": "greeting",
"reasoning": "Automatic join greeting", "reasoning": "Automatic join greeting",
"timestamp": getIsoTimestamp(), "timestamp": getUtcTimestamp(),
}) })
await _emitSessionEvent(sessionId, "transcript", { await _emitSessionEvent(sessionId, "transcript", {
"id": greetingTranscript.get("id"), "id": greetingTranscript.get("id"),
"speaker": self.config.botName, "speaker": self.config.botName,
"text": greetingText, "text": greetingText,
"confidence": 1.0, "confidence": 1.0,
"timestamp": getIsoTimestamp(), "timestamp": getUtcTimestamp(),
"isContinuation": False, "isContinuation": False,
"source": "botResponse", "source": "botResponse",
"speakerResolvedFromHint": False, "speakerResolvedFromHint": False,

View file

@ -8,6 +8,7 @@ Encapsulates: config loading -> connector resolution -> duplicate check -> push
import json import json
import logging import logging
import time import time
from datetime import datetime as _dt, timezone as _tz
from typing import List, Dict, Any, Optional from typing import List, Dict, Any, Optional
from .accountingConnectorBase import ( from .accountingConnectorBase import (
@ -103,9 +104,12 @@ class AccountingBridge:
costCenter=position.get("costCenter"), costCenter=position.get("costCenter"),
)) ))
valutaTs = position.get("valuta")
bookingDateStr = _dt.fromtimestamp(valutaTs, tz=_tz.utc).strftime("%Y-%m-%d") if valutaTs else ""
return AccountingBooking( return AccountingBooking(
reference=position.get("bookingReference") or position.get("id", ""), reference=position.get("bookingReference") or position.get("id", ""),
bookingDate=position.get("valuta") or "", bookingDate=bookingDateStr,
description=position.get("desc", ""), description=position.get("desc", ""),
lines=lines, lines=lines,
) )

View file

@ -21,6 +21,7 @@ import logging
import os import os
import time import time
from collections import defaultdict from collections import defaultdict
from datetime import datetime as _dt, timezone as _tz
from pathlib import Path from pathlib import Path
from typing import Callable, Dict, Any, List, Optional, Type from typing import Callable, Dict, Any, List, Optional, Type
@ -33,6 +34,23 @@ logger = logging.getLogger(__name__)
_HEARTBEAT_EVERY = 500 _HEARTBEAT_EVERY = 500
def _isoDateToTimestamp(raw: Any) -> Optional[float]:
"""Convert an ISO date string (``YYYY-MM-DD`` or datetime) to a UTC
midnight unix timestamp. Returns ``None`` only when *raw* is
falsy/None. Raises ``ValueError`` for non-empty but unparseable
values so import errors are never silently swallowed.
"""
if raw is None or raw == "":
return None
s = str(raw).split("T")[0].strip()[:10]
if not s:
return None
try:
return _dt.strptime(s, "%Y-%m-%d").replace(tzinfo=_tz.utc).timestamp()
except ValueError:
raise ValueError(f"Cannot parse bookingDate '{raw}' as YYYY-MM-DD")
def _isIncomeStatementAccount(accountNumber: str) -> bool: def _isIncomeStatementAccount(accountNumber: str) -> bool:
"""Swiss KMU-Kontenrahmen heuristic: 1xxx + 2xxx -> balance sheet """Swiss KMU-Kontenrahmen heuristic: 1xxx + 2xxx -> balance sheet
(cumulative carry-over across years); 3xxx..9xxx -> income statement (cumulative carry-over across years); 3xxx..9xxx -> income statement
@ -360,8 +378,8 @@ class AccountingDataSync:
logger.exception(f"AccountingDataSync: failed to write core lastSync* fields for cfg {cfgId}: {coreErr}") logger.exception(f"AccountingDataSync: failed to write core lastSync* fields for cfg {cfgId}: {coreErr}")
summary["errors"].append(f"Persist lastSync core: {coreErr}") summary["errors"].append(f"Persist lastSync core: {coreErr}")
extPayload = { extPayload = {
"lastSyncDateFrom": dateFrom, "lastSyncDateFrom": _isoDateToTimestamp(dateFrom),
"lastSyncDateTo": dateTo, "lastSyncDateTo": _isoDateToTimestamp(dateTo),
"lastSyncCounts": { "lastSyncCounts": {
"accounts": int(summary.get("accounts", 0)), "accounts": int(summary.get("accounts", 0)),
"journalEntries": int(summary.get("journalEntries", 0)), "journalEntries": int(summary.get("journalEntries", 0)),
@ -432,18 +450,19 @@ class AccountingDataSync:
newestDate: Optional[str] = None newestDate: Optional[str] = None
for raw in rawEntries: for raw in rawEntries:
entryId = str(_uuid.uuid4()) entryId = str(_uuid.uuid4())
bookingDate = raw.get("bookingDate") rawDate = raw.get("bookingDate")
if bookingDate: bookingTs = _isoDateToTimestamp(rawDate)
normalized = str(bookingDate).split("T")[0][:10] if rawDate:
if normalized: isoDay = str(rawDate).split("T")[0][:10]
if oldestDate is None or normalized < oldestDate: if isoDay:
oldestDate = normalized if oldestDate is None or isoDay < oldestDate:
if newestDate is None or normalized > newestDate: oldestDate = isoDay
newestDate = normalized if newestDate is None or isoDay > newestDate:
newestDate = isoDay
entryRows.append({ entryRows.append({
"id": entryId, "id": entryId,
"externalId": raw.get("externalId"), "externalId": raw.get("externalId"),
"bookingDate": bookingDate, "bookingDate": bookingTs,
"reference": raw.get("reference"), "reference": raw.get("reference"),
"description": raw.get("description", ""), "description": raw.get("description", ""),
"currency": raw.get("currency", "CHF"), "currency": raw.get("currency", "CHF"),
@ -501,17 +520,14 @@ class AccountingDataSync:
"""Persist account balances per (account, period) into ``TrusteeDataAccountBalance``. """Persist account balances per (account, period) into ``TrusteeDataAccountBalance``.
Source of truth (``source="connector"``): the list returned by Source of truth (``source="connector"``): the list returned by
``BaseAccountingConnector.getAccountBalances`` is persisted 1:1. ``BaseAccountingConnector.getAccountBalances`` is persisted with
``openingBalance``/``closingBalance`` from the connector. If the
connector doesn't supply ``debitTotal``/``creditTotal`` (e.g. RMA's
``/gl/saldo`` only returns net balance), those fields are enriched
from the already-imported journal lines.
Fallback (``source="local-fallback"``): aggregate the just-persisted Fallback (``source="local-fallback"``): aggregate the just-persisted
journal lines into **cumulative** balances. Unlike the previous journal lines into **cumulative** balances.
implementation, this version (a) carries the cumulative balance
forward across months/years for balance-sheet accounts, (b) resets
income-statement accounts at fiscal-year start, and (c) computes
``openingBalance`` correctly as the previous period's
``closingBalance``. ``openingBalance`` of the very first imported
period stays at 0 (no prior data available -- by design; see plan
document for rationale).
""" """
t0 = time.time() t0 = time.time()
self._bulkClear(modelBalance, featureInstanceId) self._bulkClear(modelBalance, featureInstanceId)
@ -519,6 +535,9 @@ class AccountingDataSync:
if connectorBalances: if connectorBalances:
rows = [_balanceModelToRow(b, scope) for b in connectorBalances] rows = [_balanceModelToRow(b, scope) for b in connectorBalances]
movements = self._aggregateJournalMovements(featureInstanceId, modelEntry, modelLine)
if movements:
self._enrichRowsWithMovements(rows, movements)
n = self._bulkCreate(modelBalance, rows) n = self._bulkCreate(modelBalance, rows)
logger.info( logger.info(
f"Persisted {n} balances for {featureInstanceId} in {time.time() - t0:.1f}s " f"Persisted {n} balances for {featureInstanceId} in {time.time() - t0:.1f}s "
@ -534,19 +553,19 @@ class AccountingDataSync:
) )
return n return n
def _buildLocalBalanceFallback( def _aggregateJournalMovements(
self, self,
featureInstanceId: str, featureInstanceId: str,
modelEntry: Type, modelEntry: Type,
modelLine: Type, modelLine: Type,
scope: Dict[str, Any], ) -> Dict[tuple, Dict[str, float]]:
) -> List[Dict[str, Any]]: """Aggregate debit/credit movements per ``(accountNumber, year, month)``
"""Aggregate ``TrusteeDataJournalLine`` rows into cumulative period balances. from the already-persisted journal lines.
Returns rows ready for ``_bulkCreate``. Walks every account Returns ``{(accNo, year, month): {"debit": float, "credit": float}}``.
chronologically through all years observed in the journal so the Used by both the local-fallback balance builder and the connector-balance
cumulative balance and per-period opening are exact (within the enrichment (RMA's ``/gl/saldo`` delivers net balance but no debit/credit
bounds of the imported window). breakdown).
""" """
entries = self._if.db.getRecordset( entries = self._if.db.getRecordset(
modelEntry, recordFilter={"featureInstanceId": featureInstanceId}, modelEntry, recordFilter={"featureInstanceId": featureInstanceId},
@ -563,8 +582,6 @@ class AccountingDataSync:
) or [] ) or []
movements: Dict[tuple, Dict[str, float]] = defaultdict(lambda: {"debit": 0.0, "credit": 0.0}) movements: Dict[tuple, Dict[str, float]] = defaultdict(lambda: {"debit": 0.0, "credit": 0.0})
observedYears: set = set()
observedAccounts: set = set()
for ln in lines: for ln in lines:
if isinstance(ln, dict): if isinstance(ln, dict):
jeid = ln.get("journalEntryId", "") jeid = ln.get("journalEntryId", "")
@ -577,19 +594,71 @@ class AccountingDataSync:
debit = float(getattr(ln, "debitAmount", 0)) debit = float(getattr(ln, "debitAmount", 0))
credit = float(getattr(ln, "creditAmount", 0)) credit = float(getattr(ln, "creditAmount", 0))
bdate = entryDates.get(jeid, "") bdate = entryDates.get(jeid)
if not accNo or not bdate: if not accNo or not bdate:
continue continue
parts = str(bdate).split("-")
if len(parts) < 2:
continue
try: try:
year = int(parts[0]) dt = _dt.fromtimestamp(float(bdate), tz=_tz.utc)
month = int(parts[1]) year = dt.year
except ValueError: month = dt.month
except (ValueError, TypeError, OSError):
continue continue
movements[(accNo, year, month)]["debit"] += debit movements[(accNo, year, month)]["debit"] += debit
movements[(accNo, year, month)]["credit"] += credit movements[(accNo, year, month)]["credit"] += credit
return movements
@staticmethod
def _enrichRowsWithMovements(
rows: List[Dict[str, Any]],
movements: Dict[tuple, Dict[str, float]],
) -> None:
"""Patch ``debitTotal`` / ``creditTotal`` on balance rows from journal movements.
For monthly rows: use the exact month's movement.
For annual rows (``periodMonth=0``): sum all 12 months of that year+account.
Only overwrites if the existing value is 0 (connector didn't provide it).
"""
for row in rows:
if row.get("debitTotal", 0) != 0 or row.get("creditTotal", 0) != 0:
continue
accNo = row.get("accountNumber", "")
year = row.get("periodYear", 0)
month = row.get("periodMonth", 0)
if month > 0:
mov = movements.get((accNo, year, month))
if mov:
row["debitTotal"] = round(mov["debit"], 2)
row["creditTotal"] = round(mov["credit"], 2)
else:
yearDebit = 0.0
yearCredit = 0.0
for m in range(1, 13):
mov = movements.get((accNo, year, m))
if mov:
yearDebit += mov["debit"]
yearCredit += mov["credit"]
if yearDebit or yearCredit:
row["debitTotal"] = round(yearDebit, 2)
row["creditTotal"] = round(yearCredit, 2)
def _buildLocalBalanceFallback(
self,
featureInstanceId: str,
modelEntry: Type,
modelLine: Type,
scope: Dict[str, Any],
) -> List[Dict[str, Any]]:
"""Aggregate ``TrusteeDataJournalLine`` rows into cumulative period balances.
Returns rows ready for ``_bulkCreate``. Walks every account
chronologically through all years observed in the journal so the
cumulative balance and per-period opening are exact (within the
bounds of the imported window).
"""
movements = self._aggregateJournalMovements(featureInstanceId, modelEntry, modelLine)
observedYears: set = set()
observedAccounts: set = set()
for (accNo, year, month) in movements:
observedYears.add(year) observedYears.add(year)
observedAccounts.add(accNo) observedAccounts.add(accNo)

View file

@ -46,7 +46,7 @@ class TrusteeOrganisation(PowerOnModel):
description="Mandate ID (system-level organisation)", description="Mandate ID (system-level organisation)",
json_schema_extra={ json_schema_extra={
"label": "Mandat", "label": "Mandat",
"fk_target": {"db": "poweron_app", "table": "Mandate"}, "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
"frontend_type": "text", "frontend_type": "text",
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": False "frontend_required": False
@ -57,7 +57,7 @@ class TrusteeOrganisation(PowerOnModel):
description="Feature Instance ID for instance-level isolation", description="Feature Instance ID for instance-level isolation",
json_schema_extra={ json_schema_extra={
"label": "Feature-Instanz", "label": "Feature-Instanz",
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"}, "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
"frontend_type": "text", "frontend_type": "text",
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": False "frontend_required": False
@ -92,7 +92,7 @@ class TrusteeRole(PowerOnModel):
description="Mandate ID", description="Mandate ID",
json_schema_extra={ json_schema_extra={
"label": "Mandat", "label": "Mandat",
"fk_target": {"db": "poweron_app", "table": "Mandate"}, "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
"frontend_type": "text", "frontend_type": "text",
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": False "frontend_required": False
@ -103,7 +103,7 @@ class TrusteeRole(PowerOnModel):
description="Feature Instance ID for instance-level isolation", description="Feature Instance ID for instance-level isolation",
json_schema_extra={ json_schema_extra={
"label": "Feature-Instanz", "label": "Feature-Instanz",
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"}, "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
"frontend_type": "text", "frontend_type": "text",
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": False "frontend_required": False
@ -132,7 +132,7 @@ class TrusteeAccess(PowerOnModel):
"frontend_readonly": False, "frontend_readonly": False,
"frontend_required": True, "frontend_required": True,
"frontend_options": "/api/trustee/{instanceId}/organisations/options", "frontend_options": "/api/trustee/{instanceId}/organisations/options",
"fk_target": {"db": "poweron_trustee", "table": "TrusteeOrganisation"}, "fk_target": {"db": "poweron_trustee", "table": "TrusteeOrganisation", "labelField": "label"},
} }
) )
roleId: str = Field( roleId: str = Field(
@ -143,7 +143,7 @@ class TrusteeAccess(PowerOnModel):
"frontend_readonly": False, "frontend_readonly": False,
"frontend_required": True, "frontend_required": True,
"frontend_options": "/api/trustee/{instanceId}/roles/options", "frontend_options": "/api/trustee/{instanceId}/roles/options",
"fk_target": {"db": "poweron_trustee", "table": "TrusteeRole"}, "fk_target": {"db": "poweron_trustee", "table": "TrusteeRole", "labelField": "desc"},
} }
) )
userId: str = Field( userId: str = Field(
@ -154,7 +154,7 @@ class TrusteeAccess(PowerOnModel):
"frontend_readonly": False, "frontend_readonly": False,
"frontend_required": True, "frontend_required": True,
"frontend_options": "/api/users/options", "frontend_options": "/api/users/options",
"fk_target": {"db": "poweron_app", "table": "User"}, "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
} }
) )
contractId: Optional[str] = Field( contractId: Optional[str] = Field(
@ -167,7 +167,7 @@ class TrusteeAccess(PowerOnModel):
"frontend_required": False, "frontend_required": False,
"frontend_options": "/api/trustee/{instanceId}/contracts/options", "frontend_options": "/api/trustee/{instanceId}/contracts/options",
"frontend_depends_on": "organisationId", "frontend_depends_on": "organisationId",
"fk_target": {"db": "poweron_trustee", "table": "TrusteeContract"}, "fk_target": {"db": "poweron_trustee", "table": "TrusteeContract", "labelField": "label"},
} }
) )
mandateId: Optional[str] = Field( mandateId: Optional[str] = Field(
@ -175,7 +175,7 @@ class TrusteeAccess(PowerOnModel):
description="Mandate ID", description="Mandate ID",
json_schema_extra={ json_schema_extra={
"label": "Mandat", "label": "Mandat",
"fk_target": {"db": "poweron_app", "table": "Mandate"}, "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
"frontend_type": "text", "frontend_type": "text",
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": False "frontend_required": False
@ -186,7 +186,7 @@ class TrusteeAccess(PowerOnModel):
description="Feature Instance ID for instance-level isolation", description="Feature Instance ID for instance-level isolation",
json_schema_extra={ json_schema_extra={
"label": "Feature-Instanz", "label": "Feature-Instanz",
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"}, "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
"frontend_type": "text", "frontend_type": "text",
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": False "frontend_required": False
@ -215,7 +215,7 @@ class TrusteeContract(PowerOnModel):
"frontend_readonly": False, # Editable at creation, then readonly "frontend_readonly": False, # Editable at creation, then readonly
"frontend_required": True, "frontend_required": True,
"frontend_options": "/api/trustee/{instanceId}/organisations/options", "frontend_options": "/api/trustee/{instanceId}/organisations/options",
"fk_target": {"db": "poweron_trustee", "table": "TrusteeOrganisation"}, "fk_target": {"db": "poweron_trustee", "table": "TrusteeOrganisation", "labelField": "label"},
} }
) )
label: str = Field( label: str = Field(
@ -242,7 +242,7 @@ class TrusteeContract(PowerOnModel):
description="Mandate ID", description="Mandate ID",
json_schema_extra={ json_schema_extra={
"label": "Mandat", "label": "Mandat",
"fk_target": {"db": "poweron_app", "table": "Mandate"}, "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
"frontend_type": "text", "frontend_type": "text",
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": False "frontend_required": False
@ -253,7 +253,7 @@ class TrusteeContract(PowerOnModel):
description="Feature Instance ID for instance-level isolation", description="Feature Instance ID for instance-level isolation",
json_schema_extra={ json_schema_extra={
"label": "Feature-Instanz", "label": "Feature-Instanz",
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"}, "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
"frontend_type": "text", "frontend_type": "text",
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": False "frontend_required": False
@ -311,7 +311,7 @@ class TrusteeDocument(PowerOnModel):
"frontend_type": "file_reference", "frontend_type": "file_reference",
"frontend_readonly": False, "frontend_readonly": False,
"frontend_required": False, "frontend_required": False,
"fk_target": {"db": "poweron_management", "table": "FileItem"}, "fk_target": {"db": "poweron_management", "table": "FileItem", "labelField": "fileName"},
} }
) )
documentName: str = Field( documentName: str = Field(
@ -359,7 +359,7 @@ class TrusteeDocument(PowerOnModel):
description="Mandate ID (auto-set from context)", description="Mandate ID (auto-set from context)",
json_schema_extra={ json_schema_extra={
"label": "Mandat", "label": "Mandat",
"fk_target": {"db": "poweron_app", "table": "Mandate"}, "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
"frontend_type": "text", "frontend_type": "text",
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": False, "frontend_required": False,
@ -371,7 +371,7 @@ class TrusteeDocument(PowerOnModel):
description="Feature Instance ID for instance-level isolation (auto-set from context)", description="Feature Instance ID for instance-level isolation (auto-set from context)",
json_schema_extra={ json_schema_extra={
"label": "Feature-Instanz", "label": "Feature-Instanz",
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"}, "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
"frontend_type": "text", "frontend_type": "text",
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": False, "frontend_required": False,
@ -439,7 +439,7 @@ class TrusteePosition(PowerOnModel):
"frontend_readonly": False, "frontend_readonly": False,
"frontend_required": False, "frontend_required": False,
"frontend_options": "/api/trustee/{instanceId}/documents/options", "frontend_options": "/api/trustee/{instanceId}/documents/options",
"fk_target": {"db": "poweron_trustee", "table": "TrusteeDocument"}, "fk_target": {"db": "poweron_trustee", "table": "TrusteeDocument", "labelField": "documentName"},
} }
) )
bankDocumentId: Optional[str] = Field( bankDocumentId: Optional[str] = Field(
@ -451,12 +451,12 @@ class TrusteePosition(PowerOnModel):
"frontend_readonly": False, "frontend_readonly": False,
"frontend_required": False, "frontend_required": False,
"frontend_options": "/api/trustee/{instanceId}/documents/options", "frontend_options": "/api/trustee/{instanceId}/documents/options",
"fk_target": {"db": "poweron_trustee", "table": "TrusteeDocument"}, "fk_target": {"db": "poweron_trustee", "table": "TrusteeDocument", "labelField": "documentName"},
} }
) )
valuta: Optional[str] = Field( valuta: Optional[float] = Field(
default=None, default=None,
description="Value date (ISO format: YYYY-MM-DD)", description="Value date (UTC midnight unix timestamp)",
json_schema_extra={ json_schema_extra={
"label": "Valutadatum", "label": "Valutadatum",
"frontend_type": "date", "frontend_type": "date",
@ -684,9 +684,9 @@ class TrusteePosition(PowerOnModel):
"frontend_required": False "frontend_required": False
} }
) )
dueDate: Optional[str] = Field( dueDate: Optional[float] = Field(
default=None, default=None,
description="Payment due date (ISO format: YYYY-MM-DD)", description="Payment due date (UTC midnight unix timestamp)",
json_schema_extra={ json_schema_extra={
"label": "Fälligkeitsdatum", "label": "Fälligkeitsdatum",
"frontend_type": "date", "frontend_type": "date",
@ -699,7 +699,7 @@ class TrusteePosition(PowerOnModel):
description="Mandate ID (auto-set from context)", description="Mandate ID (auto-set from context)",
json_schema_extra={ json_schema_extra={
"label": "Mandat", "label": "Mandat",
"fk_target": {"db": "poweron_app", "table": "Mandate"}, "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
"frontend_type": "text", "frontend_type": "text",
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": False, "frontend_required": False,
@ -711,7 +711,7 @@ class TrusteePosition(PowerOnModel):
description="Feature Instance ID for instance-level isolation (auto-set from context)", description="Feature Instance ID for instance-level isolation (auto-set from context)",
json_schema_extra={ json_schema_extra={
"label": "Feature-Instanz", "label": "Feature-Instanz",
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"}, "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
"frontend_type": "text", "frontend_type": "text",
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": False, "frontend_required": False,
@ -742,15 +742,15 @@ class TrusteeDataAccount(PowerOnModel):
accountGroup: Optional[str] = Field(default=None, description="Account group/category", json_schema_extra={"label": "Gruppe"}) accountGroup: Optional[str] = Field(default=None, description="Account group/category", json_schema_extra={"label": "Gruppe"})
currency: str = Field(default="CHF", description="Account currency", json_schema_extra={"label": "Währung"}) currency: str = Field(default="CHF", description="Account currency", json_schema_extra={"label": "Währung"})
isActive: bool = Field(default=True, json_schema_extra={"label": "Aktiv"}) isActive: bool = Field(default=True, json_schema_extra={"label": "Aktiv"})
mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat", "fk_target": {"db": "poweron_app", "table": "Mandate"}}) mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}})
featureInstanceId: Optional[str] = Field(default=None, json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}}) featureInstanceId: Optional[str] = Field(default=None, json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}})
@i18nModel("Buchung (Sync)") @i18nModel("Buchung (Sync)")
class TrusteeDataJournalEntry(PowerOnModel): class TrusteeDataJournalEntry(PowerOnModel):
"""Journal entry header synced from external accounting system.""" """Journal entry header synced from external accounting system."""
id: str = Field(default_factory=lambda: str(uuid.uuid4()), json_schema_extra={"label": "ID"}) id: str = Field(default_factory=lambda: str(uuid.uuid4()), json_schema_extra={"label": "ID"})
externalId: Optional[str] = Field(default=None, description="ID in the source system", json_schema_extra={"label": "Externe ID"}) externalId: Optional[str] = Field(default=None, description="ID in the source system", json_schema_extra={"label": "Externe ID"})
bookingDate: Optional[str] = Field(default=None, description="Booking date (YYYY-MM-DD)", json_schema_extra={"label": "Datum"}) bookingDate: Optional[float] = Field(default=None, description="Booking date (UTC unix timestamp)", json_schema_extra={"label": "Datum", "frontend_type": "timestamp"})
reference: Optional[str] = Field(default=None, description="Booking reference / voucher number", json_schema_extra={"label": "Referenz"}) reference: Optional[str] = Field(default=None, description="Booking reference / voucher number", json_schema_extra={"label": "Referenz"})
description: str = Field(default="", description="Booking text", json_schema_extra={"label": "Beschreibung"}) description: str = Field(default="", description="Booking text", json_schema_extra={"label": "Beschreibung"})
currency: str = Field(default="CHF", json_schema_extra={"label": "Währung"}) currency: str = Field(default="CHF", json_schema_extra={"label": "Währung"})
@ -763,14 +763,14 @@ class TrusteeDataJournalEntry(PowerOnModel):
"frontend_format": "R:#'###.00", "frontend_format": "R:#'###.00",
}, },
) )
mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat", "fk_target": {"db": "poweron_app", "table": "Mandate"}}) mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}})
featureInstanceId: Optional[str] = Field(default=None, json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}}) featureInstanceId: Optional[str] = Field(default=None, json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}})
@i18nModel("Buchungszeile (Sync)") @i18nModel("Buchungszeile (Sync)")
class TrusteeDataJournalLine(PowerOnModel): class TrusteeDataJournalLine(PowerOnModel):
"""Journal entry line (debit/credit) synced from external accounting system.""" """Journal entry line (debit/credit) synced from external accounting system."""
id: str = Field(default_factory=lambda: str(uuid.uuid4()), json_schema_extra={"label": "ID"}) id: str = Field(default_factory=lambda: str(uuid.uuid4()), json_schema_extra={"label": "ID"})
journalEntryId: str = Field(description="FK → TrusteeDataJournalEntry.id", json_schema_extra={"label": "Buchung", "fk_target": {"db": "poweron_trustee", "table": "TrusteeDataJournalEntry"}}) journalEntryId: str = Field(description="FK → TrusteeDataJournalEntry.id", json_schema_extra={"label": "Buchung", "fk_target": {"db": "poweron_trustee", "table": "TrusteeDataJournalEntry", "labelField": "reference"}})
accountNumber: str = Field(description="Account number", json_schema_extra={"label": "Konto"}) accountNumber: str = Field(description="Account number", json_schema_extra={"label": "Konto"})
debitAmount: float = Field(default=0.0, json_schema_extra={"label": "Soll", "frontend_format": "R:#'###.00"}) debitAmount: float = Field(default=0.0, json_schema_extra={"label": "Soll", "frontend_format": "R:#'###.00"})
creditAmount: float = Field(default=0.0, json_schema_extra={"label": "Haben", "frontend_format": "R:#'###.00"}) creditAmount: float = Field(default=0.0, json_schema_extra={"label": "Haben", "frontend_format": "R:#'###.00"})
@ -778,8 +778,8 @@ class TrusteeDataJournalLine(PowerOnModel):
taxCode: Optional[str] = Field(default=None, json_schema_extra={"label": "Steuercode"}) taxCode: Optional[str] = Field(default=None, json_schema_extra={"label": "Steuercode"})
costCenter: Optional[str] = Field(default=None, json_schema_extra={"label": "Kostenstelle"}) costCenter: Optional[str] = Field(default=None, json_schema_extra={"label": "Kostenstelle"})
description: str = Field(default="", json_schema_extra={"label": "Beschreibung"}) description: str = Field(default="", json_schema_extra={"label": "Beschreibung"})
mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat", "fk_target": {"db": "poweron_app", "table": "Mandate"}}) mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}})
featureInstanceId: Optional[str] = Field(default=None, json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}}) featureInstanceId: Optional[str] = Field(default=None, json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}})
@i18nModel("Kontakt (Sync)") @i18nModel("Kontakt (Sync)")
class TrusteeDataContact(PowerOnModel): class TrusteeDataContact(PowerOnModel):
@ -796,8 +796,8 @@ class TrusteeDataContact(PowerOnModel):
email: Optional[str] = Field(default=None, json_schema_extra={"label": "E-Mail"}) email: Optional[str] = Field(default=None, json_schema_extra={"label": "E-Mail"})
phone: Optional[str] = Field(default=None, json_schema_extra={"label": "Telefon"}) phone: Optional[str] = Field(default=None, json_schema_extra={"label": "Telefon"})
vatNumber: Optional[str] = Field(default=None, json_schema_extra={"label": "MWST-Nr."}) vatNumber: Optional[str] = Field(default=None, json_schema_extra={"label": "MWST-Nr."})
mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat", "fk_target": {"db": "poweron_app", "table": "Mandate"}}) mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}})
featureInstanceId: Optional[str] = Field(default=None, json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}}) featureInstanceId: Optional[str] = Field(default=None, json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}})
@i18nModel("Kontosaldo (Sync)") @i18nModel("Kontosaldo (Sync)")
class TrusteeDataAccountBalance(PowerOnModel): class TrusteeDataAccountBalance(PowerOnModel):
@ -811,8 +811,8 @@ class TrusteeDataAccountBalance(PowerOnModel):
creditTotal: float = Field(default=0.0, json_schema_extra={"label": "Haben-Umsatz", "frontend_format": "R:#'###.00"}) creditTotal: float = Field(default=0.0, json_schema_extra={"label": "Haben-Umsatz", "frontend_format": "R:#'###.00"})
closingBalance: float = Field(default=0.0, json_schema_extra={"label": "Schlusssaldo", "frontend_format": "R:#'###.00"}) closingBalance: float = Field(default=0.0, json_schema_extra={"label": "Schlusssaldo", "frontend_format": "R:#'###.00"})
currency: str = Field(default="CHF", json_schema_extra={"label": "Währung"}) currency: str = Field(default="CHF", json_schema_extra={"label": "Währung"})
mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat", "fk_target": {"db": "poweron_app", "table": "Mandate"}}) mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}})
featureInstanceId: Optional[str] = Field(default=None, json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}}) featureInstanceId: Optional[str] = Field(default=None, json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}})
@i18nModel("Buchhaltungs-Konfiguration") @i18nModel("Buchhaltungs-Konfiguration")
class TrusteeAccountingConfig(PowerOnModel): class TrusteeAccountingConfig(PowerOnModel):
@ -822,20 +822,20 @@ class TrusteeAccountingConfig(PowerOnModel):
Credentials are stored encrypted (decrypted at runtime by the AccountingBridge). Credentials are stored encrypted (decrypted at runtime by the AccountingBridge).
""" """
id: str = Field(default_factory=lambda: str(uuid.uuid4()), json_schema_extra={"label": "ID"}) id: str = Field(default_factory=lambda: str(uuid.uuid4()), json_schema_extra={"label": "ID"})
featureInstanceId: str = Field(description="FK -> FeatureInstance.id (1:1)", json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}}) featureInstanceId: str = Field(description="FK -> FeatureInstance.id (1:1)", json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}})
connectorType: str = Field(description="Connector type key, e.g. 'rma', 'bexio', 'abacus'", json_schema_extra={"label": "System"}) connectorType: str = Field(description="Connector type key, e.g. 'rma', 'bexio', 'abacus'", json_schema_extra={"label": "System"})
displayLabel: str = Field(default="", description="User-visible label for this integration", json_schema_extra={"label": "Bezeichnung"}) displayLabel: str = Field(default="", description="User-visible label for this integration", json_schema_extra={"label": "Bezeichnung"})
encryptedConfig: str = Field(default="", description="Encrypted JSON blob with connector credentials", json_schema_extra={"label": "Verschlüsselte Konfiguration"}) encryptedConfig: str = Field(default="", description="Encrypted JSON blob with connector credentials", json_schema_extra={"label": "Verschlüsselte Konfiguration"})
isActive: bool = Field(default=True, json_schema_extra={"label": "Aktiv"}) isActive: bool = Field(default=True, json_schema_extra={"label": "Aktiv"})
lastSyncAt: Optional[float] = Field(default=None, description="Timestamp of last sync attempt", json_schema_extra={"label": "Letzte Synchronisation"}) lastSyncAt: Optional[float] = Field(default=None, description="Timestamp of last sync attempt", json_schema_extra={"label": "Letzte Synchronisation", "frontend_type": "timestamp"})
lastSyncStatus: Optional[str] = Field(default=None, description="Last sync result: success, error, partial", json_schema_extra={"label": "Status"}) lastSyncStatus: Optional[str] = Field(default=None, description="Last sync result: success, error, partial", json_schema_extra={"label": "Status"})
lastSyncErrorMessage: Optional[str] = Field(default=None, description="Error message when lastSyncStatus is error", json_schema_extra={"label": "Fehlermeldung"}) lastSyncErrorMessage: Optional[str] = Field(default=None, description="Error message when lastSyncStatus is error", json_schema_extra={"label": "Fehlermeldung"})
lastSyncDateFrom: Optional[str] = Field(default=None, description="dateFrom (ISO date) of the last data import window", json_schema_extra={"label": "Letztes Import-Fenster von"}) lastSyncDateFrom: Optional[float] = Field(default=None, description="dateFrom (UTC midnight unix timestamp) of the last data import window", json_schema_extra={"label": "Letztes Import-Fenster von", "frontend_type": "date"})
lastSyncDateTo: Optional[str] = Field(default=None, description="dateTo (ISO date) of the last data import window", json_schema_extra={"label": "Letztes Import-Fenster bis"}) lastSyncDateTo: Optional[float] = Field(default=None, description="dateTo (UTC midnight unix timestamp) of the last data import window", json_schema_extra={"label": "Letztes Import-Fenster bis", "frontend_type": "date"})
lastSyncCounts: Optional[Dict[str, Any]] = Field(default=None, description="Last import summary: per-entity counts (accounts, journalEntries, journalLines, contacts, accountBalances) plus oldestBookingDate / newestBookingDate (ISO YYYY-MM-DD) for completeness verification", json_schema_extra={"label": "Letzte Import-Zaehler"}) lastSyncCounts: Optional[Dict[str, Any]] = Field(default=None, description="Last import summary: per-entity counts (accounts, journalEntries, journalLines, contacts, accountBalances) plus oldestBookingDate / newestBookingDate (ISO YYYY-MM-DD) for completeness verification", json_schema_extra={"label": "Letzte Import-Zaehler"})
cachedChartOfAccounts: Optional[str] = Field(default=None, description="JSON-serialised chart of accounts cache (list of {accountNumber, label, accountType})", json_schema_extra={"label": "Cached Kontoplan"}) cachedChartOfAccounts: Optional[str] = Field(default=None, description="JSON-serialised chart of accounts cache (list of {accountNumber, label, accountType})", json_schema_extra={"label": "Cached Kontoplan"})
chartCachedAt: Optional[float] = Field(default=None, description="Timestamp when cachedChartOfAccounts was last refreshed", json_schema_extra={"label": "Kontoplan-Cache-Zeitpunkt"}) chartCachedAt: Optional[float] = Field(default=None, description="Timestamp when cachedChartOfAccounts was last refreshed", json_schema_extra={"label": "Kontoplan-Cache-Zeitpunkt", "frontend_type": "timestamp"})
mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat", "fk_target": {"db": "poweron_app", "table": "Mandate"}}) mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}})
@i18nModel("Buchhaltungs-Synchronisation") @i18nModel("Buchhaltungs-Synchronisation")
class TrusteeAccountingSync(PowerOnModel): class TrusteeAccountingSync(PowerOnModel):
@ -846,16 +846,16 @@ class TrusteeAccountingSync(PowerOnModel):
id: str = Field(default_factory=lambda: str(uuid.uuid4()), json_schema_extra={"label": "ID"}) id: str = Field(default_factory=lambda: str(uuid.uuid4()), json_schema_extra={"label": "ID"})
positionId: str = Field( positionId: str = Field(
description="FK -> TrusteePosition.id", description="FK -> TrusteePosition.id",
json_schema_extra={"label": "Position", "fk_target": {"db": "poweron_trustee", "table": "TrusteePosition"}}, json_schema_extra={"label": "Position", "fk_target": {"db": "poweron_trustee", "table": "TrusteePosition", "labelField": None}},
) )
featureInstanceId: str = Field(description="FK -> FeatureInstance.id", json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}}) featureInstanceId: str = Field(description="FK -> FeatureInstance.id", json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}})
connectorType: str = Field(description="Connector type at time of sync", json_schema_extra={"label": "System"}) connectorType: str = Field(description="Connector type at time of sync", json_schema_extra={"label": "System"})
externalId: Optional[str] = Field(default=None, description="ID assigned by the external system", json_schema_extra={"label": "Externe ID"}) externalId: Optional[str] = Field(default=None, description="ID assigned by the external system", json_schema_extra={"label": "Externe ID"})
externalReference: Optional[str] = Field(default=None, description="Reference in the external system", json_schema_extra={"label": "Externe Referenz"}) externalReference: Optional[str] = Field(default=None, description="Reference in the external system", json_schema_extra={"label": "Externe Referenz"})
syncStatus: str = Field(default="pending", description="pending | synced | error | cancelled", json_schema_extra={"label": "Status"}) syncStatus: str = Field(default="pending", description="pending | synced | error | cancelled", json_schema_extra={"label": "Status"})
syncDirection: str = Field(default="push", description="push (local->ext) or pull (ext->local)", json_schema_extra={"label": "Richtung"}) syncDirection: str = Field(default="push", description="push (local->ext) or pull (ext->local)", json_schema_extra={"label": "Richtung"})
syncedAt: Optional[float] = Field(default=None, description="Timestamp of successful sync", json_schema_extra={"label": "Synchronisiert am"}) syncedAt: Optional[float] = Field(default=None, description="Timestamp of successful sync", json_schema_extra={"label": "Synchronisiert am", "frontend_type": "timestamp"})
errorMessage: Optional[str] = Field(default=None, json_schema_extra={"label": "Fehler"}) errorMessage: Optional[str] = Field(default=None, json_schema_extra={"label": "Fehler"})
bookingPayload: Optional[dict] = Field(default=None, description="Payload sent to the external system (audit)", json_schema_extra={"label": "Buchungs-Payload"}) bookingPayload: Optional[dict] = Field(default=None, description="Payload sent to the external system (audit)", json_schema_extra={"label": "Buchungs-Payload"})
mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat", "fk_target": {"db": "poweron_app", "table": "Mandate"}}) mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}})

View file

@ -126,13 +126,11 @@ def _sanitisePositionPayload(data: Dict[str, Any]) -> Dict[str, Any]:
"""Failsafe normalisation for TrusteePosition payloads before DB writes.""" """Failsafe normalisation for TrusteePosition payloads before DB writes."""
safeData = dict(data or {}) safeData = dict(data or {})
isoValuta = _normaliseIsoDate(safeData.get("valuta")) valutaTs = _normaliseTimestamp(safeData.get("valuta"))
safeData["valuta"] = isoValuta safeData["valuta"] = valutaTs
safeData["transactionDateTime"] = _normaliseTimestamp( txTs = _normaliseTimestamp(safeData.get("transactionDateTime"))
safeData.get("transactionDateTime"), safeData["transactionDateTime"] = txTs if txTs is not None else valutaTs
fallbackIsoDate=isoValuta,
)
safeData["bookingAmount"] = _toSafeFloat(safeData.get("bookingAmount"), defaultValue=0.0) safeData["bookingAmount"] = _toSafeFloat(safeData.get("bookingAmount"), defaultValue=0.0)
safeData["originalAmount"] = _toSafeFloat( safeData["originalAmount"] = _toSafeFloat(
@ -148,7 +146,7 @@ def _sanitisePositionPayload(data: Dict[str, Any]) -> Dict[str, Any]:
safeData["originalCurrency"] = str(originalCurrency).upper() safeData["originalCurrency"] = str(originalCurrency).upper()
if "dueDate" in safeData and safeData["dueDate"]: if "dueDate" in safeData and safeData["dueDate"]:
safeData["dueDate"] = _normaliseIsoDate(safeData["dueDate"]) safeData["dueDate"] = _normaliseTimestamp(safeData["dueDate"])
_VALID_DOC_TYPES = {"invoice", "expense_receipt", "bank_document", "contract", "unknown"} _VALID_DOC_TYPES = {"invoice", "expense_receipt", "bank_document", "contract", "unknown"}
docType = safeData.get("documentType") docType = safeData.get("documentType")

View file

@ -393,9 +393,10 @@ def get_position_options(
items = result.items if hasattr(result, 'items') else result items = result.items if hasattr(result, 'items') else result
def _makePositionLabel(p: TrusteePosition) -> str: def _makePositionLabel(p: TrusteePosition) -> str:
from datetime import datetime as _dt, timezone as _tz
parts = [] parts = []
if p.valuta: if p.valuta:
parts.append(str(p.valuta)[:10]) # Datum ohne Zeit parts.append(_dt.fromtimestamp(p.valuta, tz=_tz.utc).strftime("%Y-%m-%d"))
if p.company: if p.company:
parts.append(p.company[:30]) parts.append(p.company[:30])
if p.desc: if p.desc:
@ -978,33 +979,27 @@ def get_documents(
def _handleDocumentMode(instanceId, mandateId, mode, column, pagination, context): def _handleDocumentMode(instanceId, mandateId, mode, column, pagination, context):
"""Handle mode=filterValues and mode=ids for trustee documents.""" """Handle mode=filterValues and mode=ids for trustee documents."""
from modules.routes.routeHelpers import handleFilterValuesInMemory, handleIdsInMemory from modules.routes.routeHelpers import handleIdsInMemory
if mode == "filterValues": if mode == "filterValues":
if not column: if not column:
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues") raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
try: from modules.interfaces.interfaceRbac import getDistinctColumnValuesWithRBAC
from modules.interfaces.interfaceRbac import getDistinctColumnValuesWithRBAC from modules.routes.routeHelpers import parseCrossFilterPagination
interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId) from fastapi.responses import JSONResponse
from modules.routes.routeHelpers import parseCrossFilterPagination interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
crossFilterPagination = parseCrossFilterPagination(column, pagination) crossFilterPagination = parseCrossFilterPagination(column, pagination)
from fastapi.responses import JSONResponse values = getDistinctColumnValuesWithRBAC(
values = getDistinctColumnValuesWithRBAC( connector=interface.db,
connector=interface.db, modelClass=TrusteeDocument,
modelClass=TrusteeDocument, column=column,
column=column, currentUser=interface.currentUser,
currentUser=interface.currentUser, pagination=crossFilterPagination,
pagination=crossFilterPagination, recordFilter=None,
recordFilter=None, mandateId=interface.mandateId,
mandateId=interface.mandateId, featureInstanceId=interface.featureInstanceId,
featureInstanceId=interface.featureInstanceId, featureCode=interface.FEATURE_CODE
featureCode=interface.FEATURE_CODE )
) return JSONResponse(content=sorted(values, key=lambda v: str(v).lower()))
return JSONResponse(content=sorted(values, key=lambda v: str(v).lower()))
except Exception:
interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
result = interface.getAllDocuments(None)
items = [r.model_dump() if hasattr(r, 'model_dump') else r for r in (result.items if hasattr(result, 'items') else result)]
return handleFilterValuesInMemory(items, column, pagination)
if mode == "ids": if mode == "ids":
interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId) interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
result = interface.getAllDocuments(None) result = interface.getAllDocuments(None)
@ -1227,33 +1222,27 @@ def get_positions(
def _handlePositionMode(instanceId, mandateId, mode, column, pagination, context): def _handlePositionMode(instanceId, mandateId, mode, column, pagination, context):
"""Handle mode=filterValues and mode=ids for trustee positions.""" """Handle mode=filterValues and mode=ids for trustee positions."""
from modules.routes.routeHelpers import handleFilterValuesInMemory, handleIdsInMemory from modules.routes.routeHelpers import handleIdsInMemory
if mode == "filterValues": if mode == "filterValues":
if not column: if not column:
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues") raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
try: from modules.interfaces.interfaceRbac import getDistinctColumnValuesWithRBAC
from modules.interfaces.interfaceRbac import getDistinctColumnValuesWithRBAC from modules.routes.routeHelpers import parseCrossFilterPagination
interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId) from fastapi.responses import JSONResponse
from modules.routes.routeHelpers import parseCrossFilterPagination interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
crossFilterPagination = parseCrossFilterPagination(column, pagination) crossFilterPagination = parseCrossFilterPagination(column, pagination)
from fastapi.responses import JSONResponse values = getDistinctColumnValuesWithRBAC(
values = getDistinctColumnValuesWithRBAC( connector=interface.db,
connector=interface.db, modelClass=TrusteePosition,
modelClass=TrusteePosition, column=column,
column=column, currentUser=interface.currentUser,
currentUser=interface.currentUser, pagination=crossFilterPagination,
pagination=crossFilterPagination, recordFilter=None,
recordFilter=None, mandateId=interface.mandateId,
mandateId=interface.mandateId, featureInstanceId=interface.featureInstanceId,
featureInstanceId=interface.featureInstanceId, featureCode=interface.FEATURE_CODE
featureCode=interface.FEATURE_CODE )
) return JSONResponse(content=sorted(values, key=lambda v: str(v).lower()))
return JSONResponse(content=sorted(values, key=lambda v: str(v).lower()))
except Exception:
interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
result = interface.getAllPositions(None)
items = [r.model_dump() if hasattr(r, 'model_dump') else r for r in (result.items if hasattr(result, 'items') else result)]
return handleFilterValuesInMemory(items, column, pagination)
if mode == "ids": if mode == "ids":
interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId) interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
result = interface.getAllPositions(None) result = interface.getAllPositions(None)
@ -2338,6 +2327,63 @@ def delete_instance_role_rule(
# (Unified Filter API: mode=filterValues / mode=ids). # (Unified Filter API: mode=filterValues / mode=ids).
def _buildFeatureInternalResolvers(modelClass, db) -> Dict[str, Any]:
"""Build ``extraResolvers`` for FK fields that point to other Trustee models.
The builtin ``enrichRowsWithFkLabels`` only covers Mandate / FeatureInstance /
User / Role. Feature-internal FKs (e.g. ``journalEntryId`` -> ``TrusteeDataJournalEntry``)
need a resolver that queries the Trustee DB. This function discovers such fields
from the Pydantic model's ``fk_target`` annotations and creates a resolver per field.
Label strategy per target model:
- ``TrusteeDataJournalEntry``: ``"<externalId> | <bookingDate>"``
- Generic fallback: ``"<externalId>"`` or ``"<id[:8]>"``
"""
resolvers: Dict[str, Any] = {}
for name, fieldInfo in modelClass.model_fields.items():
extra = fieldInfo.json_schema_extra
if not extra or not isinstance(extra, dict):
continue
tgt = extra.get("fk_target")
if not isinstance(tgt, dict):
continue
tableName = tgt.get("table", "")
if tableName not in _TRUSTEE_ENTITY_MODELS:
continue
targetModel = _TRUSTEE_ENTITY_MODELS[tableName]
def _makeResolver(model, field=name):
def _resolve(ids: List[str]) -> Dict[str, Optional[str]]:
result: Dict[str, Optional[str]] = {i: None for i in ids}
try:
recs = db.getRecordset(model, recordFilter={"id": list(set(ids))}) or []
except Exception:
return result
for r in recs:
row = r if isinstance(r, dict) else r.model_dump() if hasattr(r, "model_dump") else {}
rid = row.get("id", "")
parts = []
for col in ("externalId", "reference", "bookingDate", "label", "name", "accountNumber"):
val = row.get(col)
if val is not None and val != "":
if col == "bookingDate" and isinstance(val, (int, float)):
from datetime import datetime, timezone
try:
parts.append(datetime.fromtimestamp(val, tz=timezone.utc).strftime("%Y-%m-%d"))
except Exception:
parts.append(str(val))
else:
parts.append(str(val))
if len(parts) >= 2:
break
result[rid] = " | ".join(parts) if parts else rid[:8]
return result
return _resolve
resolvers[name] = _makeResolver(targetModel)
return resolvers
def _paginatedReadEndpoint( def _paginatedReadEndpoint(
*, *,
instanceId: str, instanceId: str,
@ -2359,7 +2405,6 @@ def _paginatedReadEndpoint(
getDistinctColumnValuesWithRBAC, getDistinctColumnValuesWithRBAC,
) )
from modules.routes.routeHelpers import ( from modules.routes.routeHelpers import (
handleFilterValuesInMemory,
handleIdsInMemory, handleIdsInMemory,
parseCrossFilterPagination, parseCrossFilterPagination,
enrichRowsWithFkLabels, enrichRowsWithFkLabels,
@ -2372,34 +2417,19 @@ def _paginatedReadEndpoint(
if mode == "filterValues": if mode == "filterValues":
if not column: if not column:
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues") raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
try: crossFilterPagination = parseCrossFilterPagination(column, pagination)
crossFilterPagination = parseCrossFilterPagination(column, pagination) values = getDistinctColumnValuesWithRBAC(
values = getDistinctColumnValuesWithRBAC( connector=interface.db,
connector=interface.db, modelClass=modelClass,
modelClass=modelClass, column=column,
column=column, currentUser=interface.currentUser,
currentUser=interface.currentUser, pagination=crossFilterPagination,
pagination=crossFilterPagination, recordFilter=None,
recordFilter=None, mandateId=interface.mandateId,
mandateId=interface.mandateId, featureInstanceId=interface.featureInstanceId,
featureInstanceId=interface.featureInstanceId, featureCode=interface.FEATURE_CODE,
featureCode=interface.FEATURE_CODE, )
) return JSONResponse(content=sorted(values, key=lambda v: str(v).lower()))
return JSONResponse(content=sorted(values, key=lambda v: str(v).lower()))
except Exception:
result = getRecordsetPaginatedWithRBAC(
connector=interface.db,
modelClass=modelClass,
currentUser=interface.currentUser,
pagination=None,
recordFilter=None,
mandateId=interface.mandateId,
featureInstanceId=interface.featureInstanceId,
featureCode=interface.FEATURE_CODE,
)
items = result.items if hasattr(result, "items") else result
items = [r.model_dump() if hasattr(r, "model_dump") else r for r in items]
return handleFilterValuesInMemory(items, column, pagination)
if mode == "ids": if mode == "ids":
result = getRecordsetPaginatedWithRBAC( result = getRecordsetPaginatedWithRBAC(
@ -2431,8 +2461,13 @@ def _paginatedReadEndpoint(
def _itemsToDicts(rawItems): def _itemsToDicts(rawItems):
return [r.model_dump() if hasattr(r, "model_dump") else r for r in rawItems] return [r.model_dump() if hasattr(r, "model_dump") else r for r in rawItems]
featureResolvers = _buildFeatureInternalResolvers(modelClass, interface.db)
if paginationParams and hasattr(result, "items"): if paginationParams and hasattr(result, "items"):
enriched = enrichRowsWithFkLabels(_itemsToDicts(result.items), modelClass) enriched = enrichRowsWithFkLabels(
_itemsToDicts(result.items), modelClass,
extraResolvers=featureResolvers or None,
)
return { return {
"items": enriched, "items": enriched,
"pagination": PaginationMetadata( "pagination": PaginationMetadata(
@ -2445,7 +2480,10 @@ def _paginatedReadEndpoint(
).model_dump(), ).model_dump(),
} }
items = result.items if hasattr(result, "items") else result items = result.items if hasattr(result, "items") else result
enriched = enrichRowsWithFkLabels(_itemsToDicts(items), modelClass) enriched = enrichRowsWithFkLabels(
_itemsToDicts(items), modelClass,
extraResolvers=featureResolvers or None,
)
return {"items": enriched, "pagination": None} return {"items": enriched, "pagination": None}

View file

@ -24,7 +24,7 @@ class WorkspaceUserSettings(PowerOnModel):
"frontend_type": "text", "frontend_type": "text",
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": True, "frontend_required": True,
"fk_target": {"db": "poweron_app", "table": "User"}, "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
}, },
) )
mandateId: str = Field( mandateId: str = Field(
@ -34,7 +34,7 @@ class WorkspaceUserSettings(PowerOnModel):
"frontend_type": "text", "frontend_type": "text",
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": True, "frontend_required": True,
"fk_target": {"db": "poweron_app", "table": "Mandate"}, "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
}, },
) )
featureInstanceId: str = Field( featureInstanceId: str = Field(
@ -44,7 +44,7 @@ class WorkspaceUserSettings(PowerOnModel):
"frontend_type": "text", "frontend_type": "text",
"frontend_readonly": True, "frontend_readonly": True,
"frontend_required": True, "frontend_required": True,
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"}, "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
}, },
) )
maxAgentRounds: Optional[int] = Field( maxAgentRounds: Optional[int] = Field(

View file

@ -1599,18 +1599,19 @@ class AppObjects:
from datetime import datetime, timezone, timedelta from datetime import datetime, timezone, timedelta
now = datetime.now(timezone.utc) now = datetime.now(timezone.utc)
nowTs = now.timestamp()
targetStatus = SubscriptionStatusEnum.TRIALING if plan.trialDays else SubscriptionStatusEnum.ACTIVE targetStatus = SubscriptionStatusEnum.TRIALING if plan.trialDays else SubscriptionStatusEnum.ACTIVE
subscription = MandateSubscription( subscription = MandateSubscription(
mandateId=mandateId, mandateId=mandateId,
planKey=planKey, planKey=planKey,
status=targetStatus, status=targetStatus,
startedAt=now.isoformat(), startedAt=nowTs,
currentPeriodStart=now.isoformat(), currentPeriodStart=nowTs,
) )
if plan.trialDays: if plan.trialDays:
trialEnd = now + timedelta(days=plan.trialDays) trialEnd = now + timedelta(days=plan.trialDays)
subscription.trialEndsAt = trialEnd.isoformat() subscription.trialEndsAt = trialEnd.timestamp()
subscription.currentPeriodEnd = trialEnd.isoformat() subscription.currentPeriodEnd = trialEnd.timestamp()
subInterface = _getSubRoot() subInterface = _getSubRoot()
subInterface.createSubscription(subscription) subInterface.createSubscription(subscription)
@ -1716,19 +1717,19 @@ class AppObjects:
targetStatus = SubscriptionStatusEnum.TRIALING if plan and plan.trialDays else SubscriptionStatusEnum.ACTIVE targetStatus = SubscriptionStatusEnum.TRIALING if plan and plan.trialDays else SubscriptionStatusEnum.ACTIVE
additionalData = { additionalData = {
"currentPeriodStart": now.isoformat(), "currentPeriodStart": now.timestamp(),
} }
if plan and plan.trialDays: if plan and plan.trialDays:
trialEnd = now + timedelta(days=plan.trialDays) trialEnd = now + timedelta(days=plan.trialDays)
additionalData["trialEndsAt"] = trialEnd.isoformat() additionalData["trialEndsAt"] = trialEnd.timestamp()
additionalData["currentPeriodEnd"] = trialEnd.isoformat() additionalData["currentPeriodEnd"] = trialEnd.timestamp()
elif plan and plan.billingPeriod: elif plan and plan.billingPeriod:
from modules.datamodels.datamodelSubscription import BillingPeriodEnum from modules.datamodels.datamodelSubscription import BillingPeriodEnum
if plan.billingPeriod == BillingPeriodEnum.MONTHLY: if plan.billingPeriod == BillingPeriodEnum.MONTHLY:
additionalData["currentPeriodEnd"] = (now + timedelta(days=30)).isoformat() additionalData["currentPeriodEnd"] = (now + timedelta(days=30)).timestamp()
elif plan.billingPeriod == BillingPeriodEnum.YEARLY: elif plan.billingPeriod == BillingPeriodEnum.YEARLY:
additionalData["currentPeriodEnd"] = (now + timedelta(days=365)).isoformat() additionalData["currentPeriodEnd"] = (now + timedelta(days=365)).timestamp()
try: try:
subInterface.transitionStatus( subInterface.transitionStatus(

View file

@ -884,9 +884,10 @@ class BillingObjects:
periodStartAt = periodStartAt.replace(tzinfo=timezone.utc) periodStartAt = periodStartAt.replace(tzinfo=timezone.utc)
else: else:
periodStartAt = periodStartAt.astimezone(timezone.utc) periodStartAt = periodStartAt.astimezone(timezone.utc)
periodStartTs = periodStartAt.timestamp()
settings = self.getOrCreateSettings(mandateId) settings = self.getOrCreateSettings(mandateId)
prev = self._parseSettingsDateTime(settings.get("storagePeriodStartAt")) prev = settings.get("storagePeriodStartAt")
if prev is not None and abs((prev - periodStartAt).total_seconds()) < 2: if prev is not None and abs(prev - periodStartTs) < 2:
return return
from modules.interfaces.interfaceDbSubscription import getRootInterface as _getSubRoot from modules.interfaces.interfaceDbSubscription import getRootInterface as _getSubRoot
@ -896,7 +897,7 @@ class BillingObjects:
{ {
"storageHighWatermarkMB": usedMB, "storageHighWatermarkMB": usedMB,
"storageBilledUpToMB": 0.0, "storageBilledUpToMB": 0.0,
"storagePeriodStartAt": periodStartAt, "storagePeriodStartAt": periodStartTs,
}, },
) )
logger.info( logger.info(
@ -1044,18 +1045,9 @@ class BillingObjects:
if not periodStart or not periodEnd: if not periodStart or not periodEnd:
return None return None
if isinstance(periodStart, str): nowTs = datetime.now(timezone.utc).timestamp()
periodStart = datetime.fromisoformat(periodStart) totalSeconds = periodEnd - periodStart
if isinstance(periodEnd, str): remainingSeconds = max(periodEnd - nowTs, 0)
periodEnd = datetime.fromisoformat(periodEnd)
if periodStart.tzinfo is None:
periodStart = periodStart.replace(tzinfo=timezone.utc)
if periodEnd.tzinfo is None:
periodEnd = periodEnd.replace(tzinfo=timezone.utc)
now = datetime.now(timezone.utc)
totalSeconds = (periodEnd - periodStart).total_seconds()
remainingSeconds = max((periodEnd - now).total_seconds(), 0)
proRataFraction = remainingSeconds / totalSeconds if totalSeconds > 0 else 0 proRataFraction = remainingSeconds / totalSeconds if totalSeconds > 0 else 0
amount = round(abs(delta) * plan.budgetAiPerUserCHF * proRataFraction, 2) amount = round(abs(delta) * plan.budgetAiPerUserCHF * proRataFraction, 2)
@ -1488,7 +1480,7 @@ class BillingObjects:
@staticmethod @staticmethod
def _mapPaginationColumns(pagination: PaginationParams) -> PaginationParams: def _mapPaginationColumns(pagination: PaginationParams) -> PaginationParams:
"""Remap frontend column names to DB column names in filters and sort.""" """Remap frontend column names to DB column names in filters and sort."""
_COL_MAP = {"createdAt": "sysCreatedAt"} _COL_MAP: dict = {}
_ENRICHED_COLS = {"mandateName", "userName", "mandateId", "userId"} _ENRICHED_COLS = {"mandateName", "userName", "mandateId", "userId"}
import copy import copy
p = copy.deepcopy(pagination) p = copy.deepcopy(pagination)
@ -1974,7 +1966,6 @@ class BillingObjects:
) -> List[str]: ) -> List[str]:
"""SQL DISTINCT for filter-values on BillingTransaction, scoped by mandates.""" """SQL DISTINCT for filter-values on BillingTransaction, scoped by mandates."""
_COLUMN_MAP = { _COLUMN_MAP = {
"createdAt": "sysCreatedAt",
"mandateId": "accountId", "mandateId": "accountId",
"mandateName": "accountId", "mandateName": "accountId",
} }

View file

@ -224,7 +224,7 @@ class SubscriptionObjects:
updateData = {"status": toStatus.value} updateData = {"status": toStatus.value}
if toStatus in TERMINAL_STATUSES and not (additionalData or {}).get("endedAt"): if toStatus in TERMINAL_STATUSES and not (additionalData or {}).get("endedAt"):
updateData["endedAt"] = datetime.now(timezone.utc).isoformat() updateData["endedAt"] = datetime.now(timezone.utc).timestamp()
if additionalData: if additionalData:
updateData.update(additionalData) updateData.update(additionalData)
@ -244,7 +244,7 @@ class SubscriptionObjects:
result = self.db.recordModify(MandateSubscription, subscriptionId, { result = self.db.recordModify(MandateSubscription, subscriptionId, {
"status": SubscriptionStatusEnum.EXPIRED.value, "status": SubscriptionStatusEnum.EXPIRED.value,
"endedAt": datetime.now(timezone.utc).isoformat(), "endedAt": datetime.now(timezone.utc).timestamp(),
}) })
logger.info("Force-expired subscription %s (was %s)", subscriptionId, currentStatus) logger.info("Force-expired subscription %s (was %s)", subscriptionId, currentStatus)
return result return result

View file

@ -25,6 +25,7 @@ GROUP-Berechtigung:
import logging import logging
import json import json
import math import math
import re
from typing import List, Dict, Any, Optional, Type, Union from typing import List, Dict, Any, Optional, Type, Union
from pydantic import BaseModel from pydantic import BaseModel
from modules.datamodels.datamodelRbac import AccessRuleContext from modules.datamodels.datamodelRbac import AccessRuleContext
@ -35,6 +36,138 @@ from modules.security.rootAccess import getRootDbAppConnector
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
_ISO_DATE_RE = re.compile(r"^\d{4}-\d{2}-\d{2}$")
def _rbacAppendPaginationDictFilter(
key: str,
val: Dict[str, Any],
colType: str,
whereConditions: List[str],
whereValues: List[Any],
) -> None:
"""Append SQL for one pagination ``filters`` dict entry (operator + value).
Mirrors ``connectorDbPostgre._buildPaginationClauses`` semantics so numeric
comparisons use ``::double precision`` instead of lexicographic ``::TEXT``.
"""
op = val.get("operator", "equals")
v = val.get("value", "")
isNumericCol = colType in ("INTEGER", "DOUBLE PRECISION")
if op in ("equals", "eq"):
if colType == "BOOLEAN":
whereConditions.append(f'COALESCE("{key}", FALSE) = %s')
whereValues.append(str(v).lower() == "true")
elif isNumericCol:
try:
whereConditions.append(f'"{key}"::double precision = %s')
whereValues.append(float(v))
except (ValueError, TypeError):
whereConditions.append(f'"{key}"::TEXT = %s')
whereValues.append(str(v))
else:
whereConditions.append(f'"{key}"::TEXT = %s')
whereValues.append(str(v))
return
if op == "contains":
whereConditions.append(f'"{key}"::TEXT ILIKE %s')
whereValues.append(f"%{v}%")
return
if op == "startsWith":
whereConditions.append(f'"{key}"::TEXT ILIKE %s')
whereValues.append(f"{v}%")
return
if op == "endsWith":
whereConditions.append(f'"{key}"::TEXT ILIKE %s')
whereValues.append(f"%{v}")
return
if op in ("gt", "gte", "lt", "lte"):
sqlOp = {"gt": ">", "gte": ">=", "lt": "<", "lte": "<="}[op]
if isNumericCol:
try:
whereConditions.append(f'"{key}"::double precision {sqlOp} %s')
whereValues.append(float(v))
except (ValueError, TypeError):
whereConditions.append(f'"{key}"::TEXT {sqlOp} %s')
whereValues.append(str(v))
else:
whereConditions.append(f'"{key}"::TEXT {sqlOp} %s')
whereValues.append(str(v))
return
if op == "between" and isinstance(v, dict):
fromVal = v.get("from", "")
toVal = v.get("to", "")
if not fromVal and not toVal:
return
isDateVal = bool(fromVal and _ISO_DATE_RE.match(str(fromVal))) or bool(
toVal and _ISO_DATE_RE.match(str(toVal))
)
if isNumericCol and isDateVal:
from datetime import datetime as _dt, timezone as _tz
if fromVal and toVal:
fromTs = _dt.strptime(str(fromVal), "%Y-%m-%d").replace(tzinfo=_tz.utc).timestamp()
toTs = _dt.strptime(str(toVal), "%Y-%m-%d").replace(
hour=23, minute=59, second=59, tzinfo=_tz.utc
).timestamp()
whereConditions.append(f'"{key}" >= %s AND "{key}" <= %s')
whereValues.extend([fromTs, toTs])
elif fromVal:
fromTs = _dt.strptime(str(fromVal), "%Y-%m-%d").replace(tzinfo=_tz.utc).timestamp()
whereConditions.append(f'"{key}" >= %s')
whereValues.append(fromTs)
else:
toTs = _dt.strptime(str(toVal), "%Y-%m-%d").replace(
hour=23, minute=59, second=59, tzinfo=_tz.utc
).timestamp()
whereConditions.append(f'"{key}" <= %s')
whereValues.append(toTs)
elif isNumericCol:
try:
if fromVal and toVal:
whereConditions.append(
f'"{key}"::double precision >= %s AND "{key}"::double precision <= %s'
)
whereValues.extend([float(fromVal), float(toVal)])
elif fromVal:
whereConditions.append(f'"{key}"::double precision >= %s')
whereValues.append(float(fromVal))
elif toVal:
whereConditions.append(f'"{key}"::double precision <= %s')
whereValues.append(float(toVal))
except (ValueError, TypeError):
pass
else:
if fromVal and toVal:
whereConditions.append(f'"{key}"::TEXT >= %s AND "{key}"::TEXT <= %s')
whereValues.extend([str(fromVal), str(toVal)])
elif fromVal:
whereConditions.append(f'"{key}"::TEXT >= %s')
whereValues.append(str(fromVal))
elif toVal:
whereConditions.append(f'"{key}"::TEXT <= %s')
whereValues.append(str(toVal))
return
if op == "in" and isinstance(v, list):
if not v:
whereConditions.append("1 = 0")
else:
whereConditions.append(f'"{key}"::TEXT = ANY(%s)')
whereValues.append([str(x) for x in v])
return
if op == "notIn" and isinstance(v, list):
if v:
whereConditions.append(f'NOT ("{key}"::TEXT = ANY(%s))')
whereValues.append([str(x) for x in v])
return
whereConditions.append(f'"{key}"::TEXT ILIKE %s')
whereValues.append(str(v))
# ============================================================================= # =============================================================================
# Namespace-Mapping für statische Tabellen # Namespace-Mapping für statische Tabellen
@ -401,36 +534,10 @@ def getRecordsetPaginatedWithRBAC(
whereConditions.append(f'("{key}" IS NULL OR "{key}"::TEXT = \'\')') whereConditions.append(f'("{key}" IS NULL OR "{key}"::TEXT = \'\')')
continue continue
if isinstance(val, dict): if isinstance(val, dict):
op = val.get("operator", "equals") colType = fields.get(key, "TEXT")
v = val.get("value", "") _rbacAppendPaginationDictFilter(
if op in ("equals", "eq"): key, val, colType, whereConditions, whereValues
whereConditions.append(f'"{key}"::TEXT = %s') )
whereValues.append(str(v))
elif op == "contains":
whereConditions.append(f'"{key}"::TEXT ILIKE %s')
whereValues.append(f"%{v}%")
elif op == "startsWith":
whereConditions.append(f'"{key}"::TEXT ILIKE %s')
whereValues.append(f"{v}%")
elif op == "endsWith":
whereConditions.append(f'"{key}"::TEXT ILIKE %s')
whereValues.append(f"%{v}")
elif op in ("gt", "gte", "lt", "lte"):
sqlOp = {"gt": ">", "gte": ">=", "lt": "<", "lte": "<="}[op]
whereConditions.append(f'"{key}"::TEXT {sqlOp} %s')
whereValues.append(str(v))
elif op == "between":
fromVal = v.get("from", "") if isinstance(v, dict) else ""
toVal = v.get("to", "") if isinstance(v, dict) else ""
if fromVal and toVal:
whereConditions.append(f'"{key}"::TEXT >= %s AND "{key}"::TEXT <= %s')
whereValues.extend([str(fromVal), str(toVal)])
elif fromVal:
whereConditions.append(f'"{key}"::TEXT >= %s')
whereValues.append(str(fromVal))
elif toVal:
whereConditions.append(f'"{key}"::TEXT <= %s')
whereValues.append(str(toVal))
else: else:
whereConditions.append(f'"{key}"::TEXT ILIKE %s') whereConditions.append(f'"{key}"::TEXT ILIKE %s')
whereValues.append(str(val)) whereValues.append(str(val))
@ -587,29 +694,10 @@ def getDistinctColumnValuesWithRBAC(
whereConditions.append(f'("{key}" IS NULL OR "{key}"::TEXT = \'\')') whereConditions.append(f'("{key}" IS NULL OR "{key}"::TEXT = \'\')')
continue continue
if isinstance(val, dict): if isinstance(val, dict):
op = val.get("operator", "equals") colType = fields.get(key, "TEXT")
v = val.get("value", "") _rbacAppendPaginationDictFilter(
if op in ("equals", "eq"): key, val, colType, whereConditions, whereValues
whereConditions.append(f'"{key}"::TEXT = %s') )
whereValues.append(str(v))
elif op == "contains":
whereConditions.append(f'"{key}"::TEXT ILIKE %s')
whereValues.append(f"%{v}%")
elif op == "between":
fromVal = v.get("from", "") if isinstance(v, dict) else ""
toVal = v.get("to", "") if isinstance(v, dict) else ""
if fromVal and toVal:
whereConditions.append(f'"{key}"::TEXT >= %s AND "{key}"::TEXT <= %s')
whereValues.extend([str(fromVal), str(toVal)])
elif fromVal:
whereConditions.append(f'"{key}"::TEXT >= %s')
whereValues.append(str(fromVal))
elif toVal:
whereConditions.append(f'"{key}"::TEXT <= %s')
whereValues.append(str(toVal))
else:
whereConditions.append(f'"{key}"::TEXT ILIKE %s')
whereValues.append(str(v) if isinstance(v, str) else str(val))
else: else:
whereConditions.append(f'"{key}"::TEXT ILIKE %s') whereConditions.append(f'"{key}"::TEXT ILIKE %s')
whereValues.append(str(val)) whereValues.append(str(val))

View file

@ -475,6 +475,9 @@ def list_feature_instances(
if mode == "filterValues": if mode == "filterValues":
if not column: if not column:
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues") raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
from modules.routes.routeHelpers import enrichRowsWithFkLabels
from modules.datamodels.datamodelFeatures import FeatureInstance
enrichRowsWithFkLabels(items, FeatureInstance)
return handleFilterValuesInMemory(items, column, pagination) return handleFilterValuesInMemory(items, column, pagination)
if mode == "ids": if mode == "ids":

View file

@ -929,42 +929,17 @@ def list_roles(
if mode == "filterValues": if mode == "filterValues":
if not column: if not column:
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues") raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
from modules.routes.routeHelpers import handleFilterValuesInMemory from modules.routes.routeHelpers import handleFilterValuesInMemory, enrichRowsWithFkLabels
enrichRowsWithFkLabels(result, Role)
return handleFilterValuesInMemory(result, column, pagination) return handleFilterValuesInMemory(result, column, pagination)
if mode == "ids": if mode == "ids":
from modules.routes.routeHelpers import handleIdsInMemory from modules.routes.routeHelpers import handleIdsInMemory
return handleIdsInMemory(result, pagination) return handleIdsInMemory(result, pagination)
# Apply search, filtering and sorting if pagination requested
if paginationParams: if paginationParams:
# Apply search (if search term provided in filters) from modules.routes.routeHelpers import applyFiltersAndSort
searchTerm = paginationParams.filters.get("search", "").lower() if paginationParams.filters else "" sortedResult = applyFiltersAndSort(result, paginationParams)
if searchTerm:
searchedResult = []
for item in result:
roleLabel = (item.get("roleLabel") or "").lower()
descText = (item.get("description") or "").lower()
scopeType = (item.get("scopeType") or "").lower()
if searchTerm in roleLabel or searchTerm in descText or searchTerm in scopeType:
searchedResult.append(item)
result = searchedResult
# Apply filtering (if filters provided)
if paginationParams.filters:
# Use the interface's filter method
filteredResult = interface._applyFilters(result, paginationParams.filters)
else:
filteredResult = result
# Apply sorting (in order of sortFields)
if paginationParams.sort:
sortedResult = interface._applySorting(filteredResult, paginationParams.sort)
else:
sortedResult = filteredResult
# Apply pagination
totalItems = len(sortedResult) totalItems = len(sortedResult)
totalPages = math.ceil(totalItems / paginationParams.pageSize) if totalItems > 0 else 0 totalPages = math.ceil(totalItems / paginationParams.pageSize) if totalItems > 0 else 0
startIdx = (paginationParams.page - 1) * paginationParams.pageSize startIdx = (paginationParams.page - 1) * paginationParams.pageSize

View file

@ -36,37 +36,47 @@ def _applySortFilterSearch(
search: Optional[str] = None, search: Optional[str] = None,
searchableKeys: Optional[List[str]] = None, searchableKeys: Optional[List[str]] = None,
) -> List[Dict[str, Any]]: ) -> List[Dict[str, Any]]:
"""Apply sort, filter and search to a list of dicts in-memory.""" """Apply sort, filter and search to a list of dicts in-memory.
Delegates to the shared ``applyFiltersAndSort`` from routeHelpers so that
date-range filters (``between`` operator) and null/empty filters work
consistently across all in-memory routes.
"""
from modules.routes.routeHelpers import applyFiltersAndSort
from modules.datamodels.datamodelPagination import PaginationParams, SortField
filtersDict: Optional[Dict[str, Any]] = None
if filtersJson: if filtersJson:
try: try:
filters = json.loads(filtersJson) if isinstance(filtersJson, str) else filtersJson filtersDict = json.loads(filtersJson) if isinstance(filtersJson, str) else filtersJson
if isinstance(filters, dict):
for key, val in filters.items():
if val is None or val == "":
continue
if isinstance(val, list):
items = [r for r in items if str(r.get(key, "")) in [str(v) for v in val]]
else:
items = [r for r in items if str(r.get(key, "")).lower() == str(val).lower()]
except (json.JSONDecodeError, TypeError): except (json.JSONDecodeError, TypeError):
pass pass
if search and searchableKeys: if search and searchableKeys:
needle = search.lower() if filtersDict is None:
items = [r for r in items if any(needle in str(r.get(k, "")).lower() for k in searchableKeys)] filtersDict = {}
filtersDict["search"] = search
sortList = None
if sortJson: if sortJson:
try: try:
sortList = json.loads(sortJson) if isinstance(sortJson, str) else sortJson raw = json.loads(sortJson) if isinstance(sortJson, str) else sortJson
if isinstance(sortList, list): if isinstance(raw, list):
for sortDef in reversed(sortList): sortList = raw
field = sortDef.get("field", "")
desc = sortDef.get("direction", "asc") == "desc"
items.sort(key=lambda r, f=field: (r.get(f) is None, r.get(f, "")), reverse=desc)
except (json.JSONDecodeError, TypeError): except (json.JSONDecodeError, TypeError):
pass pass
return items if not filtersDict and not sortList:
return items
sortFields = [SortField(**s) for s in sortList] if sortList else []
params = PaginationParams.model_construct(
page=1,
pageSize=len(items) or 1,
filters=filtersDict or {},
sort=sortFields,
)
return applyFiltersAndSort(items, params)
def _distinctColumnValues(items: List[Dict[str, Any]], column: str) -> List[Optional[str]]: def _distinctColumnValues(items: List[Dict[str, Any]], column: str) -> List[Optional[str]]:

View file

@ -244,7 +244,7 @@ class TransactionResponse(BaseModel):
aicoreProvider: Optional[str] aicoreProvider: Optional[str]
aicoreModel: Optional[str] = None aicoreModel: Optional[str] = None
createdByUserId: Optional[str] = None createdByUserId: Optional[str] = None
createdAt: Optional[datetime] sysCreatedAt: Optional[datetime] = None
mandateId: Optional[str] = None mandateId: Optional[str] = None
mandateName: Optional[str] = None mandateName: Optional[str] = None
@ -311,7 +311,7 @@ class UserTransactionResponse(BaseModel):
aicoreProvider: Optional[str] aicoreProvider: Optional[str]
aicoreModel: Optional[str] = None aicoreModel: Optional[str] = None
createdByUserId: Optional[str] = None createdByUserId: Optional[str] = None
createdAt: Optional[datetime] sysCreatedAt: Optional[datetime] = None
mandateId: Optional[str] = None mandateId: Optional[str] = None
mandateName: Optional[str] = None mandateName: Optional[str] = None
userId: Optional[str] = None userId: Optional[str] = None
@ -515,7 +515,7 @@ def getTransactions(
aicoreProvider=t.get("aicoreProvider"), aicoreProvider=t.get("aicoreProvider"),
aicoreModel=t.get("aicoreModel"), aicoreModel=t.get("aicoreModel"),
createdByUserId=t.get("createdByUserId"), createdByUserId=t.get("createdByUserId"),
createdAt=t.get("sysCreatedAt"), sysCreatedAt=t.get("sysCreatedAt"),
mandateId=t.get("mandateId"), mandateId=t.get("mandateId"),
mandateName=t.get("mandateName") mandateName=t.get("mandateName")
)) ))
@ -1073,13 +1073,9 @@ def handleSubscriptionCheckoutCompleted(session, eventId: str) -> None:
stripeSub = stripeToDict(stripe.Subscription.retrieve(stripeSubId, expand=["items"])) stripeSub = stripeToDict(stripe.Subscription.retrieve(stripeSubId, expand=["items"]))
if stripeSub.get("current_period_start"): if stripeSub.get("current_period_start"):
stripeData["currentPeriodStart"] = datetime.fromtimestamp( stripeData["currentPeriodStart"] = float(stripeSub["current_period_start"])
stripeSub["current_period_start"], tz=timezone.utc
).isoformat()
if stripeSub.get("current_period_end"): if stripeSub.get("current_period_end"):
stripeData["currentPeriodEnd"] = datetime.fromtimestamp( stripeData["currentPeriodEnd"] = float(stripeSub["current_period_end"])
stripeSub["current_period_end"], tz=timezone.utc
).isoformat()
from modules.serviceCenter.services.serviceSubscription.stripeBootstrap import getStripePricesForPlan from modules.serviceCenter.services.serviceSubscription.stripeBootstrap import getStripePricesForPlan
priceMapping = getStripePricesForPlan(planKey) priceMapping = getStripePricesForPlan(planKey)
@ -1211,13 +1207,9 @@ def _handleSubscriptionWebhook(event) -> None:
periodData: Dict[str, Any] = {} periodData: Dict[str, Any] = {}
if obj.get("current_period_start"): if obj.get("current_period_start"):
periodData["currentPeriodStart"] = datetime.fromtimestamp( periodData["currentPeriodStart"] = float(obj["current_period_start"])
obj["current_period_start"], tz=timezone.utc
).isoformat()
if obj.get("current_period_end"): if obj.get("current_period_end"):
periodData["currentPeriodEnd"] = datetime.fromtimestamp( periodData["currentPeriodEnd"] = float(obj["current_period_end"])
obj["current_period_end"], tz=timezone.utc
).isoformat()
if periodData: if periodData:
subInterface.updateFields(subId, periodData) subInterface.updateFields(subId, periodData)
@ -1462,7 +1454,7 @@ def _enrichTransactionRows(transactions) -> List[Dict[str, Any]]:
aicoreProvider=t.get("aicoreProvider"), aicoreProvider=t.get("aicoreProvider"),
aicoreModel=t.get("aicoreModel"), aicoreModel=t.get("aicoreModel"),
createdByUserId=t.get("createdByUserId"), createdByUserId=t.get("createdByUserId"),
createdAt=t.get("sysCreatedAt") sysCreatedAt=t.get("sysCreatedAt")
) )
result.append(row.model_dump()) result.append(row.model_dump())
@ -1588,7 +1580,7 @@ def getMandateViewTransactions(
aicoreProvider=t.get("aicoreProvider"), aicoreProvider=t.get("aicoreProvider"),
aicoreModel=t.get("aicoreModel"), aicoreModel=t.get("aicoreModel"),
createdByUserId=t.get("createdByUserId"), createdByUserId=t.get("createdByUserId"),
createdAt=t.get("sysCreatedAt"), sysCreatedAt=t.get("sysCreatedAt"),
mandateId=t.get("mandateId"), mandateId=t.get("mandateId"),
mandateName=t.get("mandateName") mandateName=t.get("mandateName")
)) ))
@ -1879,7 +1871,7 @@ def getUserViewTransactions(
aicoreProvider=d.get("aicoreProvider"), aicoreProvider=d.get("aicoreProvider"),
aicoreModel=d.get("aicoreModel"), aicoreModel=d.get("aicoreModel"),
createdByUserId=d.get("createdByUserId"), createdByUserId=d.get("createdByUserId"),
createdAt=d.get("sysCreatedAt") or d.get("createdAt"), sysCreatedAt=d.get("sysCreatedAt"),
mandateId=d.get("mandateId"), mandateId=d.get("mandateId"),
mandateName=d.get("mandateName"), mandateName=d.get("mandateName"),
userId=d.get("userId"), userId=d.get("userId"),

View file

@ -179,7 +179,9 @@ async def get_connections(
if not column: if not column:
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues") raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
try: try:
return handleFilterValuesInMemory(_buildEnhancedItems(), column, pagination) items = _buildEnhancedItems()
enrichRowsWithFkLabels(items, UserConnection)
return handleFilterValuesInMemory(items, column, pagination)
except Exception as e: except Exception as e:
logger.error(f"Error getting filter values for connections: {str(e)}") logger.error(f"Error getting filter values for connections: {str(e)}")
raise HTTPException(status_code=500, detail=str(e)) raise HTTPException(status_code=500, detail=str(e))

View file

@ -259,7 +259,6 @@ def get_files(
) )
from modules.routes.routeHelpers import ( from modules.routes.routeHelpers import (
handleFilterValuesInMemory,
handleIdsMode, handleIdsMode,
parseCrossFilterPagination, parseCrossFilterPagination,
) )
@ -275,16 +274,11 @@ def get_files(
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues") raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
crossPagination = parseCrossFilterPagination(column, pagination) crossPagination = parseCrossFilterPagination(column, pagination)
recordFilter = {"sysCreatedBy": managementInterface.userId} recordFilter = {"sysCreatedBy": managementInterface.userId}
try: from fastapi.responses import JSONResponse
from fastapi.responses import JSONResponse values = managementInterface.db.getDistinctColumnValues(
values = managementInterface.db.getDistinctColumnValues( FileItem, column, crossPagination, recordFilter
FileItem, column, crossPagination, recordFilter )
) return JSONResponse(content=sorted(values, key=lambda v: str(v).lower()))
return JSONResponse(content=sorted(values, key=lambda v: str(v).lower()))
except Exception:
result = managementInterface.getAllFiles(pagination=None)
items = [r.model_dump() if hasattr(r, 'model_dump') else r for r in result]
return handleFilterValuesInMemory(items, column, pagination)
if mode == "ids": if mode == "ids":
recordFilter = {"sysCreatedBy": managementInterface.userId} recordFilter = {"sysCreatedBy": managementInterface.userId}

View file

@ -140,15 +140,9 @@ def get_mandates(
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues") raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
if isPlatformAdmin: if isPlatformAdmin:
crossPagination = parseCrossFilterPagination(column, pagination) crossPagination = parseCrossFilterPagination(column, pagination)
try: from fastapi.responses import JSONResponse
from fastapi.responses import JSONResponse values = appInterface.db.getDistinctColumnValues(Mandate, column, crossPagination)
values = appInterface.db.getDistinctColumnValues(Mandate, column, crossPagination) return JSONResponse(content=sorted(values, key=lambda v: str(v).lower()))
return JSONResponse(content=sorted(values, key=lambda v: str(v).lower()))
except Exception:
result = appInterface.getAllMandates(pagination=None)
items = result if isinstance(result, list) else (result.items if hasattr(result, 'items') else result)
items = [i.model_dump() if hasattr(i, 'model_dump') else i for i in items]
return handleFilterValuesInMemory(items, column, pagination)
else: else:
mandateItems = [] mandateItems = []
for mid in adminMandateIds: for mid in adminMandateIds:
@ -325,18 +319,19 @@ def create_mandate(
plan = BUILTIN_PLANS.get(planKey) plan = BUILTIN_PLANS.get(planKey)
if plan: if plan:
now = datetime.now(timezone.utc) now = datetime.now(timezone.utc)
nowTs = now.timestamp()
targetStatus = SubscriptionStatusEnum.TRIALING if plan.trialDays else SubscriptionStatusEnum.ACTIVE targetStatus = SubscriptionStatusEnum.TRIALING if plan.trialDays else SubscriptionStatusEnum.ACTIVE
sub = MandateSubscription( sub = MandateSubscription(
mandateId=str(newMandate.id), mandateId=str(newMandate.id),
planKey=planKey, planKey=planKey,
status=targetStatus, status=targetStatus,
recurring=plan.autoRenew and not plan.trialDays, recurring=plan.autoRenew and not plan.trialDays,
startedAt=now, startedAt=nowTs,
currentPeriodStart=now, currentPeriodStart=nowTs,
) )
if plan.trialDays: if plan.trialDays:
sub.trialEndsAt = now + timedelta(days=plan.trialDays) sub.trialEndsAt = (now + timedelta(days=plan.trialDays)).timestamp()
sub.currentPeriodEnd = now + timedelta(days=plan.trialDays) sub.currentPeriodEnd = (now + timedelta(days=plan.trialDays)).timestamp()
subInterface = _getSubRoot() subInterface = _getSubRoot()
subInterface.createSubscription(sub) subInterface.createSubscription(sub)
logger.info(f"Created {targetStatus.value} subscription ({planKey}) for mandate {newMandate.id}") logger.info(f"Created {targetStatus.value} subscription ({planKey}) for mandate {newMandate.id}")

View file

@ -100,14 +100,9 @@ def _getUserFilterOrIds(context, paginationJson, column=None, idsMode=False):
if idsMode: if idsMode:
return handleIdsMode(rootInterface.db, UserInDB, paginationJson) return handleIdsMode(rootInterface.db, UserInDB, paginationJson)
crossPagination = parseCrossFilterPagination(column, paginationJson) crossPagination = parseCrossFilterPagination(column, paginationJson)
try: from fastapi.responses import JSONResponse
from fastapi.responses import JSONResponse values = rootInterface.db.getDistinctColumnValues(UserInDB, column, crossPagination)
values = rootInterface.db.getDistinctColumnValues(UserInDB, column, crossPagination) return JSONResponse(content=sorted(values, key=lambda v: v.lower()))
return JSONResponse(content=sorted(values, key=lambda v: v.lower()))
except Exception:
users = appInterface.getAllUsers()
items = [u.model_dump() if hasattr(u, 'model_dump') else u for u in users]
return handleFilterValuesInMemory(items, column, paginationJson, requestLang)
rootInterface = getRootInterface() rootInterface = getRootInterface()
userMandates = rootInterface.getUserMandates(str(context.user.id)) userMandates = rootInterface.getUserMandates(str(context.user.id))

View file

@ -111,27 +111,28 @@ def resolveRoleLabels(ids: List[str]) -> Dict[str, Optional[str]]:
_BUILTIN_FK_RESOLVERS: Dict[str, Callable[[List[str]], Dict[str, str]]] = { _BUILTIN_FK_RESOLVERS: Dict[str, Callable[[List[str]], Dict[str, str]]] = {
"Mandate": resolveMandateLabels, "Mandate": resolveMandateLabels,
"FeatureInstance": resolveInstanceLabels, "FeatureInstance": resolveInstanceLabels,
"User": resolveUserLabels, "UserInDB": resolveUserLabels,
"Role": resolveRoleLabels, "Role": resolveRoleLabels,
} }
def _buildLabelResolversFromModel(modelClass: type) -> Dict[str, Callable[[List[str]], Dict[str, str]]]: def _buildLabelResolversFromModel(modelClass: type) -> Dict[str, Callable[[List[str]], Dict[str, str]]]:
""" """
Auto-build labelResolvers dict from fk_model / fk_target annotations on a Pydantic model. Auto-build labelResolvers dict from ``json_schema_extra.fk_target`` on a Pydantic model.
Maps field names to resolver functions for all fields that have a known FK target. Maps field names to resolver functions when the target table has a registered builtin
Unlike ``_get_fk_sort_meta`` this does NOT require ``fk_label_field`` the resolver and ``fk_target.labelField`` is set (non-None).
builtin resolvers already know which column to read.
""" """
resolvers: Dict[str, Callable[[List[str]], Dict[str, str]]] = {} resolvers: Dict[str, Callable[[List[str]], Dict[str, str]]] = {}
for name, fieldInfo in modelClass.model_fields.items(): for name, fieldInfo in modelClass.model_fields.items():
extra = fieldInfo.json_schema_extra extra = fieldInfo.json_schema_extra
if not extra or not isinstance(extra, dict): if not extra or not isinstance(extra, dict):
continue continue
fkModel = extra.get("fk_model")
tgt = extra.get("fk_target") tgt = extra.get("fk_target")
if not fkModel and isinstance(tgt, dict): if not isinstance(tgt, dict):
fkModel = tgt.get("table") continue
if tgt.get("labelField") is None:
continue
fkModel = tgt.get("table")
if fkModel and fkModel in _BUILTIN_FK_RESOLVERS: if fkModel and fkModel in _BUILTIN_FK_RESOLVERS:
resolvers[name] = _BUILTIN_FK_RESOLVERS[fkModel] resolvers[name] = _BUILTIN_FK_RESOLVERS[fkModel]
return resolvers return resolvers
@ -147,7 +148,7 @@ def enrichRowsWithFkLabels(
"""Add ``{field}Label`` columns to each row for every FK field that has a """Add ``{field}Label`` columns to each row for every FK field that has a
registered resolver. registered resolver.
``modelClass`` if provided, resolvers are auto-built from ``fk_model`` ``modelClass`` if provided, resolvers are auto-built from ``fk_target``
annotations on the Pydantic model (via ``_buildLabelResolversFromModel``). annotations on the Pydantic model (via ``_buildLabelResolversFromModel``).
``labelResolvers`` explicit resolver map that overrides auto-built ones. ``labelResolvers`` explicit resolver map that overrides auto-built ones.
@ -354,7 +355,14 @@ def applyFiltersAndSort(
operator = "equals" operator = "equals"
value = filterValue value = filterValue
if value is None or value == "": if value is None:
result = [
item for item in result
if item.get(field) is None or item.get(field) == ""
]
continue
if value == "":
continue continue
result = [ result = [
@ -455,6 +463,19 @@ def _matchesBetween(itemValue: Any, itemStr: str, value: Any) -> bool:
if toTs is not None: if toTs is not None:
return itemNum <= toTs return itemNum <= toTs
except (ValueError, TypeError): except (ValueError, TypeError):
# Numeric range (e.g. FormGeneratorTable column filters on INTEGER/FLOAT)
try:
itemNum = float(itemValue)
fromNum = float(fromVal) if fromVal not in (None, "") else None
toNum = float(toVal) if toVal not in (None, "") else None
if fromNum is not None and toNum is not None:
return fromNum <= itemNum <= toNum
if fromNum is not None:
return itemNum >= fromNum
if toNum is not None:
return itemNum <= toNum
except (ValueError, TypeError):
pass
fromStr = str(fromVal).lower() if fromVal else "" fromStr = str(fromVal).lower() if fromVal else ""
toStr = str(toVal).lower() if toVal else "" toStr = str(toVal).lower() if toVal else ""
if fromStr and toStr: if fromStr and toStr:
@ -470,13 +491,42 @@ def _extractDistinctValues(
items: List[Dict[str, Any]], items: List[Dict[str, Any]],
columnKey: str, columnKey: str,
requestLang: Optional[str] = None, requestLang: Optional[str] = None,
) -> List[Optional[str]]: ) -> list:
"""Extract sorted distinct display values for a column from enriched items. """Extract sorted distinct display values for a column from enriched items.
When the items contain a ``{columnKey}Label`` field (FK enrichment convention),
returns ``{value, label}`` objects so the frontend shows human-readable
labels in filter dropdowns. Otherwise returns plain strings.
Includes ``None`` as the last entry when at least one row has a null/empty Includes ``None`` as the last entry when at least one row has a null/empty
value this enables the "(Leer)" filter option in the frontend. value this enables the "(Leer)" filter option in the frontend.
""" """
_MISSING = object() _MISSING = object()
labelKey = f"{columnKey}Label"
hasFkLabels = any(labelKey in item for item in items[:20])
if hasFkLabels:
byVal: Dict[str, str] = {}
hasEmpty = False
for item in items:
val = item.get(columnKey, _MISSING)
if val is _MISSING:
continue
if val is None or val == "":
hasEmpty = True
continue
strVal = str(val)
if strVal not in byVal:
label = item.get(labelKey)
byVal[strVal] = str(label) if label else f"NA({strVal[:8]})"
result: list = sorted(
[{"value": v, "label": l} for v, l in byVal.items()],
key=lambda x: x["label"].lower(),
)
if hasEmpty:
result.append(None)
return result
values = set() values = set()
hasEmpty = False hasEmpty = False
for item in items: for item in items:
@ -496,7 +546,7 @@ def _extractDistinctValues(
values.add(text) values.add(text)
else: else:
values.add(str(val)) values.add(str(val))
result: List[Optional[str]] = sorted(values, key=lambda v: v.lower()) result = sorted(values, key=lambda v: v.lower())
if hasEmpty: if hasEmpty:
result.append(None) result.append(None)
return result return result

View file

@ -85,8 +85,8 @@ class InvitationResponse(BaseModel):
roleIds: List[str] roleIds: List[str]
targetUsername: Optional[str] targetUsername: Optional[str]
email: Optional[str] email: Optional[str]
createdBy: str sysCreatedBy: str
createdAt: float sysCreatedAt: float
expiresAt: float expiresAt: float
usedBy: Optional[str] usedBy: Optional[str]
usedAt: Optional[float] usedAt: Optional[float]
@ -227,8 +227,8 @@ def create_invitation(
roleIds=data.roleIds, roleIds=data.roleIds,
targetUsername=target_username_val, targetUsername=target_username_val,
email=email_val, email=email_val,
createdBy=str(context.user.id), sysCreatedBy=str(context.user.id),
createdAt=currentTime, sysCreatedAt=currentTime,
expiresAt=expiresAt, expiresAt=expiresAt,
usedBy=None, usedBy=None,
usedAt=None, usedAt=None,
@ -250,8 +250,8 @@ def create_invitation(
roleIds=data.roleIds, roleIds=data.roleIds,
targetUsername=target_username_val, targetUsername=target_username_val,
email=email_val, email=email_val,
createdBy=str(context.user.id), sysCreatedBy=str(context.user.id),
createdAt=currentTime, sysCreatedAt=currentTime,
expiresAt=expiresAt, expiresAt=expiresAt,
usedBy=None, usedBy=None,
usedAt=None, usedAt=None,
@ -268,7 +268,6 @@ def create_invitation(
roleIds=data.roleIds, roleIds=data.roleIds,
targetUsername=target_username_val, targetUsername=target_username_val,
email=email_val, email=email_val,
createdBy=str(context.user.id),
expiresAt=expiresAt, expiresAt=expiresAt,
maxUses=data.maxUses maxUses=data.maxUses
) )
@ -368,8 +367,6 @@ def create_invitation(
f"to {target_desc}, expires in {data.expiresInHours}h" f"to {target_desc}, expires in {data.expiresInHours}h"
) )
# Invitation extends PowerOnModel: recordCreate/_saveRecord set sysCreatedAt and sysCreatedBy automatically.
# API response uses createdAt/createdBy; map from the system fields (no separate createdAt column on model).
return InvitationResponse( return InvitationResponse(
id=str(createdRecord.get("id")), id=str(createdRecord.get("id")),
token=str(createdRecord.get("token")), token=str(createdRecord.get("token")),
@ -378,8 +375,8 @@ def create_invitation(
roleIds=createdRecord.get("roleIds", []), roleIds=createdRecord.get("roleIds", []),
targetUsername=createdRecord.get("targetUsername"), targetUsername=createdRecord.get("targetUsername"),
email=createdRecord.get("email"), email=createdRecord.get("email"),
createdBy=str(createdRecord["sysCreatedBy"]), sysCreatedBy=str(createdRecord["sysCreatedBy"]),
createdAt=float(createdRecord["sysCreatedAt"]), sysCreatedAt=float(createdRecord["sysCreatedAt"]),
expiresAt=createdRecord.get("expiresAt"), expiresAt=createdRecord.get("expiresAt"),
usedBy=createdRecord.get("usedBy"), usedBy=createdRecord.get("usedBy"),
usedAt=createdRecord.get("usedAt"), usedAt=createdRecord.get("usedAt"),
@ -470,7 +467,9 @@ def list_invitations(
if not column: if not column:
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues") raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
try: try:
return handleFilterValuesInMemory(_buildInvitationItems(), column, pagination) items = _buildInvitationItems()
enrichRowsWithFkLabels(items, Invitation)
return handleFilterValuesInMemory(items, column, pagination)
except Exception as e: except Exception as e:
logger.error(f"Error getting filter values for invitations: {e}") logger.error(f"Error getting filter values for invitations: {e}")
raise HTTPException(status_code=500, detail=str(e)) raise HTTPException(status_code=500, detail=str(e))

View file

@ -106,11 +106,11 @@ def _autoActivatePending(subInterface, pendingSub: Dict[str, Any]) -> None:
now = datetime.now(timezone.utc) now = datetime.now(timezone.utc)
targetStatus = SubscriptionStatusEnum.TRIALING if plan and plan.trialDays else SubscriptionStatusEnum.ACTIVE targetStatus = SubscriptionStatusEnum.TRIALING if plan and plan.trialDays else SubscriptionStatusEnum.ACTIVE
additionalData = {"currentPeriodStart": now.isoformat()} additionalData = {"currentPeriodStart": now.timestamp()}
if plan and plan.trialDays: if plan and plan.trialDays:
trialEnd = now + timedelta(days=plan.trialDays) trialEnd = now + timedelta(days=plan.trialDays)
additionalData["trialEndsAt"] = trialEnd.isoformat() additionalData["trialEndsAt"] = trialEnd.timestamp()
additionalData["currentPeriodEnd"] = trialEnd.isoformat() additionalData["currentPeriodEnd"] = trialEnd.timestamp()
try: try:
subInterface.transitionStatus( subInterface.transitionStatus(

View file

@ -486,7 +486,11 @@ def getAllSubscriptions(
if mode == "filterValues": if mode == "filterValues":
if not column: if not column:
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues") raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
return handleFilterValuesInMemory(_buildEnrichedSubscriptions(), column, pagination) from modules.routes.routeHelpers import enrichRowsWithFkLabels
from modules.datamodels.datamodelSubscription import MandateSubscription
items = _buildEnrichedSubscriptions()
enrichRowsWithFkLabels(items, MandateSubscription)
return handleFilterValuesInMemory(items, column, pagination)
if mode == "ids": if mode == "ids":
return handleIdsInMemory(_buildEnrichedSubscriptions(), pagination) return handleIdsInMemory(_buildEnrichedSubscriptions(), pagination)

View file

@ -581,12 +581,9 @@ def _buildIntegrationsOverviewPayload(userId: str, user=None) -> Dict[str, Any]:
# --- Extractors (registered extensions, unique + per-class rows) --- # --- Extractors (registered extensions, unique + per-class rows) ---
try: try:
from modules.serviceCenter.services.serviceExtraction.mainServiceExtraction import ExtractionService from modules.serviceCenter.services.serviceExtraction.subRegistry import getExtractorRegistry
from modules.serviceCenter.services.serviceExtraction.subRegistry import ExtractorRegistry
if ExtractionService._sharedExtractorRegistry is None: reg = getExtractorRegistry()
ExtractionService._sharedExtractorRegistry = ExtractorRegistry()
reg = ExtractionService._sharedExtractorRegistry
ext_map = reg.getExtensionToMimeMap() ext_map = reg.getExtensionToMimeMap()
uniq = sorted({str(k).upper() for k in ext_map.keys() if k and "." not in str(k)}) uniq = sorted({str(k).upper() for k in ext_map.keys() if k and "." not in str(k)})
out["extractorExtensions"] = uniq out["extractorExtensions"] = uniq

View file

@ -132,7 +132,7 @@ def _updateJob(jobId: str, fields: Dict[str, Any]) -> None:
def _markStarted(jobId: str) -> None: def _markStarted(jobId: str) -> None:
_updateJob(jobId, { _updateJob(jobId, {
"status": BackgroundJobStatusEnum.RUNNING.value, "status": BackgroundJobStatusEnum.RUNNING.value,
"startedAt": datetime.now(timezone.utc), "startedAt": datetime.now(timezone.utc).timestamp(),
}) })
@ -141,7 +141,7 @@ def _markSuccess(jobId: str, result: Optional[Dict[str, Any]]) -> None:
"status": BackgroundJobStatusEnum.SUCCESS.value, "status": BackgroundJobStatusEnum.SUCCESS.value,
"result": result or {}, "result": result or {},
"progress": 100, "progress": 100,
"finishedAt": datetime.now(timezone.utc), "finishedAt": datetime.now(timezone.utc).timestamp(),
}) })
@ -150,7 +150,7 @@ def _markError(jobId: str, errorMessage: str) -> None:
_updateJob(jobId, { _updateJob(jobId, {
"status": BackgroundJobStatusEnum.ERROR.value, "status": BackgroundJobStatusEnum.ERROR.value,
"errorMessage": truncated, "errorMessage": truncated,
"finishedAt": datetime.now(timezone.utc), "finishedAt": datetime.now(timezone.utc).timestamp(),
}) })
@ -211,7 +211,7 @@ def listJobs(
out = [r for r in out if r.get("featureInstanceId") == featureInstanceId] out = [r for r in out if r.get("featureInstanceId") == featureInstanceId]
if jobType is not None: if jobType is not None:
out = [r for r in out if r.get("jobType") == jobType] out = [r for r in out if r.get("jobType") == jobType]
out.sort(key=lambda r: r.get("createdAt") or "", reverse=True) out.sort(key=lambda r: r.get("createdAt") or 0, reverse=True)
return out[:limit] return out[:limit]

View file

@ -142,6 +142,7 @@ class SubscriptionService:
self._cleanupPreparatorySubscriptions(mid) self._cleanupPreparatorySubscriptions(mid)
now = datetime.now(timezone.utc) now = datetime.now(timezone.utc)
nowTs = now.timestamp()
if plan.trialDays: if plan.trialDays:
initialStatus = SubscriptionStatusEnum.TRIALING initialStatus = SubscriptionStatusEnum.TRIALING
elif isPaid: elif isPaid:
@ -154,19 +155,19 @@ class SubscriptionService:
planKey=planKey, planKey=planKey,
status=initialStatus, status=initialStatus,
recurring=plan.autoRenew and not plan.trialDays, recurring=plan.autoRenew and not plan.trialDays,
startedAt=now, startedAt=nowTs,
currentPeriodStart=now, currentPeriodStart=nowTs,
snapshotPricePerUserCHF=plan.pricePerUserCHF, snapshotPricePerUserCHF=plan.pricePerUserCHF,
snapshotPricePerInstanceCHF=plan.pricePerFeatureInstanceCHF, snapshotPricePerInstanceCHF=plan.pricePerFeatureInstanceCHF,
) )
if plan.trialDays: if plan.trialDays:
sub.trialEndsAt = now + timedelta(days=plan.trialDays) sub.trialEndsAt = (now + timedelta(days=plan.trialDays)).timestamp()
if plan.billingPeriod == BillingPeriodEnum.MONTHLY: if plan.billingPeriod == BillingPeriodEnum.MONTHLY:
sub.currentPeriodEnd = now + timedelta(days=30) sub.currentPeriodEnd = (now + timedelta(days=30)).timestamp()
elif plan.billingPeriod == BillingPeriodEnum.YEARLY: elif plan.billingPeriod == BillingPeriodEnum.YEARLY:
sub.currentPeriodEnd = now + timedelta(days=365) sub.currentPeriodEnd = (now + timedelta(days=365)).timestamp()
created = self._interface.createSubscription(sub) created = self._interface.createSubscription(sub)
@ -310,11 +311,8 @@ class SubscriptionService:
) )
if currentOperative and currentOperative.get("currentPeriodEnd") and not isTrialPredecessor: if currentOperative and currentOperative.get("currentPeriodEnd") and not isTrialPredecessor:
periodEnd = currentOperative["currentPeriodEnd"] periodEnd = currentOperative["currentPeriodEnd"]
if isinstance(periodEnd, str): subscriptionData["trial_end"] = int(periodEnd)
periodEnd = datetime.fromisoformat(periodEnd) self._interface.updateFields(subRecord["id"], {"effectiveFrom": periodEnd})
trialEndTs = int(periodEnd.timestamp())
subscriptionData["trial_end"] = trialEndTs
self._interface.updateFields(subRecord["id"], {"effectiveFrom": periodEnd.isoformat()})
session = None session = None
for attempt in range(2): for attempt in range(2):
@ -509,9 +507,7 @@ class SubscriptionService:
periodEnd = sub.get("currentPeriodEnd") periodEnd = sub.get("currentPeriodEnd")
if periodEnd: if periodEnd:
if isinstance(periodEnd, str): if periodEnd <= datetime.now(timezone.utc).timestamp():
periodEnd = datetime.fromisoformat(periodEnd)
if periodEnd <= datetime.now(timezone.utc):
raise ValueError("Cannot reactivate — period has already ended") raise ValueError("Cannot reactivate — period has already ended")
stripeSubId = sub.get("stripeSubscriptionId") stripeSubId = sub.get("stripeSubscriptionId")

View file

@ -18,6 +18,7 @@ StripePlanPrice is updated. Other stale active Prices on the same Product
""" """
import logging import logging
from concurrent.futures import ThreadPoolExecutor, as_completed
from typing import Dict, Optional from typing import Dict, Optional
from modules.connectors.connectorDbPostgre import DatabaseConnector from modules.connectors.connectorDbPostgre import DatabaseConnector
@ -242,8 +243,142 @@ def _validateStripeIdsExist(stripe, mapping: StripePlanPrice) -> bool:
return False return False
def _processOnePlan(
stripe,
planKey: str,
plan: SubscriptionPlan,
existingMapping: Optional[StripePlanPrice],
) -> None:
"""Reconcile or provision Stripe Products/Prices for a single plan.
Each call uses its own DB connection so it is safe to run in a thread pool.
"""
stripePeriod = _PERIOD_TO_STRIPE.get(plan.billingPeriod)
if not stripePeriod:
return
interval = stripePeriod["interval"]
intervalCount = int(stripePeriod.get("interval_count") or 1)
db = _getBillingDb()
if existingMapping:
mapping = existingMapping
hasAllPrices = mapping.stripePriceIdUsers and mapping.stripePriceIdInstances
hasAllProducts = mapping.stripeProductIdUsers and mapping.stripeProductIdInstances
if hasAllPrices and hasAllProducts:
if _validateStripeIdsExist(stripe, mapping):
changed = False
reconciledUsers = _reconcilePrice(
stripe, mapping.stripeProductIdUsers, mapping.stripePriceIdUsers,
plan.pricePerUserCHF, interval, f"{planKey} — Benutzer-Lizenz",
intervalCount,
)
if reconciledUsers != mapping.stripePriceIdUsers:
changed = True
reconciledInstances = _reconcilePrice(
stripe, mapping.stripeProductIdInstances, mapping.stripePriceIdInstances,
plan.pricePerFeatureInstanceCHF, interval, f"{planKey} — Modul",
intervalCount,
)
if reconciledInstances != mapping.stripePriceIdInstances:
changed = True
_archiveOtherRecurringPrices(
stripe, mapping.stripeProductIdUsers, reconciledUsers, interval, intervalCount,
)
_archiveOtherRecurringPrices(
stripe, mapping.stripeProductIdInstances, reconciledInstances, interval, intervalCount,
)
if changed:
db.recordModify(StripePlanPrice, mapping.id, {
"stripePriceIdUsers": reconciledUsers,
"stripePriceIdInstances": reconciledInstances,
})
logger.info(
"Reconciled Stripe prices for plan %s to catalog (CHF): users=%s, instances=%s",
planKey, reconciledUsers, reconciledInstances,
)
else:
logger.debug("Stripe prices up-to-date for plan %s", planKey)
return
else:
logger.warning(
"Stored Stripe IDs for plan %s reference unknown objects "
"(likely wrong Stripe account or copied DB) — re-provisioning.",
planKey,
)
productIdUsers = None
productIdInstances = None
priceIdUsers = None
priceIdInstances = None
if plan.pricePerUserCHF > 0:
productIdUsers = _findStripeProduct(stripe, planKey, "users")
if not productIdUsers:
productIdUsers = _createStripeProduct(
stripe, "Benutzer-Lizenzen", f"Benutzer-Lizenzen für {plan.title or planKey}",
planKey, "users",
)
userCents = int(round(plan.pricePerUserCHF * 100))
priceIdUsers = _findExistingStripePrice(
stripe, productIdUsers, userCents, interval, intervalCount,
)
if not priceIdUsers:
priceIdUsers = _createStripePrice(
stripe, productIdUsers, plan.pricePerUserCHF, interval, f"{planKey} — Benutzer-Lizenz",
intervalCount,
)
_archiveOtherRecurringPrices(stripe, productIdUsers, priceIdUsers, interval, intervalCount)
if plan.pricePerFeatureInstanceCHF > 0:
productIdInstances = _findStripeProduct(stripe, planKey, "instances")
if not productIdInstances:
productIdInstances = _createStripeProduct(
stripe, "Module", f"Module für {plan.title or planKey}",
planKey, "instances",
)
instCents = int(round(plan.pricePerFeatureInstanceCHF * 100))
priceIdInstances = _findExistingStripePrice(
stripe, productIdInstances, instCents, interval, intervalCount,
)
if not priceIdInstances:
priceIdInstances = _createStripePrice(
stripe, productIdInstances, plan.pricePerFeatureInstanceCHF, interval,
f"{planKey} — Modul",
intervalCount,
)
_archiveOtherRecurringPrices(
stripe, productIdInstances, priceIdInstances, interval, intervalCount,
)
persistData = {
"stripeProductId": "",
"stripeProductIdUsers": productIdUsers,
"stripeProductIdInstances": productIdInstances,
"stripePriceIdUsers": priceIdUsers,
"stripePriceIdInstances": priceIdInstances,
}
if existingMapping:
db.recordModify(StripePlanPrice, existingMapping.id, persistData)
else:
db.recordCreate(StripePlanPrice, StripePlanPrice(planKey=planKey, **persistData).model_dump())
logger.info(
"Stripe bootstrapped for %s: users=%s/%s, instances=%s/%s",
planKey, productIdUsers, priceIdUsers, productIdInstances, priceIdInstances,
)
def bootstrapStripePrices() -> None: def bootstrapStripePrices() -> None:
"""Ensure all paid plans have separate Stripe Products for users and instances.""" """Ensure all paid plans have separate Stripe Products for users and instances.
Plans are processed in parallel (one thread per plan) to reduce boot time.
Each thread uses its own DB connection; Stripe SDK is thread-safe.
"""
try: try:
from modules.shared.stripeClient import getStripeClient from modules.shared.stripeClient import getStripeClient
stripe = getStripeClient() stripe = getStripeClient()
@ -251,132 +386,29 @@ def bootstrapStripePrices() -> None:
logger.error("Stripe not configured — cannot bootstrap subscription prices: %s", e) logger.error("Stripe not configured — cannot bootstrap subscription prices: %s", e)
return return
db = _getBillingDb() existing = _loadExistingMappings(_getBillingDb())
existing = _loadExistingMappings(db)
for planKey, plan in BUILTIN_PLANS.items(): plans = [
if plan.billingPeriod == BillingPeriodEnum.NONE: (planKey, plan)
continue for planKey, plan in BUILTIN_PLANS.items()
if plan.pricePerUserCHF == 0 and plan.pricePerFeatureInstanceCHF == 0: if plan.billingPeriod != BillingPeriodEnum.NONE
continue and (plan.pricePerUserCHF > 0 or plan.pricePerFeatureInstanceCHF > 0)
]
stripePeriod = _PERIOD_TO_STRIPE.get(plan.billingPeriod) if not plans:
if not stripePeriod: return
continue
interval = stripePeriod["interval"] with ThreadPoolExecutor(max_workers=len(plans)) as executor:
intervalCount = int(stripePeriod.get("interval_count") or 1) futures = {
executor.submit(_processOnePlan, stripe, planKey, plan, existing.get(planKey)): planKey
if planKey in existing: for planKey, plan in plans
mapping = existing[planKey]
hasAllPrices = mapping.stripePriceIdUsers and mapping.stripePriceIdInstances
hasAllProducts = mapping.stripeProductIdUsers and mapping.stripeProductIdInstances
if hasAllPrices and hasAllProducts:
if _validateStripeIdsExist(stripe, mapping):
changed = False
reconciledUsers = _reconcilePrice(
stripe, mapping.stripeProductIdUsers, mapping.stripePriceIdUsers,
plan.pricePerUserCHF, interval, f"{planKey} — Benutzer-Lizenz",
intervalCount,
)
if reconciledUsers != mapping.stripePriceIdUsers:
changed = True
reconciledInstances = _reconcilePrice(
stripe, mapping.stripeProductIdInstances, mapping.stripePriceIdInstances,
plan.pricePerFeatureInstanceCHF, interval, f"{planKey} — Modul",
intervalCount,
)
if reconciledInstances != mapping.stripePriceIdInstances:
changed = True
_archiveOtherRecurringPrices(
stripe, mapping.stripeProductIdUsers, reconciledUsers, interval, intervalCount,
)
_archiveOtherRecurringPrices(
stripe, mapping.stripeProductIdInstances, reconciledInstances, interval, intervalCount,
)
if changed:
db.recordModify(StripePlanPrice, mapping.id, {
"stripePriceIdUsers": reconciledUsers,
"stripePriceIdInstances": reconciledInstances,
})
logger.info(
"Reconciled Stripe prices for plan %s to catalog (CHF): users=%s, instances=%s",
planKey, reconciledUsers, reconciledInstances,
)
else:
logger.debug("Stripe prices up-to-date for plan %s", planKey)
continue
else:
logger.warning(
"Stored Stripe IDs for plan %s reference unknown objects "
"(likely wrong Stripe account or copied DB) — re-provisioning.",
planKey,
)
productIdUsers = None
productIdInstances = None
priceIdUsers = None
priceIdInstances = None
if plan.pricePerUserCHF > 0:
productIdUsers = _findStripeProduct(stripe, planKey, "users")
if not productIdUsers:
productIdUsers = _createStripeProduct(
stripe, "Benutzer-Lizenzen", f"Benutzer-Lizenzen für {plan.title or planKey}",
planKey, "users",
)
userCents = int(round(plan.pricePerUserCHF * 100))
priceIdUsers = _findExistingStripePrice(
stripe, productIdUsers, userCents, interval, intervalCount,
)
if not priceIdUsers:
priceIdUsers = _createStripePrice(
stripe, productIdUsers, plan.pricePerUserCHF, interval, f"{planKey} — Benutzer-Lizenz",
intervalCount,
)
_archiveOtherRecurringPrices(stripe, productIdUsers, priceIdUsers, interval, intervalCount)
if plan.pricePerFeatureInstanceCHF > 0:
productIdInstances = _findStripeProduct(stripe, planKey, "instances")
if not productIdInstances:
productIdInstances = _createStripeProduct(
stripe, "Module", f"Module für {plan.title or planKey}",
planKey, "instances",
)
instCents = int(round(plan.pricePerFeatureInstanceCHF * 100))
priceIdInstances = _findExistingStripePrice(
stripe, productIdInstances, instCents, interval, intervalCount,
)
if not priceIdInstances:
priceIdInstances = _createStripePrice(
stripe, productIdInstances, plan.pricePerFeatureInstanceCHF, interval,
f"{planKey} — Modul",
intervalCount,
)
_archiveOtherRecurringPrices(
stripe, productIdInstances, priceIdInstances, interval, intervalCount,
)
persistData = {
"stripeProductId": "",
"stripeProductIdUsers": productIdUsers,
"stripeProductIdInstances": productIdInstances,
"stripePriceIdUsers": priceIdUsers,
"stripePriceIdInstances": priceIdInstances,
} }
for future in as_completed(futures):
if planKey in existing: planKey = futures[future]
db.recordModify(StripePlanPrice, existing[planKey].id, persistData) try:
else: future.result()
db.recordCreate(StripePlanPrice, StripePlanPrice(planKey=planKey, **persistData).model_dump()) except Exception as e:
logger.error("Stripe bootstrap failed for plan %s: %s", planKey, e)
logger.info(
"Stripe bootstrapped for %s: users=%s/%s, instances=%s/%s",
planKey, productIdUsers, priceIdUsers, productIdInstances, priceIdInstances,
)
def getStripePricesForPlan(planKey: str) -> Optional[StripePlanPrice]: def getStripePricesForPlan(planKey: str) -> Optional[StripePlanPrice]:

View file

@ -291,15 +291,11 @@ def getModelAttributeDefinitions(modelClass: Type[BaseModel] = None, userLanguag
} }
mergedExtra = _mergedFieldJsonExtra(field) mergedExtra = _mergedFieldJsonExtra(field)
fkModelName = mergedExtra.get("fk_model")
fkTarget = mergedExtra.get("fk_target") fkTarget = mergedExtra.get("fk_target")
if not fkModelName and isinstance(fkTarget, dict) and fkTarget.get("table"): if isinstance(fkTarget, dict) and fkTarget.get("table"):
fkModelName = fkTarget.get("table") attr_def["fkModel"] = fkTarget["table"]
hasFk = bool(fkModelName) or (isinstance(fkTarget, dict) and bool(fkTarget.get("table"))) if fkTarget.get("labelField"):
if hasFk: attr_def["displayField"] = f"{name}Label"
attr_def["displayField"] = f"{name}Label"
if fkModelName:
attr_def["fkModel"] = fkModelName
# Render hints (Excel-like format string + i18n-resolved label tokens). # Render hints (Excel-like format string + i18n-resolved label tokens).
# Labels are resolved server-side via resolveText() so the FE renders them # Labels are resolved server-side via resolveText() so the FE renders them
@ -318,6 +314,37 @@ def getModelAttributeDefinitions(modelClass: Type[BaseModel] = None, userLanguag
return {"model": model_label, "attributes": attributes} return {"model": model_label, "attributes": attributes}
def _loadFeatureDatamodelClasses(modelClasses: Dict[str, Type[BaseModel]]) -> None:
    """Register Pydantic models from ``modules.features.*`` ``datamodel*.py`` files.

    Walks the ``features`` directory next to this module's parent package,
    imports every ``datamodel*.py`` file found, and stores each ``BaseModel``
    subclass it exposes into *modelClasses* keyed by class name. Import
    failures are logged and skipped so one broken feature module cannot
    prevent the others from loading.
    """
    featuresRoot = os.path.join(os.path.dirname(os.path.dirname(__file__)), "features")
    if not os.path.isdir(featuresRoot):
        # Nothing to scan — e.g. a deployment without a features tree.
        return
    for currentDir, _subDirs, fileNames in os.walk(featuresRoot):
        for fileName in fileNames:
            isDatamodelFile = fileName.startswith("datamodel") and fileName.endswith(".py")
            if not isDatamodelFile:
                continue
            relativePath = os.path.relpath(os.path.join(currentDir, fileName), featuresRoot)
            dottedPath = os.path.splitext(relativePath)[0].replace("\\", ".").replace("/", ".")
            module_name = f"modules.features.{dottedPath}"
            try:
                loadedModule = importlib.import_module(module_name)
                # Register every BaseModel subclass exposed by the module —
                # including re-exports, since getmembers sees imported names too.
                for className, candidate in inspect.getmembers(loadedModule):
                    if (
                        inspect.isclass(candidate)
                        and issubclass(candidate, BaseModel)
                        and candidate != BaseModel
                    ):
                        modelClasses[className] = candidate
            except Exception as e:
                logger.warning(
                    f"Error importing feature datamodel module {module_name}: {str(e)}",
                    exc_info=True,
                )
def getModelClasses() -> Dict[str, Type[BaseModel]]: def getModelClasses() -> Dict[str, Type[BaseModel]]:
""" """
Dynamically get all model classes from all model modules. Dynamically get all model classes from all model modules.
@ -375,6 +402,8 @@ def getModelClasses() -> Dict[str, Type[BaseModel]]:
logger.warning(f"Error importing module {module_name}: {str(e)}", exc_info=True) logger.warning(f"Error importing module {module_name}: {str(e)}", exc_info=True)
# Continue with other modules even if one fails # Continue with other modules even if one fails
_loadFeatureDatamodelClasses(modelClasses)
return modelClasses return modelClasses

View file

@ -241,3 +241,32 @@ def _invalidateFkCache() -> None:
with _lock: with _lock:
_cachedRelationships = None _cachedRelationships = None
_cachedTableToDb = None _cachedTableToDb = None
_FK_TARGET_REQUIRED_KEYS = {"db", "table", "labelField"}


def validateFkTargets() -> List[str]:
    """Validate every ``fk_target`` dict across all registered PowerOnModel subclasses.

    Returns a list of error strings (empty = all good).

    Each ``fk_target`` must contain exactly ``db``, ``table``, and ``labelField``
    (``labelField`` may be ``None``). Both missing and unexpected keys are
    reported, matching the "exactly" contract above — previously only missing
    keys were flagged, so a typo'd key (e.g. ``lableField``) passed silently.
    """
    _ensureModelsLoaded()
    errors: List[str] = []
    for tableName, modelCls in MODEL_REGISTRY.items():
        for fieldName, fieldInfo in modelCls.model_fields.items():
            extra = fieldInfo.json_schema_extra
            # NOTE(review): json_schema_extra may also be a callable in Pydantic;
            # those are skipped here, presumably by design — confirm.
            if not isinstance(extra, dict):
                continue
            fkTarget = extra.get("fk_target")
            if fkTarget is None:
                continue
            if not isinstance(fkTarget, dict):
                errors.append(f"{tableName}.{fieldName}: fk_target is not a dict ({type(fkTarget).__name__})")
                continue
            missing = _FK_TARGET_REQUIRED_KEYS - fkTarget.keys()
            if missing:
                errors.append(f"{tableName}.{fieldName}: fk_target missing keys {sorted(missing)}")
            # Fail fast on keys outside the contract: they are most likely typos
            # that would otherwise be silently ignored downstream.
            unexpected = fkTarget.keys() - _FK_TARGET_REQUIRED_KEYS
            if unexpected:
                errors.append(f"{tableName}.{fieldName}: fk_target has unexpected keys {sorted(unexpected)}")
    return errors

View file

@ -15,7 +15,7 @@ syncToAccounting (via DataRef on documents[0]).
import json import json
import logging import logging
from datetime import datetime from datetime import datetime, timezone
from typing import Dict, Any, List, Optional from typing import Dict, Any, List, Optional
from modules.datamodels.datamodelChat import ActionResult, ActionDocument from modules.datamodels.datamodelChat import ActionResult, ActionDocument
@ -79,6 +79,31 @@ def _parseIsoDate(value: Any) -> Optional[datetime]:
return None return None
def _toTimestamp(value: Any) -> Optional[float]:
"""Convert ISO date string or numeric value to UTC midnight unix timestamp."""
if value is None or value == "":
return None
if isinstance(value, (int, float)):
return float(value)
raw = _cleanStr(value)
if not raw:
return None
try:
return datetime.strptime(raw[:10], "%Y-%m-%d").replace(tzinfo=timezone.utc).timestamp()
except ValueError:
return None
def _timestampToDatetime(value: Any) -> Optional[datetime]:
"""Convert UTC unix timestamp (float) to datetime for proximity scoring."""
if value is None:
return None
try:
return datetime.fromtimestamp(float(value), tz=timezone.utc)
except (ValueError, TypeError, OSError):
return None
def _normaliseAmount(value: Any) -> float: def _normaliseAmount(value: Any) -> float:
"""Use absolute rounded amount, since bank lines are often signed.""" """Use absolute rounded amount, since bank lines are often signed."""
return round(abs(_parseFloat(value)), 2) return round(abs(_parseFloat(value)), 2)
@ -103,7 +128,7 @@ def _findBestBankMatch(
bankRef = _normaliseRef(bankPosition.get("paymentReference") or bankPosition.get("bookingReference")) bankRef = _normaliseRef(bankPosition.get("paymentReference") or bankPosition.get("bookingReference"))
bankAmount = _normaliseAmount(bankPosition.get("bookingAmount")) bankAmount = _normaliseAmount(bankPosition.get("bookingAmount"))
bankIban = _normaliseRef(bankPosition.get("payeeIban")) bankIban = _normaliseRef(bankPosition.get("payeeIban"))
bankDate = _parseIsoDate(bankPosition.get("valuta")) bankDate = _timestampToDatetime(bankPosition.get("valuta"))
bankCompany = _normaliseCompany(bankPosition.get("company")) bankCompany = _normaliseCompany(bankPosition.get("company"))
bestScore = 0 bestScore = 0
@ -122,7 +147,7 @@ def _findBestBankMatch(
candidateRef = _normaliseRef(candidate.get("paymentReference") or candidate.get("bookingReference")) candidateRef = _normaliseRef(candidate.get("paymentReference") or candidate.get("bookingReference"))
candidateAmount = _normaliseAmount(candidate.get("bookingAmount")) candidateAmount = _normaliseAmount(candidate.get("bookingAmount"))
candidateIban = _normaliseRef(candidate.get("payeeIban")) candidateIban = _normaliseRef(candidate.get("payeeIban"))
candidateDate = _parseIsoDate(candidate.get("valuta")) candidateDate = _timestampToDatetime(candidate.get("valuta"))
candidateCompany = _normaliseCompany(candidate.get("company")) candidateCompany = _normaliseCompany(candidate.get("company"))
# Strongest signal: structured payment reference / invoice reference match. # Strongest signal: structured payment reference / invoice reference match.
@ -183,7 +208,7 @@ def _recordToPosition(record: Dict[str, Any], documentId: Optional[str], feature
return { return {
"documentId": documentId, "documentId": documentId,
"documentType": recDocType, "documentType": recDocType,
"valuta": record.get("valuta"), "valuta": _toTimestamp(record.get("valuta")),
"transactionDateTime": record.get("transactionDateTime"), "transactionDateTime": record.get("transactionDateTime"),
"company": record.get("company", ""), "company": record.get("company", ""),
"desc": record.get("desc", ""), "desc": record.get("desc", ""),
@ -203,7 +228,7 @@ def _recordToPosition(record: Dict[str, Any], documentId: Optional[str], feature
"payeeName": _cleanStr(record.get("payeeName")), "payeeName": _cleanStr(record.get("payeeName")),
"payeeBic": _cleanStr(record.get("payeeBic")), "payeeBic": _cleanStr(record.get("payeeBic")),
"paymentReference": _cleanStr(record.get("paymentReference")), "paymentReference": _cleanStr(record.get("paymentReference")),
"dueDate": _cleanStr(record.get("dueDate")), "dueDate": _toTimestamp(record.get("dueDate")),
"featureInstanceId": featureInstanceId, "featureInstanceId": featureInstanceId,
"mandateId": mandateId, "mandateId": mandateId,
} }

View file

@ -20,6 +20,7 @@ This action does NOT trigger an external sync — use
import json import json
import logging import logging
import re import re
from datetime import datetime as _dt, timezone as _tz
from typing import Any, Dict, List, Optional from typing import Any, Dict, List, Optional
from modules.datamodels.datamodelChat import ActionResult from modules.datamodels.datamodelChat import ActionResult
@ -27,6 +28,26 @@ from modules.datamodels.datamodelChat import ActionResult
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
def _isoToTs(isoDate: Optional[str]) -> Optional[float]:
"""``YYYY-MM-DD`` → UTC midnight unix timestamp (or None)."""
if not isoDate:
return None
try:
return _dt.strptime(isoDate.strip()[:10], "%Y-%m-%d").replace(tzinfo=_tz.utc).timestamp()
except (ValueError, AttributeError):
return None
def _tsToIso(ts) -> Optional[str]:
"""Unix timestamp → ``YYYY-MM-DD`` (or None)."""
if ts is None:
return None
try:
return _dt.fromtimestamp(float(ts), tz=_tz.utc).strftime("%Y-%m-%d")
except (ValueError, TypeError, OSError):
return None
_NAME_NORMALIZE_RE = re.compile(r"[^a-z0-9]+") _NAME_NORMALIZE_RE = re.compile(r"[^a-z0-9]+")
_ENTITY_TO_MODEL = { _ENTITY_TO_MODEL = {
"contact": "TrusteeDataContact", "contact": "TrusteeDataContact",
@ -224,7 +245,9 @@ def _deriveRentForContact(
if not entries or not lines: if not entries or not lines:
return [], None return [], None
fromDate, toDate = _parsePeriod(period) fromDateStr, toDateStr = _parsePeriod(period)
fromTs = _isoToTs(fromDateStr)
toTs = _isoToTs(toDateStr)
accountMatcher = _accountMatcher(accountPattern) accountMatcher = _accountMatcher(accountPattern)
nameKey = _normalizeText(contact.get("name") or "") nameKey = _normalizeText(contact.get("name") or "")
contactNumber = (contact.get("contactNumber") or "").strip() contactNumber = (contact.get("contactNumber") or "").strip()
@ -236,10 +259,10 @@ def _deriveRentForContact(
eid = e.get("id") eid = e.get("id")
if not eid: if not eid:
continue continue
bDate = e.get("bookingDate") or "" bDate = e.get("bookingDate")
if fromDate and bDate and bDate < fromDate: if fromTs is not None and bDate is not None and float(bDate) < fromTs:
continue continue
if toDate and bDate and bDate > toDate: if toTs is not None and bDate is not None and float(bDate) > toTs + 86399:
continue continue
descKey = _normalizeText(" ".join([e.get("description") or "", e.get("reference") or ""])) descKey = _normalizeText(" ".join([e.get("description") or "", e.get("reference") or ""]))
if (nameKey and nameKey in descKey) or (contactNumber and contactNumber in (e.get("reference") or "")): if (nameKey and nameKey in descKey) or (contactNumber and contactNumber in (e.get("reference") or "")):
@ -260,7 +283,7 @@ def _deriveRentForContact(
amount = credit - debit amount = credit - debit
e = entryById.get(ln.get("journalEntryId"), {}) e = entryById.get(ln.get("journalEntryId"), {})
rentLines.append({ rentLines.append({
"date": e.get("bookingDate"), "date": _tsToIso(e.get("bookingDate")),
"ref": e.get("reference"), "ref": e.get("reference"),
"account": accountNo, "account": accountNo,
"amount": round(amount, 2), "amount": round(amount, 2),

View file

@ -8,12 +8,33 @@ Checks lastSyncAt to avoid redundant imports unless forceRefresh is set.
import json import json
import logging import logging
import time import time
from typing import Dict, Any from datetime import datetime as _dt, timezone as _tz
from typing import Dict, Any, Optional
from modules.datamodels.datamodelChat import ActionResult from modules.datamodels.datamodelChat import ActionResult
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
def _isoToTs(isoDate: Optional[str]) -> Optional[float]:
"""``YYYY-MM-DD`` → UTC midnight unix timestamp (or None)."""
if not isoDate:
return None
try:
return _dt.strptime(isoDate.strip()[:10], "%Y-%m-%d").replace(tzinfo=_tz.utc).timestamp()
except (ValueError, AttributeError):
return None
def _tsToIso(ts) -> Optional[str]:
"""Unix timestamp → ``YYYY-MM-DD`` (or None)."""
if ts is None:
return None
try:
return _dt.fromtimestamp(float(ts), tz=_tz.utc).strftime("%Y-%m-%d")
except (ValueError, TypeError, OSError):
return None
_SYNC_THRESHOLD_SECONDS = 3600 _SYNC_THRESHOLD_SECONDS = 3600
@ -147,16 +168,18 @@ def _exportAccountingData(trusteeInterface, featureInstanceId: str, dateFrom: st
}) })
entries = trusteeInterface.db.getRecordset(TrusteeDataJournalEntry, recordFilter=baseFilter) or [] entries = trusteeInterface.db.getRecordset(TrusteeDataJournalEntry, recordFilter=baseFilter) or []
fromTs = _isoToTs(dateFrom)
toTs = _isoToTs(dateTo)
entryMap = {} entryMap = {}
for e in entries: for e in entries:
eid = e.get("id", "") eid = e.get("id", "")
bDate = e.get("bookingDate", "") bDate = e.get("bookingDate")
if dateFrom and bDate and bDate < dateFrom: if fromTs is not None and bDate is not None and float(bDate) < fromTs:
continue continue
if dateTo and bDate and bDate > dateTo: if toTs is not None and bDate is not None and float(bDate) > toTs + 86399:
continue continue
entryMap[eid] = { entryMap[eid] = {
"date": bDate, "date": _tsToIso(bDate),
"ref": e.get("reference", ""), "ref": e.get("reference", ""),
"desc": e.get("description", ""), "desc": e.get("description", ""),
"amount": e.get("totalAmount", 0), "amount": e.get("totalAmount", 0),

View file

@ -744,8 +744,8 @@ class DynamicMode(BaseMode):
name=name if name != 'Unknown' else 'Unknown Document', name=name if name != 'Unknown' else 'Unknown Document',
mimeType=mimeType if mimeType and mimeType != 'Unknown' else None, mimeType=mimeType if mimeType and mimeType != 'Unknown' else None,
size=str(size) if size and size != 'Unknown' else None, size=str(size) if size and size != 'Unknown' else None,
created=str(created) if created and created != 'Unknown' else None, created=float(created) if created is not None and created != 'Unknown' else None,
modified=str(modified) if modified and modified != 'Unknown' else None, modified=float(modified) if modified is not None and modified != 'Unknown' else None,
typeGroup=str(typeGroup) if typeGroup and typeGroup != 'Unknown' else None, typeGroup=str(typeGroup) if typeGroup and typeGroup != 'Unknown' else None,
documentId=str(documentId) if documentId and documentId != 'Unknown' else None, documentId=str(documentId) if documentId and documentId != 'Unknown' else None,
reference=str(reference) if reference and reference != 'Unknown' else None, reference=str(reference) if reference and reference != 'Unknown' else None,

View file

@ -9,11 +9,17 @@ These tests exercise pure-logic paths -- no DB, no HTTP. We pass a
would have been written to ``TrusteeDataAccountBalance``. would have been written to ``TrusteeDataAccountBalance``.
""" """
from datetime import datetime, timezone
from typing import Any, Dict, List, Type from typing import Any, Dict, List, Type
from unittest.mock import MagicMock from unittest.mock import MagicMock
import pytest import pytest
def _ts(isoDate: str) -> float:
"""Convert ``YYYY-MM-DD`` to UTC midnight unix timestamp for test fixtures."""
return datetime.strptime(isoDate, "%Y-%m-%d").replace(tzinfo=timezone.utc).timestamp()
from modules.features.trustee.accounting.accountingConnectorBase import AccountingPeriodBalance from modules.features.trustee.accounting.accountingConnectorBase import AccountingPeriodBalance
from modules.features.trustee.accounting.accountingDataSync import ( from modules.features.trustee.accounting.accountingDataSync import (
AccountingDataSync, AccountingDataSync,
@ -124,6 +130,45 @@ class TestPersistBalancesConnectorPath:
assert row["mandateId"] == "m-1" assert row["mandateId"] == "m-1"
def test_connectorBalancesEnrichedWithJournalMovements(self):
    """When connector provides closingBalance but no debit/credit (e.g. RMA /gl/saldo),
    the sync should enrich from journal lines."""
    # Two journal entries in June 2025, both touching account 1020.
    journalEntries = [
        {"id": "e1", "bookingDate": _ts("2025-06-15")},
        {"id": "e2", "bookingDate": _ts("2025-06-20")},
    ]
    journalLines = [
        {"journalEntryId": "e1", "accountNumber": "1020", "debitAmount": 500.0, "creditAmount": 0.0},
        {"journalEntryId": "e2", "accountNumber": "1020", "debitAmount": 0.0, "creditAmount": 200.0},
    ]
    fakeDb = _FakeDb(journalEntries, journalLines)
    sync = AccountingDataSync(_FakeInterface(fakeDb))
    # Connector rows carry balances only — no movement totals; periodMonth=0
    # is the whole-year row.
    connectorRows = [
        AccountingPeriodBalance(
            accountNumber="1020", periodYear=2025, periodMonth=6,
            openingBalance=10000.0, closingBalance=10300.0, currency="CHF",
        ),
        AccountingPeriodBalance(
            accountNumber="1020", periodYear=2025, periodMonth=0,
            openingBalance=10000.0, closingBalance=10300.0, currency="CHF",
        ),
    ]
    sync._persistBalances(
        "fi-1", "m-1",
        _FakeJournalEntry, _FakeJournalLine, _FakeBalance,
        connectorRows, "connector",
    )
    byPeriod = {(row["accountNumber"], row["periodMonth"]): row for row in fakeDb.createdRows}
    # Connector balance is kept; debit/credit totals come from the journal lines.
    assert byPeriod[("1020", 6)]["closingBalance"] == 10300.0
    assert byPeriod[("1020", 6)]["debitTotal"] == 500.0
    assert byPeriod[("1020", 6)]["creditTotal"] == 200.0
    assert byPeriod[("1020", 0)]["debitTotal"] == 500.0
    assert byPeriod[("1020", 0)]["creditTotal"] == 200.0
class TestLocalFallbackCumulative: class TestLocalFallbackCumulative:
"""Replicates the BuHa SoHa scenario WITHOUT prior-year journal data: """Replicates the BuHa SoHa scenario WITHOUT prior-year journal data:
the local fallback can't recreate the prior-year carry-over (by design), the local fallback can't recreate the prior-year carry-over (by design),
@ -134,9 +179,9 @@ class TestLocalFallbackCumulative:
def test_balanceSheetAccount_cumulatesAcrossMonths(self): def test_balanceSheetAccount_cumulatesAcrossMonths(self):
entries = [ entries = [
{"id": "e1", "bookingDate": "2025-01-15"}, {"id": "e1", "bookingDate": _ts("2025-01-15")},
{"id": "e2", "bookingDate": "2025-02-10"}, {"id": "e2", "bookingDate": _ts("2025-02-10")},
{"id": "e3", "bookingDate": "2025-12-20"}, {"id": "e3", "bookingDate": _ts("2025-12-20")},
] ]
lines = [ lines = [
{"journalEntryId": "e1", "accountNumber": "1020", "debitAmount": 1000.0, "creditAmount": 0.0}, {"journalEntryId": "e1", "accountNumber": "1020", "debitAmount": 1000.0, "creditAmount": 0.0},
@ -163,9 +208,9 @@ class TestLocalFallbackCumulative:
def test_incomeStatementAccount_resetsAtFiscalYearStart(self): def test_incomeStatementAccount_resetsAtFiscalYearStart(self):
entries = [ entries = [
{"id": "e1", "bookingDate": "2024-12-31"}, {"id": "e1", "bookingDate": _ts("2024-12-31")},
{"id": "e2", "bookingDate": "2025-06-15"}, {"id": "e2", "bookingDate": _ts("2025-06-15")},
{"id": "e3", "bookingDate": "2025-07-10"}, {"id": "e3", "bookingDate": _ts("2025-07-10")},
] ]
lines = [ lines = [
{"journalEntryId": "e1", "accountNumber": "6000", "debitAmount": 99999.99, "creditAmount": 0.0}, {"journalEntryId": "e1", "accountNumber": "6000", "debitAmount": 99999.99, "creditAmount": 0.0},