datamodel strict fk logic in one place
This commit is contained in:
parent
8221a0da3e
commit
564a1200c6
80 changed files with 1808 additions and 1004 deletions
8
app.py
8
app.py
|
|
@@ -294,6 +294,14 @@ except Exception as e:
|
|||
async def lifespan(app: FastAPI):
|
||||
logger.info("Application is starting up")
|
||||
|
||||
# Validate FK metadata on all Pydantic models (fail-fast, no silent fallbacks)
|
||||
from modules.shared.fkRegistry import validateFkTargets
|
||||
fkErrors = validateFkTargets()
|
||||
if fkErrors:
|
||||
for err in fkErrors:
|
||||
logger.error("FK metadata validation: %s", err)
|
||||
raise SystemExit(f"FK metadata validation failed ({len(fkErrors)} error(s)) — fix datamodels before starting")
|
||||
|
||||
# AI connectors already pre-warmed at module-load via _eager_prewarm() in aicoreModelRegistry.
|
||||
|
||||
# Bootstrap database if needed (creates initial users, mandates, roles, etc.)
|
||||
|
|
|
|||
|
|
@@ -9,6 +9,7 @@ import logging
|
|||
import importlib
|
||||
import os
|
||||
import time
|
||||
import threading
|
||||
from typing import Dict, List, Optional, Any, Tuple
|
||||
from modules.datamodels.datamodelAi import AiModel
|
||||
from .aicoreBase import BaseConnectorAi
|
||||
|
|
@@ -31,6 +32,7 @@ class ModelRegistry:
|
|||
self._connectors: Dict[str, BaseConnectorAi] = {}
|
||||
self._lastRefresh: Optional[float] = None
|
||||
self._refreshInterval: float = 300.0 # 5 minutes
|
||||
self._refreshLock = threading.Lock()
|
||||
self._connectorsInitialized: bool = False
|
||||
self._discoveredConnectorsCache: Optional[List[BaseConnectorAi]] = None # Avoid re-instantiating on every discoverConnectors() call
|
||||
self._getAvailableModelsCache: Dict[Tuple[str, int], Tuple[List[AiModel], float]] = {} # (user_id, rbac_id) -> (models, ts)
|
||||
|
|
@@ -47,26 +49,10 @@ class ModelRegistry:
|
|||
|
||||
self._connectors[connectorType] = connector
|
||||
|
||||
# Collect models from this connector
|
||||
try:
|
||||
models = connector.getCachedModels()
|
||||
for model in models:
|
||||
# Validate displayName uniqueness
|
||||
if model.displayName in self._models:
|
||||
existingModel = self._models[model.displayName]
|
||||
errorMsg = f"Duplicate displayName '{model.displayName}' detected! Existing model: displayName='{existingModel.displayName}', name='{existingModel.name}' (connector: {existingModel.connectorType}), New model: displayName='{model.displayName}', name='{model.name}' (connector: {connectorType}). displayName must be unique."
|
||||
logger.error(errorMsg)
|
||||
raise ValueError(errorMsg)
|
||||
|
||||
# TODO TESTING: Override maxTokens if testing override is enabled
|
||||
if TESTING_MAX_TOKENS_OVERRIDE is not None and model.maxTokens > TESTING_MAX_TOKENS_OVERRIDE:
|
||||
originalMaxTokens = model.maxTokens
|
||||
model.maxTokens = TESTING_MAX_TOKENS_OVERRIDE
|
||||
logger.debug(f"TESTING: Overrode maxTokens for {model.displayName}: {originalMaxTokens} -> {TESTING_MAX_TOKENS_OVERRIDE}")
|
||||
|
||||
# Use displayName as the key (must be unique)
|
||||
self._models[model.displayName] = model
|
||||
logger.debug(f"Registered model: {model.displayName} (name: {model.name}) from {connectorType}")
|
||||
self._addModel(model, connectorType)
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to register models from {connectorType}: {e}")
|
||||
raise
|
||||
|
|
|
|||
|
|
@@ -49,6 +49,102 @@ class AiAnthropic(BaseConnectorAi):
|
|||
def getModels(self) -> List[AiModel]:
|
||||
# Get all available Anthropic models.
|
||||
return [
|
||||
AiModel(
|
||||
name="claude-opus-4-7",
|
||||
displayName="Anthropic Claude Opus 4.7",
|
||||
connectorType="anthropic",
|
||||
apiUrl="https://api.anthropic.com/v1/messages",
|
||||
temperature=0.2,
|
||||
maxTokens=128000,
|
||||
contextLength=1000000,
|
||||
costPer1kTokensInput=0.005, # $5/M tokens (Anthropic API, 2026-04)
|
||||
costPer1kTokensOutput=0.025, # $25/M tokens
|
||||
speedRating=5,
|
||||
qualityRating=10,
|
||||
functionCall=self.callAiBasic,
|
||||
functionCallStream=self.callAiBasicStream,
|
||||
priority=PriorityEnum.QUALITY,
|
||||
processingMode=ProcessingModeEnum.DETAILED,
|
||||
operationTypes=createOperationTypeRatings(
|
||||
(OperationTypeEnum.PLAN, 10),
|
||||
(OperationTypeEnum.DATA_ANALYSE, 9),
|
||||
(OperationTypeEnum.DATA_GENERATE, 10),
|
||||
(OperationTypeEnum.DATA_EXTRACT, 9),
|
||||
(OperationTypeEnum.AGENT, 10),
|
||||
(OperationTypeEnum.DATA_QUERY, 3),
|
||||
),
|
||||
version="claude-opus-4-7",
|
||||
calculatepriceCHF=lambda processingTime, bytesSent, bytesReceived: (bytesSent / 4 / 1000) * 0.005 + (bytesReceived / 4 / 1000) * 0.025
|
||||
),
|
||||
AiModel(
|
||||
name="claude-sonnet-4-6",
|
||||
displayName="Anthropic Claude Sonnet 4.6",
|
||||
connectorType="anthropic",
|
||||
apiUrl="https://api.anthropic.com/v1/messages",
|
||||
temperature=0.2,
|
||||
maxTokens=64000,
|
||||
contextLength=1000000,
|
||||
costPer1kTokensInput=0.003, # $3/M tokens
|
||||
costPer1kTokensOutput=0.015, # $15/M tokens
|
||||
speedRating=7,
|
||||
qualityRating=10,
|
||||
functionCall=self.callAiBasic,
|
||||
functionCallStream=self.callAiBasicStream,
|
||||
priority=PriorityEnum.BALANCED,
|
||||
processingMode=ProcessingModeEnum.ADVANCED,
|
||||
operationTypes=createOperationTypeRatings(
|
||||
(OperationTypeEnum.PLAN, 9),
|
||||
(OperationTypeEnum.DATA_ANALYSE, 9),
|
||||
(OperationTypeEnum.DATA_GENERATE, 9),
|
||||
(OperationTypeEnum.DATA_EXTRACT, 8),
|
||||
(OperationTypeEnum.AGENT, 9),
|
||||
(OperationTypeEnum.DATA_QUERY, 9),
|
||||
),
|
||||
version="claude-sonnet-4-6",
|
||||
calculatepriceCHF=lambda processingTime, bytesSent, bytesReceived: (bytesSent / 4 / 1000) * 0.003 + (bytesReceived / 4 / 1000) * 0.015
|
||||
),
|
||||
AiModel(
|
||||
name="claude-opus-4-7",
|
||||
displayName="Anthropic Claude Opus 4.7 Vision",
|
||||
connectorType="anthropic",
|
||||
apiUrl="https://api.anthropic.com/v1/messages",
|
||||
temperature=0.2,
|
||||
maxTokens=128000,
|
||||
contextLength=1000000,
|
||||
costPer1kTokensInput=0.005,
|
||||
costPer1kTokensOutput=0.025,
|
||||
speedRating=5,
|
||||
qualityRating=10,
|
||||
functionCall=self.callAiImage,
|
||||
priority=PriorityEnum.QUALITY,
|
||||
processingMode=ProcessingModeEnum.DETAILED,
|
||||
operationTypes=createOperationTypeRatings(
|
||||
(OperationTypeEnum.IMAGE_ANALYSE, 10)
|
||||
),
|
||||
version="claude-opus-4-7",
|
||||
calculatepriceCHF=lambda processingTime, bytesSent, bytesReceived: (bytesSent / 4 / 1000) * 0.005 + (bytesReceived / 4 / 1000) * 0.025
|
||||
),
|
||||
AiModel(
|
||||
name="claude-sonnet-4-6",
|
||||
displayName="Anthropic Claude Sonnet 4.6 Vision",
|
||||
connectorType="anthropic",
|
||||
apiUrl="https://api.anthropic.com/v1/messages",
|
||||
temperature=0.2,
|
||||
maxTokens=64000,
|
||||
contextLength=1000000,
|
||||
costPer1kTokensInput=0.003,
|
||||
costPer1kTokensOutput=0.015,
|
||||
speedRating=6,
|
||||
qualityRating=10,
|
||||
functionCall=self.callAiImage,
|
||||
priority=PriorityEnum.QUALITY,
|
||||
processingMode=ProcessingModeEnum.DETAILED,
|
||||
operationTypes=createOperationTypeRatings(
|
||||
(OperationTypeEnum.IMAGE_ANALYSE, 10)
|
||||
),
|
||||
version="claude-sonnet-4-6",
|
||||
calculatepriceCHF=lambda processingTime, bytesSent, bytesReceived: (bytesSent / 4 / 1000) * 0.003 + (bytesReceived / 4 / 1000) * 0.015
|
||||
),
|
||||
AiModel(
|
||||
name="claude-sonnet-4-5-20250929",
|
||||
displayName="Anthropic Claude Sonnet 4.5",
|
||||
|
|
|
|||
|
|
@@ -123,6 +123,135 @@ class AiOpenai(BaseConnectorAi):
|
|||
version="gpt-4o",
|
||||
calculatepriceCHF=lambda processingTime, bytesSent, bytesReceived: (bytesSent / 4 / 1000) * 0.0025 + (bytesReceived / 4 / 1000) * 0.01
|
||||
),
|
||||
AiModel(
|
||||
name="gpt-5.5",
|
||||
displayName="OpenAI GPT-5.5",
|
||||
connectorType="openai",
|
||||
apiUrl="https://api.openai.com/v1/chat/completions",
|
||||
temperature=0.2,
|
||||
maxTokens=128000,
|
||||
contextLength=1050000,
|
||||
costPer1kTokensInput=0.005, # $5/M tokens (OpenAI API, 2026-04)
|
||||
costPer1kTokensOutput=0.03, # $30/M tokens
|
||||
speedRating=8,
|
||||
qualityRating=10,
|
||||
functionCall=self.callAiBasic,
|
||||
functionCallStream=self.callAiBasicStream,
|
||||
priority=PriorityEnum.QUALITY,
|
||||
processingMode=ProcessingModeEnum.DETAILED,
|
||||
operationTypes=createOperationTypeRatings(
|
||||
(OperationTypeEnum.PLAN, 10),
|
||||
(OperationTypeEnum.DATA_ANALYSE, 10),
|
||||
(OperationTypeEnum.DATA_GENERATE, 10),
|
||||
(OperationTypeEnum.DATA_EXTRACT, 8),
|
||||
(OperationTypeEnum.AGENT, 10),
|
||||
(OperationTypeEnum.DATA_QUERY, 8),
|
||||
),
|
||||
version="gpt-5.5",
|
||||
calculatepriceCHF=lambda processingTime, bytesSent, bytesReceived: (bytesSent / 4 / 1000) * 0.005 + (bytesReceived / 4 / 1000) * 0.03
|
||||
),
|
||||
AiModel(
|
||||
name="gpt-5.4",
|
||||
displayName="OpenAI GPT-5.4",
|
||||
connectorType="openai",
|
||||
apiUrl="https://api.openai.com/v1/chat/completions",
|
||||
temperature=0.2,
|
||||
maxTokens=128000,
|
||||
contextLength=1050000,
|
||||
costPer1kTokensInput=0.0025, # $2.50/M tokens
|
||||
costPer1kTokensOutput=0.015, # $15/M tokens
|
||||
speedRating=8,
|
||||
qualityRating=10,
|
||||
functionCall=self.callAiBasic,
|
||||
functionCallStream=self.callAiBasicStream,
|
||||
priority=PriorityEnum.BALANCED,
|
||||
processingMode=ProcessingModeEnum.ADVANCED,
|
||||
operationTypes=createOperationTypeRatings(
|
||||
(OperationTypeEnum.PLAN, 9),
|
||||
(OperationTypeEnum.DATA_ANALYSE, 10),
|
||||
(OperationTypeEnum.DATA_GENERATE, 10),
|
||||
(OperationTypeEnum.DATA_EXTRACT, 8),
|
||||
(OperationTypeEnum.AGENT, 9),
|
||||
(OperationTypeEnum.DATA_QUERY, 8),
|
||||
),
|
||||
version="gpt-5.4",
|
||||
calculatepriceCHF=lambda processingTime, bytesSent, bytesReceived: (bytesSent / 4 / 1000) * 0.0025 + (bytesReceived / 4 / 1000) * 0.015
|
||||
),
|
||||
AiModel(
|
||||
name="gpt-5.4-mini",
|
||||
displayName="OpenAI GPT-5.4 Mini",
|
||||
connectorType="openai",
|
||||
apiUrl="https://api.openai.com/v1/chat/completions",
|
||||
temperature=0.2,
|
||||
maxTokens=128000,
|
||||
contextLength=400000,
|
||||
costPer1kTokensInput=0.00075, # $0.75/M tokens
|
||||
costPer1kTokensOutput=0.0045, # $4.50/M tokens
|
||||
speedRating=9,
|
||||
qualityRating=9,
|
||||
functionCall=self.callAiBasic,
|
||||
functionCallStream=self.callAiBasicStream,
|
||||
priority=PriorityEnum.SPEED,
|
||||
processingMode=ProcessingModeEnum.BASIC,
|
||||
operationTypes=createOperationTypeRatings(
|
||||
(OperationTypeEnum.PLAN, 8),
|
||||
(OperationTypeEnum.DATA_ANALYSE, 9),
|
||||
(OperationTypeEnum.DATA_GENERATE, 9),
|
||||
(OperationTypeEnum.DATA_EXTRACT, 8),
|
||||
(OperationTypeEnum.AGENT, 8),
|
||||
(OperationTypeEnum.DATA_QUERY, 10),
|
||||
),
|
||||
version="gpt-5.4-mini",
|
||||
calculatepriceCHF=lambda processingTime, bytesSent, bytesReceived: (bytesSent / 4 / 1000) * 0.00075 + (bytesReceived / 4 / 1000) * 0.0045
|
||||
),
|
||||
AiModel(
|
||||
name="gpt-5.4-nano",
|
||||
displayName="OpenAI GPT-5.4 Nano",
|
||||
connectorType="openai",
|
||||
apiUrl="https://api.openai.com/v1/chat/completions",
|
||||
temperature=0.2,
|
||||
maxTokens=128000,
|
||||
contextLength=400000,
|
||||
costPer1kTokensInput=0.0002, # $0.20/M tokens
|
||||
costPer1kTokensOutput=0.00125, # $1.25/M tokens
|
||||
speedRating=10,
|
||||
qualityRating=7,
|
||||
functionCall=self.callAiBasic,
|
||||
functionCallStream=self.callAiBasicStream,
|
||||
priority=PriorityEnum.COST,
|
||||
processingMode=ProcessingModeEnum.BASIC,
|
||||
operationTypes=createOperationTypeRatings(
|
||||
(OperationTypeEnum.PLAN, 7),
|
||||
(OperationTypeEnum.DATA_ANALYSE, 7),
|
||||
(OperationTypeEnum.DATA_GENERATE, 8),
|
||||
(OperationTypeEnum.DATA_EXTRACT, 9),
|
||||
(OperationTypeEnum.AGENT, 7),
|
||||
(OperationTypeEnum.DATA_QUERY, 10),
|
||||
),
|
||||
version="gpt-5.4-nano",
|
||||
calculatepriceCHF=lambda processingTime, bytesSent, bytesReceived: (bytesSent / 4 / 1000) * 0.0002 + (bytesReceived / 4 / 1000) * 0.00125
|
||||
),
|
||||
AiModel(
|
||||
name="gpt-5.5",
|
||||
displayName="OpenAI GPT-5.5 Vision",
|
||||
connectorType="openai",
|
||||
apiUrl="https://api.openai.com/v1/chat/completions",
|
||||
temperature=0.2,
|
||||
maxTokens=128000,
|
||||
contextLength=1050000,
|
||||
costPer1kTokensInput=0.005,
|
||||
costPer1kTokensOutput=0.03,
|
||||
speedRating=6,
|
||||
qualityRating=10,
|
||||
functionCall=self.callAiImage,
|
||||
priority=PriorityEnum.QUALITY,
|
||||
processingMode=ProcessingModeEnum.DETAILED,
|
||||
operationTypes=createOperationTypeRatings(
|
||||
(OperationTypeEnum.IMAGE_ANALYSE, 10)
|
||||
),
|
||||
version="gpt-5.5",
|
||||
calculatepriceCHF=lambda processingTime, bytesSent, bytesReceived: (bytesSent / 4 / 1000) * 0.005 + (bytesReceived / 4 / 1000) * 0.03
|
||||
),
|
||||
AiModel(
|
||||
name="text-embedding-3-small",
|
||||
displayName="OpenAI Embedding Small",
|
||||
|
|
|
|||
|
|
@@ -561,29 +561,48 @@ class DatabaseConnector:
|
|||
f"Could not add column '{col}' to '{table}': {add_err}"
|
||||
)
|
||||
|
||||
# Targeted type-downgrade: if a model field has been
|
||||
# changed from a structured type (JSONB) to a plain
|
||||
# TEXT field, alter the column so writes don't fail.
|
||||
# JSONB -> TEXT is a safe, lossless cast (JSONB is
|
||||
# rendered as its JSON-text representation; the
|
||||
# corresponding Pydantic ``@field_validator`` is
|
||||
# responsible for re-decoding legacy data on read).
|
||||
# Column type migrations for existing tables.
|
||||
# TEXT→DOUBLE PRECISION handles three value shapes:
|
||||
# 1. NULL / empty string → NULL
|
||||
# 2. ISO date(time) like "2025-01-22" or "2025-01-22T10:00:00+00" → epoch via EXTRACT
|
||||
# 3. Plain numeric string like "3.14" → direct cast
|
||||
_TEXT_TO_DOUBLE = (
|
||||
'DOUBLE PRECISION USING CASE'
|
||||
' WHEN "{col}" IS NULL OR "{col}" = \'\' THEN NULL'
|
||||
' WHEN "{col}" ~ \'^\\d{4}-\\d{2}-\\d{2}\''
|
||||
' THEN EXTRACT(EPOCH FROM "{col}"::timestamptz)'
|
||||
' ELSE NULLIF("{col}", \'\')::double precision'
|
||||
' END'
|
||||
)
|
||||
_SAFE_TYPE_CHANGES = {
|
||||
("jsonb", "TEXT"): "TEXT USING \"{col}\"::text",
|
||||
("text", "DOUBLE PRECISION"): _TEXT_TO_DOUBLE,
|
||||
("text", "INTEGER"): "INTEGER USING NULLIF(\"{col}\", '')::integer",
|
||||
("timestamp without time zone", "DOUBLE PRECISION"): 'DOUBLE PRECISION USING EXTRACT(EPOCH FROM "{col}" AT TIME ZONE \'UTC\')',
|
||||
("timestamp with time zone", "DOUBLE PRECISION"): 'DOUBLE PRECISION USING EXTRACT(EPOCH FROM "{col}")',
|
||||
("date", "DOUBLE PRECISION"): 'DOUBLE PRECISION USING EXTRACT(EPOCH FROM "{col}"::timestamp AT TIME ZONE \'UTC\')',
|
||||
}
|
||||
for col in sorted(desired_columns & existing_columns):
|
||||
if col == "id":
|
||||
continue
|
||||
desired_sql = (model_fields.get(col) or "").upper()
|
||||
currentType = existing_column_types.get(col, "")
|
||||
if desired_sql == "TEXT" and currentType == "jsonb":
|
||||
migration = _SAFE_TYPE_CHANGES.get((currentType, desired_sql))
|
||||
if migration:
|
||||
castExpr = migration.replace("{col}", col)
|
||||
try:
|
||||
cursor.execute('SAVEPOINT col_migrate')
|
||||
cursor.execute(
|
||||
f'ALTER TABLE "{table}" ALTER COLUMN "{col}" TYPE TEXT USING "{col}"::text'
|
||||
f'ALTER TABLE "{table}" ALTER COLUMN "{col}" TYPE {castExpr}'
|
||||
)
|
||||
cursor.execute('RELEASE SAVEPOINT col_migrate')
|
||||
logger.info(
|
||||
f"Downgraded column '{col}' from JSONB to TEXT on '{table}'"
|
||||
f"Migrated column '{col}' from {currentType} to {desired_sql} on '{table}'"
|
||||
)
|
||||
except Exception as alter_err:
|
||||
cursor.execute('ROLLBACK TO SAVEPOINT col_migrate')
|
||||
logger.warning(
|
||||
f"Could not downgrade column '{col}' on '{table}': {alter_err}"
|
||||
f"Could not migrate column '{col}' on '{table}': {alter_err}"
|
||||
)
|
||||
except Exception as ensure_err:
|
||||
logger.warning(
|
||||
|
|
@@ -1096,8 +1115,15 @@ class DatabaseConnector:
|
|||
values.append(f"%{v}")
|
||||
elif op in ("gt", "gte", "lt", "lte"):
|
||||
sqlOp = {"gt": ">", "gte": ">=", "lt": "<", "lte": "<="}[op]
|
||||
where_parts.append(f'"{key}"::TEXT {sqlOp} %s')
|
||||
values.append(str(v))
|
||||
if colType in ("INTEGER", "DOUBLE PRECISION"):
|
||||
try:
|
||||
where_parts.append(f'"{key}"::double precision {sqlOp} %s')
|
||||
values.append(float(v))
|
||||
except (ValueError, TypeError):
|
||||
continue
|
||||
else:
|
||||
where_parts.append(f'"{key}"::TEXT {sqlOp} %s')
|
||||
values.append(str(v))
|
||||
elif op == "between":
|
||||
fromVal = v.get("from", "") if isinstance(v, dict) else ""
|
||||
toVal = v.get("to", "") if isinstance(v, dict) else ""
|
||||
|
|
@@ -1122,6 +1148,21 @@ class DatabaseConnector:
|
|||
toTs = _dt.strptime(str(toVal), '%Y-%m-%d').replace(hour=23, minute=59, second=59, tzinfo=_tz.utc).timestamp()
|
||||
where_parts.append(f'"{key}" <= %s')
|
||||
values.append(toTs)
|
||||
elif isNumericCol:
|
||||
try:
|
||||
if fromVal and toVal:
|
||||
where_parts.append(
|
||||
f'"{key}"::double precision >= %s AND "{key}"::double precision <= %s'
|
||||
)
|
||||
values.extend([float(fromVal), float(toVal)])
|
||||
elif fromVal:
|
||||
where_parts.append(f'"{key}"::double precision >= %s')
|
||||
values.append(float(fromVal))
|
||||
elif toVal:
|
||||
where_parts.append(f'"{key}"::double precision <= %s')
|
||||
values.append(float(toVal))
|
||||
except (ValueError, TypeError):
|
||||
continue
|
||||
else:
|
||||
if fromVal and toVal:
|
||||
where_parts.append(f'"{key}"::TEXT >= %s AND "{key}"::TEXT <= %s')
|
||||
|
|
|
|||
|
|
@@ -125,7 +125,7 @@ class AiModel(BaseModel):
|
|||
|
||||
# Metadata
|
||||
version: Optional[str] = Field(default=None, description="Model version")
|
||||
lastUpdated: Optional[str] = Field(default=None, description="Last update timestamp")
|
||||
lastUpdated: Optional[float] = Field(default=None, description="Last update timestamp (UTC unix)", json_schema_extra={"frontend_type": "timestamp"})
|
||||
|
||||
model_config = ConfigDict(arbitrary_types_allowed=True) # Allow Callable type
|
||||
|
||||
|
|
|
|||
|
|
@@ -34,7 +34,7 @@ class AiAuditLogEntry(BaseModel):
|
|||
|
||||
userId: str = Field(
|
||||
description="ID of the user who triggered the AI call",
|
||||
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "User"}},
|
||||
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"}},
|
||||
)
|
||||
username: Optional[str] = Field(
|
||||
default=None,
|
||||
|
|
@@ -43,17 +43,17 @@ class AiAuditLogEntry(BaseModel):
|
|||
)
|
||||
mandateId: str = Field(
|
||||
description="Mandate context of the call",
|
||||
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate"}},
|
||||
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}},
|
||||
)
|
||||
featureInstanceId: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Feature instance context",
|
||||
json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
|
||||
json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}},
|
||||
)
|
||||
featureCode: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Feature code (e.g. workspace, trustee)",
|
||||
json_schema_extra={"label": "Feature", "fk_target": {"db": "poweron_app", "table": "Feature", "column": "code"}},
|
||||
json_schema_extra={"label": "Feature", "fk_target": {"db": "poweron_app", "table": "Feature", "column": "code", "labelField": "code"}},
|
||||
)
|
||||
instanceLabel: Optional[str] = Field(
|
||||
default=None,
|
||||
|
|
|
|||
|
|
@@ -100,7 +100,7 @@ class AuditLogEntry(BaseModel):
|
|||
timestamp: float = Field(
|
||||
default_factory=getUtcTimestamp,
|
||||
description="UTC timestamp when the event occurred",
|
||||
json_schema_extra={"label": "Zeitstempel", "frontend_type": "datetime", "frontend_readonly": True, "frontend_required": True}
|
||||
json_schema_extra={"label": "Zeitstempel", "frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": True}
|
||||
)
|
||||
|
||||
# Actor identification
|
||||
|
|
@@ -111,7 +111,7 @@ class AuditLogEntry(BaseModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": True,
|
||||
"fk_target": {"db": "poweron_app", "table": "User"},
|
||||
"fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
|
||||
},
|
||||
)
|
||||
|
||||
|
|
@@ -130,7 +130,7 @@ class AuditLogEntry(BaseModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
|
||||
|
|
@@ -142,7 +142,7 @@ class AuditLogEntry(BaseModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
|
||||
|
|
|
|||
|
|
@@ -64,7 +64,7 @@ class BackgroundJob(PowerOnModel):
|
|||
description="Mandate scope (used for access checks). None for system-wide jobs.",
|
||||
json_schema_extra={
|
||||
"label": "Mandanten-ID",
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
featureInstanceId: Optional[str] = Field(
|
||||
|
|
@@ -72,7 +72,7 @@ class BackgroundJob(PowerOnModel):
|
|||
description="Feature instance scope (optional)",
|
||||
json_schema_extra={
|
||||
"label": "Feature-Instanz",
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
triggeredBy: Optional[str] = Field(
|
||||
|
|
@@ -113,18 +113,18 @@ class BackgroundJob(PowerOnModel):
|
|||
json_schema_extra={"label": "Fehler"},
|
||||
)
|
||||
|
||||
createdAt: datetime = Field(
|
||||
default_factory=lambda: datetime.now(timezone.utc),
|
||||
description="When the job was submitted",
|
||||
json_schema_extra={"label": "Eingereicht"},
|
||||
createdAt: float = Field(
|
||||
default_factory=lambda: datetime.now(timezone.utc).timestamp(),
|
||||
description="When the job was submitted (UTC unix)",
|
||||
json_schema_extra={"label": "Eingereicht", "frontend_type": "timestamp"},
|
||||
)
|
||||
startedAt: Optional[datetime] = Field(
|
||||
startedAt: Optional[float] = Field(
|
||||
None,
|
||||
description="When the handler began running",
|
||||
json_schema_extra={"label": "Gestartet"},
|
||||
description="When the handler began running (UTC unix)",
|
||||
json_schema_extra={"label": "Gestartet", "frontend_type": "timestamp"},
|
||||
)
|
||||
finishedAt: Optional[datetime] = Field(
|
||||
finishedAt: Optional[float] = Field(
|
||||
None,
|
||||
description="When the handler reached a terminal status",
|
||||
json_schema_extra={"label": "Beendet"},
|
||||
description="When the handler reached a terminal status (UTC unix)",
|
||||
json_schema_extra={"label": "Beendet", "frontend_type": "timestamp"},
|
||||
)
|
||||
|
|
|
|||
|
|
@@ -46,9 +46,7 @@ class PowerOnModel(BaseModel):
|
|||
"frontend_required": False,
|
||||
"frontend_visible": False,
|
||||
"system": True,
|
||||
"fk_model": "User",
|
||||
"fk_label_field": "username",
|
||||
"fk_target": {"db": "poweron_app", "table": "User"},
|
||||
"fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
|
||||
},
|
||||
)
|
||||
sysModifiedAt: Optional[float] = Field(
|
||||
|
|
@@ -73,8 +71,6 @@ class PowerOnModel(BaseModel):
|
|||
"frontend_required": False,
|
||||
"frontend_visible": False,
|
||||
"system": True,
|
||||
"fk_model": "User",
|
||||
"fk_label_field": "username",
|
||||
"fk_target": {"db": "poweron_app", "table": "User"},
|
||||
"fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
|
||||
},
|
||||
)
|
||||
|
|
|
|||
|
|
@@ -49,12 +49,12 @@ class BillingAccount(PowerOnModel):
|
|||
mandateId: str = Field(
|
||||
...,
|
||||
description="Foreign key to Mandate",
|
||||
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate"}},
|
||||
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}},
|
||||
)
|
||||
userId: Optional[str] = Field(
|
||||
None,
|
||||
description="Foreign key to User (None = mandate pool account, set = user audit account)",
|
||||
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "User"}},
|
||||
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"}},
|
||||
)
|
||||
balance: float = Field(default=0.0, description="Current balance in CHF", json_schema_extra={"label": "Guthaben (CHF)"})
|
||||
warningThreshold: float = Field(
|
||||
|
|
@@ -62,10 +62,10 @@ class BillingAccount(PowerOnModel):
|
|||
description="Warning threshold in CHF",
|
||||
json_schema_extra={"label": "Warnschwelle (CHF)"},
|
||||
)
|
||||
lastWarningAt: Optional[datetime] = Field(
|
||||
lastWarningAt: Optional[float] = Field(
|
||||
None,
|
||||
description="Last warning sent timestamp",
|
||||
json_schema_extra={"label": "Letzte Warnung"},
|
||||
description="Last warning sent timestamp (UTC unix)",
|
||||
json_schema_extra={"label": "Letzte Warnung", "frontend_type": "timestamp"},
|
||||
)
|
||||
enabled: bool = Field(default=True, description="Account is active", json_schema_extra={"label": "Aktiv"})
|
||||
|
||||
|
|
@@ -81,7 +81,7 @@ class BillingTransaction(PowerOnModel):
|
|||
accountId: str = Field(
|
||||
...,
|
||||
description="Foreign key to BillingAccount",
|
||||
json_schema_extra={"label": "Konto-ID", "fk_target": {"db": "poweron_billing", "table": "BillingAccount"}},
|
||||
json_schema_extra={"label": "Konto-ID", "fk_target": {"db": "poweron_billing", "table": "BillingAccount", "labelField": None}},
|
||||
)
|
||||
transactionType: TransactionTypeEnum = Field(..., description="Transaction type", json_schema_extra={"label": "Typ"})
|
||||
amount: float = Field(..., description="Amount in CHF (always positive)", json_schema_extra={"label": "Betrag (CHF)"})
|
||||
|
|
@@ -100,19 +100,19 @@ class BillingTransaction(PowerOnModel):
|
|||
featureInstanceId: Optional[str] = Field(
|
||||
None,
|
||||
description="Feature instance ID",
|
||||
json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
|
||||
json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}},
|
||||
)
|
||||
featureCode: Optional[str] = Field(
|
||||
None,
|
||||
description="Feature code (e.g., automation)",
|
||||
json_schema_extra={"label": "Feature-Code", "fk_target": {"db": "poweron_app", "table": "Feature", "column": "code"}},
|
||||
json_schema_extra={"label": "Feature-Code", "fk_target": {"db": "poweron_app", "table": "Feature", "column": "code", "labelField": "code"}},
|
||||
)
|
||||
aicoreProvider: Optional[str] = Field(None, description="AICore provider (anthropic, openai, etc.)", json_schema_extra={"label": "AI-Anbieter"})
|
||||
aicoreModel: Optional[str] = Field(None, description="AICore model name (e.g., claude-4-sonnet, gpt-4o)", json_schema_extra={"label": "AI-Modell"})
|
||||
createdByUserId: Optional[str] = Field(
|
||||
None,
|
||||
description="User who created/caused this transaction",
|
||||
json_schema_extra={"label": "Erstellt von Benutzer", "fk_target": {"db": "poweron_app", "table": "User"}},
|
||||
json_schema_extra={"label": "Erstellt von Benutzer", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"}},
|
||||
)
|
||||
|
||||
# AI call metadata (for per-call analytics)
|
||||
|
|
@@ -133,7 +133,7 @@ class BillingSettings(BaseModel):
|
|||
mandateId: str = Field(
|
||||
...,
|
||||
description="Foreign key to Mandate (UNIQUE)",
|
||||
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate"}},
|
||||
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}},
|
||||
)
|
||||
|
||||
warningThresholdPercent: float = Field(
|
||||
|
|
@@ -158,7 +158,7 @@ class BillingSettings(BaseModel):
|
|||
)
|
||||
rechargeMaxPerMonth: int = Field(default=3, description="Max auto-recharges per month", json_schema_extra={"label": "Max. Nachladungen/Monat"})
|
||||
rechargesThisMonth: int = Field(default=0, description="Counter: auto-recharges used this month", json_schema_extra={"label": "Nachladungen diesen Monat"})
|
||||
monthResetAt: Optional[datetime] = Field(None, description="When rechargesThisMonth was last reset", json_schema_extra={"label": "Monats-Reset"})
|
||||
monthResetAt: Optional[float] = Field(None, description="When rechargesThisMonth was last reset (UTC unix)", json_schema_extra={"label": "Monats-Reset", "frontend_type": "timestamp"})
|
||||
|
||||
# Notifications
|
||||
notifyEmails: List[str] = Field(
|
||||
|
|
@@ -174,10 +174,10 @@ class BillingSettings(BaseModel):
|
|||
description="Peak indexed data volume MB this billing period",
|
||||
json_schema_extra={"label": "Speicher-Peak (MB)"},
|
||||
)
|
||||
storagePeriodStartAt: Optional[datetime] = Field(
|
||||
storagePeriodStartAt: Optional[float] = Field(
|
||||
None,
|
||||
description="Subscription billing period start used for storage reset",
|
||||
json_schema_extra={"label": "Speicher-Periodenbeginn"},
|
||||
description="Subscription billing period start used for storage reset (UTC unix)",
|
||||
json_schema_extra={"label": "Speicher-Periodenbeginn", "frontend_type": "timestamp"},
|
||||
)
|
||||
storageBilledUpToMB: float = Field(
|
||||
default=0.0,
|
||||
|
|
@@ -193,9 +193,10 @@ class StripeWebhookEvent(BaseModel):
|
|||
description="Primary key",
|
||||
)
|
||||
event_id: str = Field(..., description="Stripe event ID (evt_xxx)")
|
||||
processed_at: datetime = Field(
|
||||
default_factory=lambda: datetime.now(timezone.utc),
|
||||
description="When the event was processed",
|
||||
processed_at: float = Field(
|
||||
default_factory=lambda: datetime.now(timezone.utc).timestamp(),
|
||||
description="When the event was processed (UTC unix)",
|
||||
json_schema_extra={"frontend_type": "timestamp"},
|
||||
)
|
||||
|
||||
|
||||
|
|
@@ -210,10 +211,14 @@ class UsageStatistics(BaseModel):
|
|||
accountId: str = Field(
|
||||
...,
|
||||
description="Foreign key to BillingAccount",
|
||||
json_schema_extra={"label": "Konto-ID", "fk_target": {"db": "poweron_billing", "table": "BillingAccount"}},
|
||||
json_schema_extra={"label": "Konto-ID", "fk_target": {"db": "poweron_billing", "table": "BillingAccount", "labelField": None}},
|
||||
)
|
||||
periodType: PeriodTypeEnum = Field(..., description="Period type", json_schema_extra={"label": "Periodentyp"})
|
||||
periodStart: date = Field(..., description="Period start date", json_schema_extra={"label": "Periodenbeginn"})
|
||||
periodStart: date = Field(
|
||||
...,
|
||||
description="Period start date",
|
||||
json_schema_extra={"label": "Periodenbeginn", "frontend_type": "date"},
|
||||
)
|
||||
|
||||
# Aggregated values
|
||||
totalCostCHF: float = Field(default=0.0, description="Total cost in CHF", json_schema_extra={"label": "Gesamtkosten (CHF)"})
|
||||
|
|
|
|||
|
|
@ -16,12 +16,12 @@ class ChatLog(PowerOnModel):
|
|||
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key", json_schema_extra={"label": "ID"})
|
||||
workflowId: str = Field(
|
||||
description="Foreign key to workflow",
|
||||
json_schema_extra={"label": "Workflow-ID", "fk_target": {"db": "poweron_chat", "table": "ChatWorkflow"}},
|
||||
json_schema_extra={"label": "Workflow-ID", "fk_target": {"db": "poweron_chat", "table": "ChatWorkflow", "labelField": "name"}},
|
||||
)
|
||||
message: str = Field(description="Log message", json_schema_extra={"label": "Nachricht"})
|
||||
type: str = Field(description="Log type (info, warning, error, etc.)", json_schema_extra={"label": "Typ"})
|
||||
timestamp: float = Field(default_factory=getUtcTimestamp,
|
||||
description="When the log entry was created (UTC timestamp in seconds)", json_schema_extra={"label": "Zeitstempel"})
|
||||
description="When the log entry was created (UTC timestamp in seconds)", json_schema_extra={"label": "Zeitstempel", "frontend_type": "timestamp"})
|
||||
status: Optional[str] = Field(None, description="Status of the log entry", json_schema_extra={"label": "Status"})
|
||||
progress: Optional[float] = Field(None, description="Progress indicator (0.0 to 1.0)", json_schema_extra={"label": "Fortschritt"})
|
||||
performance: Optional[Dict[str, Any]] = Field(None, description="Performance metrics", json_schema_extra={"label": "Leistung"})
|
||||
|
|
@ -37,11 +37,11 @@ class ChatDocument(PowerOnModel):
|
|||
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key", json_schema_extra={"label": "ID"})
|
||||
messageId: str = Field(
|
||||
description="Foreign key to message",
|
||||
json_schema_extra={"label": "Nachrichten-ID", "fk_target": {"db": "poweron_chat", "table": "ChatMessage"}},
|
||||
json_schema_extra={"label": "Nachrichten-ID", "fk_target": {"db": "poweron_chat", "table": "ChatMessage", "labelField": None}},
|
||||
)
|
||||
fileId: str = Field(
|
||||
description="Foreign key to file",
|
||||
json_schema_extra={"label": "Datei-ID", "fk_target": {"db": "poweron_management", "table": "FileItem"}},
|
||||
json_schema_extra={"label": "Datei-ID", "fk_target": {"db": "poweron_management", "table": "FileItem", "labelField": "fileName"}},
|
||||
)
|
||||
fileName: str = Field(description="Name of the file", json_schema_extra={"label": "Dateiname"})
|
||||
fileSize: int = Field(description="Size of the file", json_schema_extra={"label": "Dateigröße"})
|
||||
|
|
@ -81,12 +81,12 @@ class ChatMessage(PowerOnModel):
|
|||
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key", json_schema_extra={"label": "ID"})
|
||||
workflowId: str = Field(
|
||||
description="Foreign key to workflow",
|
||||
json_schema_extra={"label": "Workflow-ID", "fk_target": {"db": "poweron_chat", "table": "ChatWorkflow"}},
|
||||
json_schema_extra={"label": "Workflow-ID", "fk_target": {"db": "poweron_chat", "table": "ChatWorkflow", "labelField": "name"}},
|
||||
)
|
||||
parentMessageId: Optional[str] = Field(
|
||||
None,
|
||||
description="Parent message ID for threading",
|
||||
json_schema_extra={"label": "Übergeordnete Nachrichten-ID", "fk_target": {"db": "poweron_chat", "table": "ChatMessage"}},
|
||||
json_schema_extra={"label": "Übergeordnete Nachrichten-ID", "fk_target": {"db": "poweron_chat", "table": "ChatMessage", "labelField": None}},
|
||||
)
|
||||
documents: List[ChatDocument] = Field(default_factory=list, description="Associated documents", json_schema_extra={"label": "Dokumente"})
|
||||
documentsLabel: Optional[str] = Field(None, description="Label for the set of documents", json_schema_extra={"label": "Dokumenten-Label"})
|
||||
|
|
@ -97,7 +97,7 @@ class ChatMessage(PowerOnModel):
|
|||
sequenceNr: Optional[int] = Field(default=0,
|
||||
description="Sequence number of the message (set automatically)", json_schema_extra={"label": "Sequenznummer"})
|
||||
publishedAt: Optional[float] = Field(default=None,
|
||||
description="When the message was published (UTC timestamp in seconds)", json_schema_extra={"label": "Veröffentlicht am"})
|
||||
description="When the message was published (UTC timestamp in seconds)", json_schema_extra={"label": "Veröffentlicht am", "frontend_type": "timestamp"})
|
||||
success: Optional[bool] = Field(None, description="Whether the message processing was successful", json_schema_extra={"label": "Erfolg"})
|
||||
actionId: Optional[str] = Field(None, description="ID of the action that produced this message", json_schema_extra={"label": "Aktions-ID"})
|
||||
actionMethod: Optional[str] = Field(None, description="Method of the action that produced this message", json_schema_extra={"label": "Aktionsmethode"})
|
||||
|
|
@ -125,7 +125,7 @@ class ChatWorkflow(PowerOnModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
linkedWorkflowId: Optional[str] = Field(
|
||||
|
|
@ -219,7 +219,7 @@ class UserInputRequest(BaseModel):
|
|||
workflowId: Optional[str] = Field(
|
||||
None,
|
||||
description="Optional ID of the workflow to continue",
|
||||
json_schema_extra={"label": "Workflow-ID", "fk_target": {"db": "poweron_chat", "table": "ChatWorkflow"}},
|
||||
json_schema_extra={"label": "Workflow-ID", "fk_target": {"db": "poweron_chat", "table": "ChatWorkflow", "labelField": "name"}},
|
||||
)
|
||||
allowedProviders: Optional[List[str]] = Field(None, description="List of allowed AI providers (multiselect)", json_schema_extra={"label": "Erlaubte Anbieter"})
|
||||
|
||||
|
|
@ -281,8 +281,8 @@ class ObservationPreview(BaseModel):
|
|||
# Extended metadata fields
|
||||
mimeType: Optional[str] = Field(default=None, description="MIME type", json_schema_extra={"label": "MIME-Typ"})
|
||||
size: Optional[str] = Field(default=None, description="File size", json_schema_extra={"label": "Größe"})
|
||||
created: Optional[str] = Field(default=None, description="Creation timestamp", json_schema_extra={"label": "Erstellt"})
|
||||
modified: Optional[str] = Field(default=None, description="Modification timestamp", json_schema_extra={"label": "Geändert"})
|
||||
created: Optional[float] = Field(default=None, description="Creation timestamp (UTC unix)", json_schema_extra={"label": "Erstellt", "frontend_type": "timestamp"})
|
||||
modified: Optional[float] = Field(default=None, description="Modification timestamp (UTC unix)", json_schema_extra={"label": "Geändert", "frontend_type": "timestamp"})
|
||||
typeGroup: Optional[str] = Field(default=None, description="Document type group", json_schema_extra={"label": "Typgruppe"})
|
||||
documentId: Optional[str] = Field(default=None, description="Document ID", json_schema_extra={"label": "Dokument-ID"})
|
||||
reference: Optional[str] = Field(default=None, description="Document reference", json_schema_extra={"label": "Referenz"})
|
||||
|
|
@ -332,7 +332,7 @@ class ActionItem(BaseModel):
|
|||
retryCount: int = Field(default=0, description="Number of retries attempted", json_schema_extra={"label": "Wiederholungen"})
|
||||
retryMax: int = Field(default=3, description="Maximum number of retries", json_schema_extra={"label": "Max. Wiederholungen"})
|
||||
processingTime: Optional[float] = Field(None, description="Processing time in seconds", json_schema_extra={"label": "Bearbeitungszeit"})
|
||||
timestamp: float = Field(..., description="When the action was executed (UTC timestamp in seconds)", json_schema_extra={"label": "Zeitstempel"})
|
||||
timestamp: float = Field(..., description="When the action was executed (UTC timestamp in seconds)", json_schema_extra={"label": "Zeitstempel", "frontend_type": "timestamp"})
|
||||
result: Optional[str] = Field(None, description="Result of the action", json_schema_extra={"label": "Ergebnis"})
|
||||
|
||||
def setSuccess(self, result: str = None) -> None:
|
||||
|
|
@ -361,13 +361,13 @@ class TaskItem(BaseModel):
|
|||
workflowId: str = Field(
|
||||
...,
|
||||
description="Workflow ID",
|
||||
json_schema_extra={"label": "Workflow-ID", "fk_target": {"db": "poweron_chat", "table": "ChatWorkflow"}},
|
||||
json_schema_extra={"label": "Workflow-ID", "fk_target": {"db": "poweron_chat", "table": "ChatWorkflow", "labelField": "name"}},
|
||||
)
|
||||
userInput: str = Field(..., description="User input that triggered the task", json_schema_extra={"label": "Benutzereingabe"})
|
||||
status: TaskStatus = Field(default=TaskStatus.PENDING, description="Task status", json_schema_extra={"label": "Status"})
|
||||
error: Optional[str] = Field(None, description="Error message if task failed", json_schema_extra={"label": "Fehler"})
|
||||
startedAt: Optional[float] = Field(None, description="When the task started (UTC timestamp in seconds)", json_schema_extra={"label": "Gestartet am"})
|
||||
finishedAt: Optional[float] = Field(None, description="When the task finished (UTC timestamp in seconds)", json_schema_extra={"label": "Beendet am"})
|
||||
startedAt: Optional[float] = Field(None, description="When the task started (UTC timestamp in seconds)", json_schema_extra={"label": "Gestartet am", "frontend_type": "timestamp"})
|
||||
finishedAt: Optional[float] = Field(None, description="When the task finished (UTC timestamp in seconds)", json_schema_extra={"label": "Beendet am", "frontend_type": "timestamp"})
|
||||
actionList: List[ActionItem] = Field(default_factory=list, description="List of actions to execute", json_schema_extra={"label": "Aktionen"})
|
||||
retryCount: int = Field(default=0, description="Number of retries attempted", json_schema_extra={"label": "Wiederholungen"})
|
||||
retryMax: int = Field(default=3, description="Maximum number of retries", json_schema_extra={"label": "Max. Wiederholungen"})
|
||||
|
|
@ -402,7 +402,7 @@ class TaskHandover(BaseModel):
|
|||
improvements: List[str] = Field(default_factory=list, description="Improvement suggestions", json_schema_extra={"label": "Verbesserungen"})
|
||||
workflowSummary: Optional[str] = Field(None, description="Summarized workflow context", json_schema_extra={"label": "Workflow-Zusammenfassung"})
|
||||
messageHistory: List[str] = Field(default_factory=list, description="Key message summaries", json_schema_extra={"label": "Nachrichtenverlauf"})
|
||||
timestamp: float = Field(..., description="When the handover was created (UTC timestamp in seconds)", json_schema_extra={"label": "Zeitstempel"})
|
||||
timestamp: float = Field(..., description="When the handover was created (UTC timestamp in seconds)", json_schema_extra={"label": "Zeitstempel", "frontend_type": "timestamp"})
|
||||
handoverType: str = Field(default="task", description="Type of handover: task, phase, or workflow", json_schema_extra={"label": "Übergabetyp"})
|
||||
|
||||
class TaskContext(BaseModel):
|
||||
|
|
|
|||
|
|
@ -34,7 +34,7 @@ class ContentObject(BaseModel):
|
|||
id: str = Field(default_factory=lambda: str(uuid.uuid4()))
|
||||
fileId: str = Field(
|
||||
description="FK to the physical file",
|
||||
json_schema_extra={"fk_target": {"db": "poweron_management", "table": "FileItem"}},
|
||||
json_schema_extra={"fk_target": {"db": "poweron_management", "table": "FileItem", "labelField": "fileName"}},
|
||||
)
|
||||
contentType: str = Field(description="text, image, videostream, audiostream, other")
|
||||
data: str = Field(default="", description="Content data (text, base64, URL)")
|
||||
|
|
|
|||
|
|
@ -23,7 +23,7 @@ class DataSource(PowerOnModel):
|
|||
)
|
||||
connectionId: str = Field(
|
||||
description="FK to UserConnection",
|
||||
json_schema_extra={"label": "Verbindungs-ID", "fk_target": {"db": "poweron_app", "table": "UserConnection"}},
|
||||
json_schema_extra={"label": "Verbindungs-ID", "fk_target": {"db": "poweron_app", "table": "UserConnection", "labelField": "externalUsername"}},
|
||||
)
|
||||
sourceType: str = Field(
|
||||
description="sharepointFolder, googleDriveFolder, outlookFolder, ftpFolder, clickupList (path under /team/...)",
|
||||
|
|
@ -45,17 +45,17 @@ class DataSource(PowerOnModel):
|
|||
featureInstanceId: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Scoped to feature instance",
|
||||
json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
|
||||
json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}},
|
||||
)
|
||||
mandateId: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Mandate scope",
|
||||
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate"}},
|
||||
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}},
|
||||
)
|
||||
userId: str = Field(
|
||||
default="",
|
||||
description="Owner user ID",
|
||||
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "User"}},
|
||||
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"}},
|
||||
)
|
||||
autoSync: bool = Field(
|
||||
default=False,
|
||||
|
|
@ -65,7 +65,7 @@ class DataSource(PowerOnModel):
|
|||
lastSynced: Optional[float] = Field(
|
||||
default=None,
|
||||
description="Last sync timestamp",
|
||||
json_schema_extra={"label": "Letzter Sync"},
|
||||
json_schema_extra={"label": "Letzter Sync", "frontend_type": "timestamp"},
|
||||
)
|
||||
scope: str = Field(
|
||||
default="personal",
|
||||
|
|
@ -91,5 +91,9 @@ class ExternalEntry(BaseModel):
|
|||
isFolder: bool = Field(default=False, description="True if directory/folder")
|
||||
size: Optional[int] = Field(default=None, description="File size in bytes")
|
||||
mimeType: Optional[str] = Field(default=None, description="MIME type (files only)")
|
||||
lastModified: Optional[float] = Field(default=None, description="Last modification timestamp")
|
||||
lastModified: Optional[float] = Field(
|
||||
default=None,
|
||||
description="Last modification timestamp",
|
||||
json_schema_extra={"frontend_type": "timestamp"},
|
||||
)
|
||||
metadata: Dict[str, Any] = Field(default_factory=dict, description="Provider-specific metadata")
|
||||
|
|
|
|||
|
|
@ -23,11 +23,11 @@ class FeatureDataSource(PowerOnModel):
|
|||
)
|
||||
featureInstanceId: str = Field(
|
||||
description="FK to FeatureInstance",
|
||||
json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
|
||||
json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}},
|
||||
)
|
||||
featureCode: str = Field(
|
||||
description="Feature code (e.g. trustee, commcoach)",
|
||||
json_schema_extra={"label": "Feature", "fk_target": {"db": "poweron_app", "table": "Feature", "column": "code"}},
|
||||
json_schema_extra={"label": "Feature", "fk_target": {"db": "poweron_app", "table": "Feature", "column": "code", "labelField": "code"}},
|
||||
)
|
||||
tableName: str = Field(
|
||||
description="Table name from DATA_OBJECTS meta (e.g. TrusteePosition)",
|
||||
|
|
@ -44,16 +44,16 @@ class FeatureDataSource(PowerOnModel):
|
|||
mandateId: str = Field(
|
||||
default="",
|
||||
description="Mandate scope",
|
||||
json_schema_extra={"label": "Mandant", "fk_target": {"db": "poweron_app", "table": "Mandate"}},
|
||||
json_schema_extra={"label": "Mandant", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}},
|
||||
)
|
||||
userId: str = Field(
|
||||
default="",
|
||||
description="Owner user ID",
|
||||
json_schema_extra={"label": "Benutzer", "fk_target": {"db": "poweron_app", "table": "User"}},
|
||||
json_schema_extra={"label": "Benutzer", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"}},
|
||||
)
|
||||
workspaceInstanceId: str = Field(
|
||||
description="Workspace feature instance where this source is used",
|
||||
json_schema_extra={"label": "Workspace", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
|
||||
json_schema_extra={"label": "Workspace", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}},
|
||||
)
|
||||
scope: str = Field(
|
||||
default="personal",
|
||||
|
|
|
|||
|
|
@ -43,7 +43,7 @@ class FeatureInstance(PowerOnModel):
|
|||
"frontend_type": "select",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": True,
|
||||
"fk_target": {"db": "poweron_app", "table": "Feature", "column": "code"},
|
||||
"fk_target": {"db": "poweron_app", "table": "Feature", "column": "code", "labelField": "code"},
|
||||
},
|
||||
)
|
||||
mandateId: str = Field(
|
||||
|
|
@ -53,7 +53,7 @@ class FeatureInstance(PowerOnModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": True,
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
label: str = Field(
|
||||
|
|
|
|||
|
|
@ -29,7 +29,7 @@ class FileFolder(PowerOnModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": False,
|
||||
"frontend_required": False,
|
||||
"fk_target": {"db": "poweron_management", "table": "FileFolder"},
|
||||
"fk_target": {"db": "poweron_management", "table": "FileFolder", "labelField": "name"},
|
||||
},
|
||||
)
|
||||
mandateId: Optional[str] = Field(
|
||||
|
|
@ -40,7 +40,7 @@ class FileFolder(PowerOnModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
featureInstanceId: Optional[str] = Field(
|
||||
|
|
@ -51,7 +51,7 @@ class FileFolder(PowerOnModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
scope: str = Field(
|
||||
|
|
|
|||
|
|
@ -30,9 +30,7 @@ class FileItem(PowerOnModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"fk_model": "Mandate",
|
||||
"fk_label_field": "label",
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
featureInstanceId: Optional[str] = Field(
|
||||
|
|
@ -43,9 +41,7 @@ class FileItem(PowerOnModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"fk_model": "FeatureInstance",
|
||||
"fk_label_field": "label",
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
mimeType: str = Field(
|
||||
|
|
@ -80,7 +76,7 @@ class FileItem(PowerOnModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": False,
|
||||
"frontend_required": False,
|
||||
"fk_target": {"db": "poweron_management", "table": "FileFolder"},
|
||||
"fk_target": {"db": "poweron_management", "table": "FileFolder", "labelField": "name"},
|
||||
},
|
||||
)
|
||||
description: Optional[str] = Field(
|
||||
|
|
|
|||
|
|
@ -37,7 +37,7 @@ class Invitation(PowerOnModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": True,
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
featureInstanceId: Optional[str] = Field(
|
||||
|
|
@ -48,7 +48,7 @@ class Invitation(PowerOnModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
roleIds: List[str] = Field(
|
||||
|
|
@ -80,7 +80,7 @@ class Invitation(PowerOnModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"fk_target": {"db": "poweron_app", "table": "User"},
|
||||
"fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
|
||||
},
|
||||
)
|
||||
usedAt: Optional[float] = Field(
|
||||
|
|
|
|||
|
|
@ -30,17 +30,17 @@ class FileContentIndex(PowerOnModel):
|
|||
)
|
||||
userId: str = Field(
|
||||
description="Owner user ID",
|
||||
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "User"}},
|
||||
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"}},
|
||||
)
|
||||
featureInstanceId: str = Field(
|
||||
default="",
|
||||
description="Feature instance scope",
|
||||
json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
|
||||
json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}},
|
||||
)
|
||||
mandateId: str = Field(
|
||||
default="",
|
||||
description="Mandate scope",
|
||||
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate"}},
|
||||
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}},
|
||||
)
|
||||
fileName: str = Field(
|
||||
description="Original file name",
|
||||
|
|
@ -78,7 +78,7 @@ class FileContentIndex(PowerOnModel):
|
|||
extractedAt: float = Field(
|
||||
default_factory=getUtcTimestamp,
|
||||
description="Extraction timestamp",
|
||||
json_schema_extra={"label": "Extrahiert am"},
|
||||
json_schema_extra={"label": "Extrahiert am", "frontend_type": "timestamp"},
|
||||
)
|
||||
status: str = Field(
|
||||
default="pending",
|
||||
|
|
@ -116,16 +116,16 @@ class ContentChunk(PowerOnModel):
|
|||
)
|
||||
fileId: str = Field(
|
||||
description="FK to the source file",
|
||||
json_schema_extra={"label": "Datei-ID", "fk_target": {"db": "poweron_management", "table": "FileItem"}},
|
||||
json_schema_extra={"label": "Datei-ID", "fk_target": {"db": "poweron_management", "table": "FileItem", "labelField": "fileName"}},
|
||||
)
|
||||
userId: str = Field(
|
||||
description="Owner user ID",
|
||||
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "User"}},
|
||||
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"}},
|
||||
)
|
||||
featureInstanceId: str = Field(
|
||||
default="",
|
||||
description="Feature instance scope",
|
||||
json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
|
||||
json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}},
|
||||
)
|
||||
contentType: str = Field(
|
||||
description="Content type: text, image, videostream, audiostream, other",
|
||||
|
|
@ -214,16 +214,16 @@ class WorkflowMemory(PowerOnModel):
|
|||
)
|
||||
workflowId: str = Field(
|
||||
description="FK to the workflow",
|
||||
json_schema_extra={"label": "Workflow-ID", "fk_target": {"db": "poweron_chat", "table": "ChatWorkflow"}},
|
||||
json_schema_extra={"label": "Workflow-ID", "fk_target": {"db": "poweron_chat", "table": "ChatWorkflow", "labelField": "name"}},
|
||||
)
|
||||
userId: str = Field(
|
||||
description="Owner user ID",
|
||||
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "User"}},
|
||||
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"}},
|
||||
)
|
||||
featureInstanceId: str = Field(
|
||||
default="",
|
||||
description="Feature instance scope",
|
||||
json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
|
||||
json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}},
|
||||
)
|
||||
key: str = Field(
|
||||
description="Key identifier (e.g. 'entity:companyName')",
|
||||
|
|
|
|||
|
|
@ -31,9 +31,7 @@ class UserMandate(PowerOnModel):
|
|||
"frontend_type": "select",
|
||||
"frontend_readonly": False,
|
||||
"frontend_required": True,
|
||||
"fk_model": "User",
|
||||
"fk_label_field": "username",
|
||||
"fk_target": {"db": "poweron_app", "table": "User"},
|
||||
"fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
|
||||
},
|
||||
)
|
||||
mandateId: str = Field(
|
||||
|
|
@ -43,9 +41,7 @@ class UserMandate(PowerOnModel):
|
|||
"frontend_type": "select",
|
||||
"frontend_readonly": False,
|
||||
"frontend_required": True,
|
||||
"fk_model": "Mandate",
|
||||
"fk_label_field": "label",
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
enabled: bool = Field(
|
||||
|
|
@ -73,9 +69,7 @@ class FeatureAccess(PowerOnModel):
|
|||
"frontend_type": "select",
|
||||
"frontend_readonly": False,
|
||||
"frontend_required": True,
|
||||
"fk_model": "User",
|
||||
"fk_label_field": "username",
|
||||
"fk_target": {"db": "poweron_app", "table": "User"},
|
||||
"fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
|
||||
},
|
||||
)
|
||||
featureInstanceId: str = Field(
|
||||
|
|
@ -85,9 +79,7 @@ class FeatureAccess(PowerOnModel):
|
|||
"frontend_type": "select",
|
||||
"frontend_readonly": False,
|
||||
"frontend_required": True,
|
||||
"fk_model": "FeatureInstance",
|
||||
"fk_label_field": "label",
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
enabled: bool = Field(
|
||||
|
|
@ -115,7 +107,7 @@ class UserMandateRole(PowerOnModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": True,
|
||||
"fk_target": {"db": "poweron_app", "table": "UserMandate"},
|
||||
"fk_target": {"db": "poweron_app", "table": "UserMandate", "labelField": None},
|
||||
},
|
||||
)
|
||||
roleId: str = Field(
|
||||
|
|
@ -125,9 +117,7 @@ class UserMandateRole(PowerOnModel):
|
|||
"frontend_type": "select",
|
||||
"frontend_readonly": False,
|
||||
"frontend_required": True,
|
||||
"fk_model": "Role",
|
||||
"fk_label_field": "roleLabel",
|
||||
"fk_target": {"db": "poweron_app", "table": "Role"},
|
||||
"fk_target": {"db": "poweron_app", "table": "Role", "labelField": "roleLabel"},
|
||||
},
|
||||
)
|
||||
|
||||
|
|
@ -150,7 +140,7 @@ class FeatureAccessRole(PowerOnModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": True,
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureAccess"},
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureAccess", "labelField": None},
|
||||
},
|
||||
)
|
||||
roleId: str = Field(
|
||||
|
|
@ -160,8 +150,6 @@ class FeatureAccessRole(PowerOnModel):
|
|||
"frontend_type": "select",
|
||||
"frontend_readonly": False,
|
||||
"frontend_required": True,
|
||||
"fk_model": "Role",
|
||||
"fk_label_field": "roleLabel",
|
||||
"fk_target": {"db": "poweron_app", "table": "Role"},
|
||||
"fk_target": {"db": "poweron_app", "table": "Role", "labelField": "roleLabel"},
|
||||
},
|
||||
)
|
||||
|
|
|
|||
|
|
@ -64,7 +64,7 @@ class MessagingSubscription(PowerOnModel):
|
|||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"label": "Mandanten-ID",
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
featureInstanceId: str = Field(
|
||||
|
|
@ -74,7 +74,7 @@ class MessagingSubscription(PowerOnModel):
|
|||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"label": "Feature-Instanz-ID",
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
description: Optional[str] = Field(
|
||||
|
|
@ -131,7 +131,7 @@ class MessagingSubscriptionRegistration(BaseModel):
|
|||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"label": "Mandanten-ID",
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
featureInstanceId: str = Field(
|
||||
|
|
@ -141,7 +141,7 @@ class MessagingSubscriptionRegistration(BaseModel):
|
|||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"label": "Feature-Instanz-ID",
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
subscriptionId: str = Field(
|
||||
|
|
@ -160,7 +160,7 @@ class MessagingSubscriptionRegistration(BaseModel):
|
|||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"label": "Benutzer-ID",
|
||||
"fk_target": {"db": "poweron_app", "table": "User"},
|
||||
"fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
|
||||
},
|
||||
)
|
||||
channel: MessagingChannel = Field(
|
||||
|
|
@ -249,7 +249,7 @@ class MessagingDelivery(BaseModel):
|
|||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"label": "Benutzer-ID",
|
||||
"fk_target": {"db": "poweron_app", "table": "User"},
|
||||
"fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
|
||||
},
|
||||
)
|
||||
channel: MessagingChannel = Field(
|
||||
|
|
@ -296,7 +296,7 @@ class MessagingDelivery(BaseModel):
|
|||
default=None,
|
||||
description="When the delivery was sent (UTC timestamp in seconds)",
|
||||
json_schema_extra={
|
||||
"frontend_type": "datetime",
|
||||
"frontend_type": "timestamp",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"label": "Gesendet am",
|
||||
|
|
|
|||
|
|
@ -65,7 +65,7 @@ class UserNotification(PowerOnModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": True,
|
||||
"fk_target": {"db": "poweron_app", "table": "User"},
|
||||
"fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
|
||||
},
|
||||
)
|
||||
|
||||
|
|
|
|||
|
|
@ -63,9 +63,7 @@ class Role(PowerOnModel):
|
|||
"frontend_readonly": True,
|
||||
"frontend_visible": True,
|
||||
"frontend_required": False,
|
||||
"fk_model": "Mandate",
|
||||
"fk_label_field": "label",
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
featureInstanceId: Optional[str] = Field(
|
||||
|
|
@ -77,9 +75,7 @@ class Role(PowerOnModel):
|
|||
"frontend_readonly": True,
|
||||
"frontend_visible": True,
|
||||
"frontend_required": False,
|
||||
"fk_model": "FeatureInstance",
|
||||
"fk_label_field": "label",
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
featureCode: Optional[str] = Field(
|
||||
|
|
@ -115,9 +111,7 @@ class AccessRule(PowerOnModel):
|
|||
"frontend_type": "select",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": True,
|
||||
"fk_model": "Role",
|
||||
"fk_label_field": "roleLabel",
|
||||
"fk_target": {"db": "poweron_app", "table": "Role"},
|
||||
"fk_target": {"db": "poweron_app", "table": "Role", "labelField": "roleLabel"},
|
||||
},
|
||||
)
|
||||
context: AccessRuleContext = Field(
|
||||
|
|
|
|||
|
|
@ -47,7 +47,7 @@ class Token(PowerOnModel):
|
|||
)
|
||||
userId: str = Field(
|
||||
...,
|
||||
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "User"}},
|
||||
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"}},
|
||||
)
|
||||
authority: AuthAuthority = Field(
|
||||
...,
|
||||
|
|
@ -56,7 +56,7 @@ class Token(PowerOnModel):
|
|||
connectionId: Optional[str] = Field(
|
||||
None,
|
||||
description="ID of the connection this token belongs to",
|
||||
json_schema_extra={"label": "Verbindungs-ID", "fk_target": {"db": "poweron_app", "table": "UserConnection"}},
|
||||
json_schema_extra={"label": "Verbindungs-ID", "fk_target": {"db": "poweron_app", "table": "UserConnection", "labelField": "externalUsername"}},
|
||||
)
|
||||
tokenPurpose: Optional[TokenPurpose] = Field(
|
||||
default=None,
|
||||
|
|
@ -73,7 +73,7 @@ class Token(PowerOnModel):
|
|||
)
|
||||
expiresAt: float = Field(
|
||||
description="When the token expires (UTC timestamp in seconds)",
|
||||
json_schema_extra={"label": "Laeuft ab am"},
|
||||
json_schema_extra={"label": "Laeuft ab am", "frontend_type": "timestamp"},
|
||||
)
|
||||
tokenRefresh: Optional[str] = Field(
|
||||
default=None,
|
||||
|
|
@ -87,12 +87,12 @@ class Token(PowerOnModel):
|
|||
revokedAt: Optional[float] = Field(
|
||||
None,
|
||||
description="When the token was revoked (UTC timestamp in seconds)",
|
||||
json_schema_extra={"label": "Widerrufen am"},
|
||||
json_schema_extra={"label": "Widerrufen am", "frontend_type": "timestamp"},
|
||||
)
|
||||
revokedBy: Optional[str] = Field(
|
||||
None,
|
||||
description="User ID who revoked the token (admin/self)",
|
||||
json_schema_extra={"label": "Widerrufen von", "fk_target": {"db": "poweron_app", "table": "User"}},
|
||||
json_schema_extra={"label": "Widerrufen von", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"}},
|
||||
)
|
||||
reason: Optional[str] = Field(
|
||||
None,
|
||||
|
|
@ -139,7 +139,7 @@ class AuthEvent(PowerOnModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": True,
|
||||
"fk_target": {"db": "poweron_app", "table": "User"},
|
||||
"fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
|
||||
},
|
||||
)
|
||||
eventType: str = Field(
|
||||
|
|
@ -149,7 +149,7 @@ class AuthEvent(PowerOnModel):
|
|||
timestamp: float = Field(
|
||||
default_factory=getUtcTimestamp,
|
||||
description="Unix timestamp when the event occurred",
|
||||
json_schema_extra={"label": "Zeitstempel", "frontend_type": "datetime", "frontend_readonly": True, "frontend_required": True},
|
||||
json_schema_extra={"label": "Zeitstempel", "frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": True},
|
||||
)
|
||||
ipAddress: Optional[str] = Field(
|
||||
default=None,
|
||||
|
|
|
|||
|
|
@ -207,7 +207,7 @@ class MandateSubscription(PowerOnModel):
|
|||
mandateId: str = Field(
|
||||
...,
|
||||
description="Foreign key to Mandate",
|
||||
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate"}},
|
||||
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}},
|
||||
)
|
||||
planKey: str = Field(
|
||||
...,
|
||||
|
|
@ -226,35 +226,35 @@ class MandateSubscription(PowerOnModel):
|
|||
json_schema_extra={"label": "Wiederkehrend"},
|
||||
)
|
||||
|
||||
startedAt: datetime = Field(
|
||||
default_factory=lambda: datetime.now(timezone.utc),
|
||||
description="Record creation timestamp",
|
||||
json_schema_extra={"label": "Gestartet"},
|
||||
startedAt: float = Field(
|
||||
default_factory=lambda: datetime.now(timezone.utc).timestamp(),
|
||||
description="Record creation timestamp (UTC unix)",
|
||||
json_schema_extra={"label": "Gestartet", "frontend_type": "timestamp"},
|
||||
)
|
||||
effectiveFrom: Optional[datetime] = Field(
|
||||
effectiveFrom: Optional[float] = Field(
|
||||
None,
|
||||
description="When this subscription becomes operative. None = immediate. Set for SCHEDULED subs.",
|
||||
json_schema_extra={"label": "Wirksam ab"},
|
||||
description="When this subscription becomes operative (UTC unix). None = immediate.",
|
||||
json_schema_extra={"label": "Wirksam ab", "frontend_type": "timestamp"},
|
||||
)
|
||||
endedAt: Optional[datetime] = Field(
|
||||
endedAt: Optional[float] = Field(
|
||||
None,
|
||||
description="When subscription ended (terminal)",
|
||||
json_schema_extra={"label": "Beendet"},
|
||||
description="When subscription ended (UTC unix)",
|
||||
json_schema_extra={"label": "Beendet", "frontend_type": "timestamp"},
|
||||
)
|
||||
currentPeriodStart: Optional[datetime] = Field(
|
||||
currentPeriodStart: Optional[float] = Field(
|
||||
None,
|
||||
description="Current billing period start (synced from Stripe)",
|
||||
json_schema_extra={"label": "Periodenbeginn"},
|
||||
description="Current billing period start (UTC unix, synced from Stripe)",
|
||||
json_schema_extra={"label": "Periodenbeginn", "frontend_type": "timestamp"},
|
||||
)
|
||||
currentPeriodEnd: Optional[datetime] = Field(
|
||||
currentPeriodEnd: Optional[float] = Field(
|
||||
None,
|
||||
description="Current billing period end (synced from Stripe)",
|
||||
json_schema_extra={"label": "Periodenende"},
|
||||
description="Current billing period end (UTC unix, synced from Stripe)",
|
||||
json_schema_extra={"label": "Periodenende", "frontend_type": "timestamp"},
|
||||
)
|
||||
trialEndsAt: Optional[datetime] = Field(
|
||||
trialEndsAt: Optional[float] = Field(
|
||||
None,
|
||||
description="Trial expiry timestamp",
|
||||
json_schema_extra={"label": "Trial endet"},
|
||||
description="Trial expiry timestamp (UTC unix)",
|
||||
json_schema_extra={"label": "Trial endet", "frontend_type": "timestamp"},
|
||||
)
|
||||
|
||||
snapshotPricePerUserCHF: float = Field(
|
||||
|
|
|
|||
|
|
@ -397,9 +397,7 @@ class UserConnection(PowerOnModel):
|
|||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"label": "Benutzer-ID",
|
||||
"fk_model": "User",
|
||||
"fk_label_field": "username",
|
||||
"fk_target": {"db": "poweron_app", "table": "User"},
|
||||
"fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
|
||||
},
|
||||
)
|
||||
authority: AuthAuthority = Field(
|
||||
|
|
@ -648,7 +646,7 @@ class UserInDB(User):
|
|||
resetTokenExpires: Optional[float] = Field(
|
||||
None,
|
||||
description="Reset token expiration (UTC timestamp in seconds)",
|
||||
json_schema_extra={"label": "Token läuft ab"},
|
||||
json_schema_extra={"label": "Token läuft ab", "frontend_type": "timestamp"},
|
||||
)
|
||||
|
||||
|
||||
|
|
@ -689,12 +687,12 @@ class UserVoicePreferences(PowerOnModel):
|
|||
)
|
||||
userId: str = Field(
|
||||
description="User ID",
|
||||
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "User"}},
|
||||
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"}},
|
||||
)
|
||||
mandateId: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Mandate scope (None = global for user)",
|
||||
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate"}},
|
||||
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}},
|
||||
)
|
||||
sttLanguage: str = Field(
|
||||
default="de-DE",
|
||||
|
|
|
|||
|
|
@ -14,8 +14,8 @@ from modules.datamodels.datamodelExtraction import ContentExtracted, ContentPart
|
|||
class UdmMetadata(BaseModel):
|
||||
title: Optional[str] = None
|
||||
author: Optional[str] = None
|
||||
createdAt: Optional[str] = None
|
||||
modifiedAt: Optional[str] = None
|
||||
createdAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
|
||||
modifiedAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
|
||||
sourcePath: str = ""
|
||||
tags: List[str] = Field(default_factory=list)
|
||||
custom: Dict[str, Any] = Field(default_factory=dict)
|
||||
|
|
|
|||
|
|
@ -27,9 +27,7 @@ class Prompt(PowerOnModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"fk_model": "Mandate",
|
||||
"fk_label_field": "label",
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
isSystem: bool = Field(
|
||||
|
|
|
|||
199
modules/datamodels/datamodelViews.py
Normal file
199
modules/datamodels/datamodelViews.py
Normal file
|
|
@ -0,0 +1,199 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
# All rights reserved.
|
||||
"""
|
||||
View models for the /api/attributes/ endpoint.
|
||||
|
||||
These extend base DB models with computed / enriched fields that the gateway
|
||||
adds at response time (JOINs, aggregations, synthetics). They are NEVER used
|
||||
for DB operations — only for ``getModelAttributeDefinitions()`` so the frontend
|
||||
can resolve column types via ``resolveColumnTypes`` without hardcoding.
|
||||
|
||||
Naming convention: ``{BaseModel}View``.
|
||||
|
||||
``getModelClasses()`` in ``attributeUtils.py`` auto-discovers every
|
||||
``datamodel*.py`` under ``modules/datamodels/`` — so placing them here is
|
||||
sufficient for registration.
|
||||
"""
|
||||
|
||||
from typing import Optional, List
|
||||
from pydantic import Field
|
||||
|
||||
from modules.datamodels.datamodelBase import MODEL_REGISTRY, PowerOnModel
|
||||
from modules.datamodels.datamodelMembership import UserMandate, FeatureAccess
|
||||
from modules.datamodels.datamodelBilling import BillingTransaction
|
||||
from modules.datamodels.datamodelSubscription import MandateSubscription
|
||||
from modules.datamodels.datamodelUiLanguage import UiLanguageSet
|
||||
from modules.features.neutralization.datamodelFeatureNeutralizer import DataNeutralizerAttributes
|
||||
from modules.shared.i18nRegistry import i18nModel
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Punkt 1a: UserMandate + enriched user fields
|
||||
# ============================================================================
|
||||
|
||||
@i18nModel("Benutzer-Mandant (Ansicht)")
|
||||
class UserMandateView(UserMandate):
|
||||
"""UserMandate erweitert um aufgeloeste Benutzerfelder und Rollenlabels."""
|
||||
|
||||
username: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Username (resolved from userId)",
|
||||
json_schema_extra={"label": "Benutzername", "frontend_type": "text", "frontend_readonly": True},
|
||||
)
|
||||
email: Optional[str] = Field(
|
||||
default=None,
|
||||
description="E-Mail address (resolved from userId)",
|
||||
json_schema_extra={"label": "E-Mail", "frontend_type": "text", "frontend_readonly": True},
|
||||
)
|
||||
fullName: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Full name (resolved from userId)",
|
||||
json_schema_extra={"label": "Vollstaendiger Name", "frontend_type": "text", "frontend_readonly": True},
|
||||
)
|
||||
roleLabels: Optional[List[str]] = Field(
|
||||
default=None,
|
||||
description="Role labels (resolved from junction table)",
|
||||
json_schema_extra={"label": "Rollen", "frontend_type": "text", "frontend_readonly": True},
|
||||
)
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Punkt 1b: FeatureAccess + enriched user fields
|
||||
# ============================================================================
|
||||
|
||||
@i18nModel("Feature-Zugang (Ansicht)")
|
||||
class FeatureAccessView(FeatureAccess):
|
||||
"""FeatureAccess erweitert um aufgeloeste Benutzerfelder und Rollenlabels."""
|
||||
|
||||
username: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Username (resolved from userId)",
|
||||
json_schema_extra={"label": "Benutzername", "frontend_type": "text", "frontend_readonly": True},
|
||||
)
|
||||
email: Optional[str] = Field(
|
||||
default=None,
|
||||
description="E-Mail address (resolved from userId)",
|
||||
json_schema_extra={"label": "E-Mail", "frontend_type": "text", "frontend_readonly": True},
|
||||
)
|
||||
fullName: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Full name (resolved from userId)",
|
||||
json_schema_extra={"label": "Vollstaendiger Name", "frontend_type": "text", "frontend_readonly": True},
|
||||
)
|
||||
roleLabels: Optional[List[str]] = Field(
|
||||
default=None,
|
||||
description="Role labels (resolved from junction table)",
|
||||
json_schema_extra={"label": "Rollen", "frontend_type": "text", "frontend_readonly": True},
|
||||
)
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Punkt 1d: BillingTransaction + enriched mandate/user names
|
||||
# ============================================================================
|
||||
|
||||
@i18nModel("Transaktion (Ansicht)")
|
||||
class BillingTransactionView(BillingTransaction):
|
||||
"""BillingTransaction erweitert um aufgeloeste Mandanten-/Benutzernamen."""
|
||||
|
||||
mandateName: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Mandate name (resolved from accountId/mandateId)",
|
||||
json_schema_extra={"label": "Mandant", "frontend_type": "text", "frontend_readonly": True},
|
||||
)
|
||||
userName: Optional[str] = Field(
|
||||
default=None,
|
||||
description="User name (resolved from createdByUserId)",
|
||||
json_schema_extra={"label": "Benutzer", "frontend_type": "text", "frontend_readonly": True},
|
||||
)
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Punkt 3a: MandateSubscription + aggregated fields
|
||||
# ============================================================================
|
||||
|
||||
@i18nModel("Abonnement (Ansicht)")
|
||||
class MandateSubscriptionView(MandateSubscription):
|
||||
"""MandateSubscription erweitert um aggregierte Laufzeitwerte."""
|
||||
|
||||
mandateName: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Mandate name (resolved from mandateId)",
|
||||
json_schema_extra={"label": "Mandant", "frontend_type": "text", "frontend_readonly": True},
|
||||
)
|
||||
planTitle: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Plan title (resolved from planKey)",
|
||||
json_schema_extra={"label": "Plan", "frontend_type": "text", "frontend_readonly": True},
|
||||
)
|
||||
activeUsers: Optional[int] = Field(
|
||||
default=None,
|
||||
description="Number of active users in the mandate",
|
||||
json_schema_extra={"label": "Benutzer", "frontend_type": "number", "frontend_readonly": True},
|
||||
)
|
||||
activeInstances: Optional[int] = Field(
|
||||
default=None,
|
||||
description="Number of active feature instances in the mandate",
|
||||
json_schema_extra={"label": "Module", "frontend_type": "number", "frontend_readonly": True},
|
||||
)
|
||||
monthlyRevenueCHF: Optional[float] = Field(
|
||||
default=None,
|
||||
description="Calculated monthly revenue in CHF",
|
||||
json_schema_extra={"label": "Umsatz pro Monat", "frontend_type": "number", "frontend_readonly": True},
|
||||
)
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Punkt 3b: UiLanguageSet + computed counts
|
||||
# ============================================================================
|
||||
|
||||
@i18nModel("Sprachset (Ansicht)")
|
||||
class UiLanguageSetView(UiLanguageSet):
|
||||
"""UiLanguageSet erweitert um berechnete Uebersetzungszaehler."""
|
||||
|
||||
uiCount: Optional[int] = Field(
|
||||
default=None,
|
||||
description="Number of UI translation entries",
|
||||
json_schema_extra={"label": "UI", "frontend_type": "number", "frontend_readonly": True},
|
||||
)
|
||||
gatewayCount: Optional[int] = Field(
|
||||
default=None,
|
||||
description="Number of gateway/API translation entries",
|
||||
json_schema_extra={"label": "API", "frontend_type": "number", "frontend_readonly": True},
|
||||
)
|
||||
entriesCount: Optional[int] = Field(
|
||||
default=None,
|
||||
description="Total number of translation entries",
|
||||
json_schema_extra={"label": "Gesamt", "frontend_type": "number", "frontend_readonly": True},
|
||||
)
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Punkt 1c: DataNeutralizerAttributes + enriched fields
|
||||
#
|
||||
# DataNeutralizerAttributes extends BaseModel (not PowerOnModel), so its
|
||||
# subclass does NOT auto-register in MODEL_REGISTRY. We register manually.
|
||||
# ============================================================================
|
||||
|
||||
@i18nModel("Neutralisierungs-Zuordnung (Ansicht)")
|
||||
class DataNeutralizerAttributesView(DataNeutralizerAttributes):
|
||||
"""DataNeutralizerAttributes erweitert um synthetische/aufgeloeste Felder."""
|
||||
|
||||
placeholder: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Synthetic placeholder string [patternType.id]",
|
||||
json_schema_extra={"label": "Platzhalter", "frontend_type": "text", "frontend_readonly": True},
|
||||
)
|
||||
username: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Username (resolved from userId)",
|
||||
json_schema_extra={"label": "Benutzer", "frontend_type": "text", "frontend_readonly": True},
|
||||
)
|
||||
instanceLabel: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Feature instance label (resolved from featureInstanceId)",
|
||||
json_schema_extra={"label": "Feature-Instanz", "frontend_type": "text", "frontend_readonly": True},
|
||||
)
|
||||
|
||||
|
||||
# Manual registration for non-PowerOnModel view
|
||||
MODEL_REGISTRY["DataNeutralizerAttributesView"] = DataNeutralizerAttributesView # type: ignore[assignment]
|
||||
|
|
@ -503,11 +503,12 @@ class PwgDemo2026(_BaseDemoConfig):
|
|||
if monthlyRent <= 0:
|
||||
continue
|
||||
for month in range(1, 13):
|
||||
bookingDate = f"{year}-{month:02d}-01"
|
||||
from datetime import datetime as _dtCls, timezone as _tzCls
|
||||
bookingTs = _dtCls(year, month, 1, tzinfo=_tzCls.utc).timestamp()
|
||||
entryRef = f"PWG-{tenant.get('contactNumber')}-{year}{month:02d}"
|
||||
entry = TrusteeDataJournalEntry(
|
||||
externalId=entryRef,
|
||||
bookingDate=bookingDate,
|
||||
bookingDate=bookingTs,
|
||||
reference=entryRef,
|
||||
description=f"Mietzins {month:02d}/{year} {name}",
|
||||
currency="CHF",
|
||||
|
|
|
|||
|
|
@ -35,17 +35,6 @@ from modules.features.chatbot.mainChatbot import getEventManager
|
|||
from modules.shared.i18nRegistry import apiRouteContext
|
||||
routeApiMsg = apiRouteContext("routeFeatureChatbot")
|
||||
|
||||
# Pre-warm AI connectors when this router loads (before first request).
|
||||
# Ensures connectors are ready; avoids 4–8 s delay on first chatbot message.
|
||||
try:
|
||||
import modules.aicore.aicoreModelRegistry # noqa: F401
|
||||
from modules.aicore.aicoreModelRegistry import modelRegistry
|
||||
modelRegistry.ensureConnectorsRegistered()
|
||||
modelRegistry.refreshModels(force=True)
|
||||
logging.getLogger(__name__).info("Chatbot router: AI connectors pre-warmed")
|
||||
except Exception as e:
|
||||
logging.getLogger(__name__).warning(f"Chatbot AI pre-warm failed: {e}")
|
||||
|
||||
# Configure logger
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
|
|
|||
|
|
@ -90,7 +90,7 @@ class CoachingContext(PowerOnModel):
|
|||
metadata: Optional[str] = Field(default=None, description="JSON object with flexible metadata")
|
||||
sessionCount: int = Field(default=0)
|
||||
taskCount: int = Field(default=0)
|
||||
lastSessionAt: Optional[str] = Field(default=None)
|
||||
lastSessionAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
|
||||
rollingOverview: Optional[str] = Field(default=None, description="AI summary of older sessions for long context history")
|
||||
rollingOverviewUpToSessionCount: Optional[int] = Field(default=None, description="Session count covered by rollingOverview")
|
||||
|
||||
|
|
@ -113,8 +113,8 @@ class CoachingSession(PowerOnModel):
|
|||
messageCount: int = Field(default=0)
|
||||
competenceScore: Optional[float] = Field(default=None, ge=0.0, le=100.0)
|
||||
emailSent: bool = Field(default=False)
|
||||
startedAt: Optional[str] = Field(default=None)
|
||||
endedAt: Optional[str] = Field(default=None)
|
||||
startedAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
|
||||
endedAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
|
||||
|
||||
|
||||
class CoachingMessage(PowerOnModel):
|
||||
|
|
@ -141,8 +141,8 @@ class CoachingTask(PowerOnModel):
|
|||
description: Optional[str] = Field(default=None)
|
||||
status: CoachingTaskStatus = Field(default=CoachingTaskStatus.OPEN)
|
||||
priority: CoachingTaskPriority = Field(default=CoachingTaskPriority.MEDIUM)
|
||||
dueDate: Optional[str] = Field(default=None)
|
||||
completedAt: Optional[str] = Field(default=None)
|
||||
dueDate: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "date"})
|
||||
completedAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
|
||||
|
||||
|
||||
class CoachingScore(PowerOnModel):
|
||||
|
|
@ -171,7 +171,7 @@ class CoachingUserProfile(PowerOnModel):
|
|||
longestStreak: int = Field(default=0)
|
||||
totalSessions: int = Field(default=0)
|
||||
totalMinutes: int = Field(default=0)
|
||||
lastSessionAt: Optional[str] = Field(default=None)
|
||||
lastSessionAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
|
||||
|
||||
|
||||
# ============================================================================
|
||||
|
|
@ -204,7 +204,7 @@ class CoachingBadge(PowerOnModel):
|
|||
mandateId: str = Field(description="Mandate ID")
|
||||
instanceId: str = Field(description="Feature instance ID")
|
||||
badgeKey: str = Field(description="Badge identifier, e.g. 'streak_7'")
|
||||
awardedAt: Optional[str] = Field(default=None)
|
||||
awardedAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
|
||||
|
||||
|
||||
# ============================================================================
|
||||
|
|
@ -238,14 +238,14 @@ class CreateTaskRequest(BaseModel):
|
|||
title: str
|
||||
description: Optional[str] = None
|
||||
priority: Optional[CoachingTaskPriority] = CoachingTaskPriority.MEDIUM
|
||||
dueDate: Optional[str] = None
|
||||
dueDate: Optional[float] = None
|
||||
|
||||
|
||||
class UpdateTaskRequest(BaseModel):
|
||||
title: Optional[str] = None
|
||||
description: Optional[str] = None
|
||||
priority: Optional[CoachingTaskPriority] = None
|
||||
dueDate: Optional[str] = None
|
||||
dueDate: Optional[float] = None
|
||||
|
||||
|
||||
class UpdateTaskStatusRequest(BaseModel):
|
||||
|
|
|
|||
|
|
@ -12,7 +12,7 @@ from typing import Dict, Any, List, Optional
|
|||
from modules.datamodels.datamodelUam import User
|
||||
from modules.connectors.connectorDbPostgre import DatabaseConnector
|
||||
from modules.shared.dbRegistry import registerDatabase
|
||||
from modules.shared.timeUtils import getIsoTimestamp
|
||||
from modules.shared.timeUtils import getIsoTimestamp, getUtcTimestamp
|
||||
from modules.shared.configuration import APP_CONFIG
|
||||
from modules.shared.i18nRegistry import resolveText, t
|
||||
|
||||
|
|
@ -112,7 +112,7 @@ class CommcoachObjects:
|
|||
CoachingSession,
|
||||
recordFilter={"contextId": contextId, "userId": userId},
|
||||
)
|
||||
records.sort(key=lambda r: r.get("startedAt") or r.get("createdAt") or "", reverse=True)
|
||||
records.sort(key=lambda r: r.get("startedAt") or 0, reverse=True)
|
||||
return records
|
||||
|
||||
def getSession(self, sessionId: str) -> Optional[Dict[str, Any]]:
|
||||
|
|
@ -129,7 +129,7 @@ class CommcoachObjects:
|
|||
def createSession(self, data: Dict[str, Any]) -> Dict[str, Any]:
|
||||
data["createdAt"] = getIsoTimestamp()
|
||||
data["updatedAt"] = getIsoTimestamp()
|
||||
data["startedAt"] = getIsoTimestamp()
|
||||
data["startedAt"] = getUtcTimestamp()
|
||||
return self.db.recordCreate(CoachingSession, data)
|
||||
|
||||
def updateSession(self, sessionId: str, updates: Dict[str, Any]) -> Optional[Dict[str, Any]]:
|
||||
|
|
@ -281,7 +281,7 @@ class CommcoachObjects:
|
|||
def getBadges(self, userId: str, instanceId: str) -> List[Dict[str, Any]]:
|
||||
from .datamodelCommcoach import CoachingBadge
|
||||
records = self.db.getRecordset(CoachingBadge, recordFilter={"userId": userId, "instanceId": instanceId})
|
||||
records.sort(key=lambda r: r.get("awardedAt") or "", reverse=True)
|
||||
records.sort(key=lambda r: r.get("awardedAt") or 0, reverse=True)
|
||||
return records
|
||||
|
||||
def hasBadge(self, userId: str, instanceId: str, badgeKey: str) -> bool:
|
||||
|
|
@ -291,7 +291,7 @@ class CommcoachObjects:
|
|||
|
||||
def awardBadge(self, data: Dict[str, Any]) -> Dict[str, Any]:
|
||||
from .datamodelCommcoach import CoachingBadge
|
||||
data["awardedAt"] = getIsoTimestamp()
|
||||
data["awardedAt"] = getUtcTimestamp()
|
||||
data["createdAt"] = getIsoTimestamp()
|
||||
return self.db.recordCreate(CoachingBadge, data)
|
||||
|
||||
|
|
|
|||
|
|
@ -471,10 +471,10 @@ async def cancelSession(
|
|||
raise HTTPException(status_code=404, detail=routeApiMsg("Session not found"))
|
||||
_validateOwnership(session, context)
|
||||
|
||||
from modules.shared.timeUtils import getIsoTimestamp
|
||||
from modules.shared.timeUtils import getUtcTimestamp
|
||||
interface.updateSession(sessionId, {
|
||||
"status": CoachingSessionStatus.CANCELLED.value,
|
||||
"endedAt": getIsoTimestamp(),
|
||||
"endedAt": getUtcTimestamp(),
|
||||
})
|
||||
return {"cancelled": True}
|
||||
|
||||
|
|
@ -768,8 +768,8 @@ async def updateTaskStatus(
|
|||
|
||||
updates = {"status": body.status.value}
|
||||
if body.status == CoachingTaskStatus.DONE:
|
||||
from modules.shared.timeUtils import getIsoTimestamp
|
||||
updates["completedAt"] = getIsoTimestamp()
|
||||
from modules.shared.timeUtils import getUtcTimestamp
|
||||
updates["completedAt"] = getUtcTimestamp()
|
||||
|
||||
updated = interface.updateTask(taskId, updates)
|
||||
return {"task": updated}
|
||||
|
|
|
|||
|
|
@ -14,7 +14,7 @@ from typing import Optional, Dict, Any, List
|
|||
|
||||
from modules.datamodels.datamodelUam import User
|
||||
from modules.datamodels.datamodelAi import AiCallRequest, AiCallOptions, OperationTypeEnum, PriorityEnum
|
||||
from modules.shared.timeUtils import getIsoTimestamp
|
||||
from modules.shared.timeUtils import getIsoTimestamp, getUtcTimestamp
|
||||
|
||||
from .datamodelCommcoach import (
|
||||
CoachingMessage, CoachingMessageRole, CoachingMessageContentType,
|
||||
|
|
@ -1107,7 +1107,7 @@ class CommcoachService:
|
|||
if len(messages) < 2:
|
||||
interface.updateSession(sessionId, {
|
||||
"status": CoachingSessionStatus.COMPLETED.value,
|
||||
"endedAt": getIsoTimestamp(),
|
||||
"endedAt": getUtcTimestamp(),
|
||||
"compressedHistorySummary": None,
|
||||
"compressedHistoryUpToMessageCount": None,
|
||||
})
|
||||
|
|
@ -1252,21 +1252,18 @@ class CommcoachService:
|
|||
logger.warning(f"Coaching session indexing failed (non-blocking): {e}")
|
||||
|
||||
# Calculate duration
|
||||
startedAt = session.get("startedAt", "")
|
||||
startedAt = session.get("startedAt")
|
||||
durationSeconds = 0
|
||||
if startedAt:
|
||||
try:
|
||||
from datetime import datetime
|
||||
start = datetime.fromisoformat(startedAt.replace("Z", "+00:00"))
|
||||
end = datetime.now(start.tzinfo) if start.tzinfo else datetime.now()
|
||||
durationSeconds = int((end - start).total_seconds())
|
||||
except Exception:
|
||||
pass
|
||||
from datetime import datetime, timezone
|
||||
start = datetime.fromtimestamp(startedAt, tz=timezone.utc)
|
||||
end = datetime.now(timezone.utc)
|
||||
durationSeconds = int((end - start).total_seconds())
|
||||
|
||||
# Update session - clear compressed history so it never leaks into new sessions
|
||||
sessionUpdates = {
|
||||
"status": CoachingSessionStatus.COMPLETED.value,
|
||||
"endedAt": getIsoTimestamp(),
|
||||
"endedAt": getUtcTimestamp(),
|
||||
"summary": summary,
|
||||
"durationSeconds": durationSeconds,
|
||||
"messageCount": len(messages),
|
||||
|
|
@ -1285,7 +1282,7 @@ class CommcoachService:
|
|||
completedCount = len([s for s in allSessions if s.get("status") == CoachingSessionStatus.COMPLETED.value])
|
||||
interface.updateContext(contextId, {
|
||||
"sessionCount": completedCount,
|
||||
"lastSessionAt": getIsoTimestamp(),
|
||||
"lastSessionAt": getUtcTimestamp(),
|
||||
})
|
||||
|
||||
# Update user profile streak
|
||||
|
|
@ -1324,26 +1321,23 @@ class CommcoachService:
|
|||
if not profile:
|
||||
profile = interface.getOrCreateProfile(self.userId, self.mandateId, self.instanceId)
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
from datetime import datetime, timezone
|
||||
|
||||
lastSessionAt = profile.get("lastSessionAt")
|
||||
currentStreak = profile.get("streakDays", 0)
|
||||
longestStreak = profile.get("longestStreak", 0)
|
||||
totalSessions = profile.get("totalSessions", 0)
|
||||
|
||||
today = datetime.now().date()
|
||||
today = datetime.now(timezone.utc).date()
|
||||
isConsecutive = False
|
||||
|
||||
if lastSessionAt:
|
||||
try:
|
||||
lastDate = datetime.fromisoformat(lastSessionAt.replace("Z", "+00:00")).date()
|
||||
diff = (today - lastDate).days
|
||||
if diff == 1:
|
||||
isConsecutive = True
|
||||
elif diff == 0:
|
||||
isConsecutive = True # Same day, maintain streak
|
||||
except Exception:
|
||||
pass
|
||||
lastDate = datetime.fromtimestamp(lastSessionAt, tz=timezone.utc).date()
|
||||
diff = (today - lastDate).days
|
||||
if diff == 1:
|
||||
isConsecutive = True
|
||||
elif diff == 0:
|
||||
isConsecutive = True
|
||||
|
||||
newStreak = (currentStreak + 1) if isConsecutive else 1
|
||||
newLongest = max(longestStreak, newStreak)
|
||||
|
|
@ -1352,7 +1346,7 @@ class CommcoachService:
|
|||
"streakDays": newStreak,
|
||||
"longestStreak": newLongest,
|
||||
"totalSessions": totalSessions + 1,
|
||||
"lastSessionAt": getIsoTimestamp(),
|
||||
"lastSessionAt": getUtcTimestamp(),
|
||||
})
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to update streak: {e}")
|
||||
|
|
@ -1418,14 +1412,13 @@ class CommcoachService:
|
|||
completedSessions = [s for s in allSessions if s.get("status") == CoachingSessionStatus.COMPLETED.value]
|
||||
|
||||
for s in completedSessions:
|
||||
startedAt = s.get("startedAt") or s.get("createdAt") or ""
|
||||
startedAt = s.get("startedAt")
|
||||
if startedAt:
|
||||
try:
|
||||
from datetime import datetime
|
||||
dt = datetime.fromisoformat(str(startedAt).replace("Z", "+00:00"))
|
||||
s["date"] = dt.strftime("%d.%m.%Y")
|
||||
except Exception:
|
||||
s["date"] = ""
|
||||
from datetime import datetime, timezone
|
||||
dt = datetime.fromtimestamp(startedAt, tz=timezone.utc)
|
||||
s["date"] = dt.strftime("%d.%m.%Y")
|
||||
else:
|
||||
s["date"] = ""
|
||||
|
||||
result = {
|
||||
"intent": intent,
|
||||
|
|
|
|||
|
|
@ -206,14 +206,11 @@ Tool-Nutzung:
|
|||
|
||||
if retrievedSession:
|
||||
dateStr = ""
|
||||
startedAt = retrievedSession.get("startedAt") or retrievedSession.get("createdAt")
|
||||
startedAt = retrievedSession.get("startedAt")
|
||||
if startedAt:
|
||||
try:
|
||||
from datetime import datetime
|
||||
dt = datetime.fromisoformat(str(startedAt).replace("Z", "+00:00"))
|
||||
dateStr = dt.strftime("%d.%m.%Y")
|
||||
except Exception:
|
||||
pass
|
||||
from datetime import datetime, timezone
|
||||
dt = datetime.fromtimestamp(startedAt, tz=timezone.utc)
|
||||
dateStr = dt.strftime("%d.%m.%Y")
|
||||
prompt += f"\n\nVom Benutzer angefragte Session ({dateStr}):"
|
||||
prompt += f"\n{retrievedSession.get('summary', '')[:500]}"
|
||||
|
||||
|
|
|
|||
|
|
@ -7,7 +7,7 @@ Intent detection, retrieval strategies, and context assembly for intelligent ses
|
|||
|
||||
import re
|
||||
import logging
|
||||
from datetime import datetime
|
||||
from datetime import datetime, timezone
|
||||
from typing import Optional, Dict, Any, List, Tuple
|
||||
from enum import Enum
|
||||
|
||||
|
|
@ -106,18 +106,15 @@ def findSessionByDate(
|
|||
for s in sessions:
|
||||
if s.get("status") != "completed":
|
||||
continue
|
||||
startedAt = s.get("startedAt") or s.get("endedAt") or s.get("createdAt")
|
||||
startedAt = s.get("startedAt") or s.get("endedAt")
|
||||
if not startedAt:
|
||||
continue
|
||||
try:
|
||||
dt = datetime.fromisoformat(startedAt.replace("Z", "+00:00"))
|
||||
sessionDate = dt.date()
|
||||
diff = abs((sessionDate - targetDateOnly).days)
|
||||
if bestDiff is None or diff < bestDiff:
|
||||
bestDiff = diff
|
||||
bestMatch = s
|
||||
except Exception:
|
||||
continue
|
||||
dt = datetime.fromtimestamp(startedAt, tz=timezone.utc)
|
||||
sessionDate = dt.date()
|
||||
diff = abs((sessionDate - targetDateOnly).days)
|
||||
if bestDiff is None or diff < bestDiff:
|
||||
bestDiff = diff
|
||||
bestMatch = s
|
||||
|
||||
return bestMatch
|
||||
|
||||
|
|
@ -231,17 +228,14 @@ def buildSessionSummariesForPrompt(
|
|||
and s.get("summary")
|
||||
and s.get("id") != excludeSessionId
|
||||
]
|
||||
completed.sort(key=lambda x: x.get("startedAt") or x.get("createdAt") or "", reverse=True)
|
||||
completed.sort(key=lambda x: x.get("startedAt") or 0, reverse=True)
|
||||
result = []
|
||||
for s in completed[:limit]:
|
||||
startedAt = s.get("startedAt") or s.get("createdAt") or ""
|
||||
startedAt = s.get("startedAt")
|
||||
dateStr = ""
|
||||
if startedAt:
|
||||
try:
|
||||
dt = datetime.fromisoformat(startedAt.replace("Z", "+00:00"))
|
||||
dateStr = dt.strftime("%d.%m.%Y")
|
||||
except Exception:
|
||||
pass
|
||||
dt = datetime.fromtimestamp(startedAt, tz=timezone.utc)
|
||||
dateStr = dt.strftime("%d.%m.%Y")
|
||||
result.append({
|
||||
"summary": s.get("summary", ""),
|
||||
"date": dateStr,
|
||||
|
|
|
|||
|
|
@ -8,7 +8,7 @@ Generates Markdown and PDF exports for dossiers and sessions.
|
|||
import logging
|
||||
import json
|
||||
from typing import Dict, Any, List, Optional
|
||||
from datetime import datetime
|
||||
from datetime import datetime, timezone
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
|
@ -49,7 +49,7 @@ def buildDossierMarkdown(context: Dict[str, Any], sessions: List[Dict[str, Any]]
|
|||
lines.append(f"- {text}")
|
||||
|
||||
completedSessions = [s for s in sessions if s.get("status") == "completed"]
|
||||
completedSessions.sort(key=lambda s: s.get("startedAt") or s.get("createdAt") or "")
|
||||
completedSessions.sort(key=lambda s: s.get("startedAt") or 0)
|
||||
if completedSessions:
|
||||
lines += ["", "## Sessions", ""]
|
||||
for i, s in enumerate(completedSessions, 1):
|
||||
|
|
@ -227,14 +227,14 @@ def _mdToXml(text: str) -> str:
|
|||
|
||||
|
||||
|
||||
def _formatDate(isoStr: Optional[str]) -> str:
|
||||
if not isoStr:
|
||||
return datetime.now().strftime("%d.%m.%Y")
|
||||
try:
|
||||
dt = datetime.fromisoformat(str(isoStr).replace("Z", "+00:00"))
|
||||
def _formatDate(val) -> str:
|
||||
if not val:
|
||||
return datetime.now(timezone.utc).strftime("%d.%m.%Y")
|
||||
if isinstance(val, (int, float)):
|
||||
dt = datetime.fromtimestamp(float(val), tz=timezone.utc)
|
||||
return dt.strftime("%d.%m.%Y")
|
||||
except Exception:
|
||||
return isoStr
|
||||
dt = datetime.fromisoformat(str(val).replace("Z", "+00:00"))
|
||||
return dt.strftime("%d.%m.%Y")
|
||||
|
||||
|
||||
def _parseJson(value, fallback):
|
||||
|
|
|
|||
|
|
@ -68,9 +68,7 @@ class AutoWorkflow(PowerOnModel):
|
|||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"label": "Mandanten-ID",
|
||||
"fk_label_field": "label",
|
||||
"fk_model": "Mandate",
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
featureInstanceId: str = Field(
|
||||
|
|
@ -80,9 +78,7 @@ class AutoWorkflow(PowerOnModel):
|
|||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"label": "Feature-Instanz-ID",
|
||||
"fk_label_field": "label",
|
||||
"fk_model": "FeatureInstance",
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
label: str = Field(
|
||||
|
|
@ -112,7 +108,7 @@ class AutoWorkflow(PowerOnModel):
|
|||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"label": "Vorlagen-Quelle",
|
||||
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoWorkflow"},
|
||||
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoWorkflow", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
templateScope: Optional[str] = Field(
|
||||
|
|
@ -133,7 +129,7 @@ class AutoWorkflow(PowerOnModel):
|
|||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"label": "Aktuelle Version",
|
||||
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoVersion"},
|
||||
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoVersion", "labelField": "versionNumber"},
|
||||
},
|
||||
)
|
||||
active: bool = Field(
|
||||
|
|
@ -182,7 +178,7 @@ class AutoVersion(PowerOnModel):
|
|||
"frontend_readonly": True,
|
||||
"frontend_required": True,
|
||||
"label": "Workflow-ID",
|
||||
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoWorkflow"},
|
||||
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoWorkflow", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
versionNumber: int = Field(
|
||||
|
|
@ -208,7 +204,7 @@ class AutoVersion(PowerOnModel):
|
|||
publishedAt: Optional[float] = Field(
|
||||
default=None,
|
||||
description="Timestamp when version was published",
|
||||
json_schema_extra={"frontend_type": "datetime", "frontend_readonly": True, "frontend_required": False, "label": "Veröffentlicht am"},
|
||||
json_schema_extra={"frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False, "label": "Veröffentlicht am"},
|
||||
)
|
||||
publishedBy: Optional[str] = Field(
|
||||
default=None,
|
||||
|
|
@ -218,9 +214,7 @@ class AutoVersion(PowerOnModel):
|
|||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"label": "Veröffentlicht von",
|
||||
"fk_model": "User",
|
||||
"fk_label_field": "username",
|
||||
"fk_target": {"db": "poweron_app", "table": "User"},
|
||||
"fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
|
||||
},
|
||||
)
|
||||
|
||||
|
|
@ -243,7 +237,7 @@ class AutoRun(PowerOnModel):
|
|||
"frontend_readonly": True,
|
||||
"frontend_required": True,
|
||||
"label": "Workflow-ID",
|
||||
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoWorkflow"},
|
||||
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoWorkflow", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
label: Optional[str] = Field(
|
||||
|
|
@ -259,9 +253,7 @@ class AutoRun(PowerOnModel):
|
|||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"label": "Mandanten-ID",
|
||||
"fk_label_field": "label",
|
||||
"fk_model": "Mandate",
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
ownerId: Optional[str] = Field(
|
||||
|
|
@ -272,9 +264,7 @@ class AutoRun(PowerOnModel):
|
|||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"label": "Auslöser",
|
||||
"fk_model": "User",
|
||||
"fk_label_field": "username",
|
||||
"fk_target": {"db": "poweron_app", "table": "User"},
|
||||
"fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
|
||||
},
|
||||
)
|
||||
versionId: Optional[str] = Field(
|
||||
|
|
@ -285,7 +275,7 @@ class AutoRun(PowerOnModel):
|
|||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"label": "Versions-ID",
|
||||
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoVersion"},
|
||||
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoVersion", "labelField": "versionNumber"},
|
||||
},
|
||||
)
|
||||
status: str = Field(
|
||||
|
|
@ -301,12 +291,12 @@ class AutoRun(PowerOnModel):
|
|||
startedAt: Optional[float] = Field(
|
||||
default=None,
|
||||
description="Run start timestamp",
|
||||
json_schema_extra={"frontend_type": "datetime", "frontend_readonly": True, "frontend_required": False, "label": "Gestartet am"},
|
||||
json_schema_extra={"frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False, "label": "Gestartet am"},
|
||||
)
|
||||
completedAt: Optional[float] = Field(
|
||||
default=None,
|
||||
description="Run completion timestamp",
|
||||
json_schema_extra={"frontend_type": "datetime", "frontend_readonly": True, "frontend_required": False, "label": "Abgeschlossen am"},
|
||||
json_schema_extra={"frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False, "label": "Abgeschlossen am"},
|
||||
)
|
||||
nodeOutputs: Dict[str, Any] = Field(
|
||||
default_factory=dict,
|
||||
|
|
@ -358,7 +348,7 @@ class AutoStepLog(PowerOnModel):
|
|||
"frontend_readonly": True,
|
||||
"frontend_required": True,
|
||||
"label": "Lauf-ID",
|
||||
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoRun"},
|
||||
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoRun", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
nodeId: str = Field(
|
||||
|
|
@ -392,12 +382,12 @@ class AutoStepLog(PowerOnModel):
|
|||
startedAt: Optional[float] = Field(
|
||||
default=None,
|
||||
description="Step start timestamp",
|
||||
json_schema_extra={"frontend_type": "datetime", "frontend_readonly": True, "frontend_required": False, "label": "Gestartet am"},
|
||||
json_schema_extra={"frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False, "label": "Gestartet am"},
|
||||
)
|
||||
completedAt: Optional[float] = Field(
|
||||
default=None,
|
||||
description="Step completion timestamp",
|
||||
json_schema_extra={"frontend_type": "datetime", "frontend_readonly": True, "frontend_required": False, "label": "Abgeschlossen am"},
|
||||
json_schema_extra={"frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False, "label": "Abgeschlossen am"},
|
||||
)
|
||||
durationMs: Optional[int] = Field(
|
||||
default=None,
|
||||
|
|
@ -434,7 +424,7 @@ class AutoTask(PowerOnModel):
|
|||
"frontend_readonly": True,
|
||||
"frontend_required": True,
|
||||
"label": "Lauf-ID",
|
||||
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoRun"},
|
||||
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoRun", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
workflowId: str = Field(
|
||||
|
|
@ -444,7 +434,7 @@ class AutoTask(PowerOnModel):
|
|||
"frontend_readonly": True,
|
||||
"frontend_required": True,
|
||||
"label": "Workflow-ID",
|
||||
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoWorkflow"},
|
||||
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoWorkflow", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
nodeId: str = Field(
|
||||
|
|
@ -468,7 +458,7 @@ class AutoTask(PowerOnModel):
|
|||
"frontend_readonly": False,
|
||||
"frontend_required": False,
|
||||
"label": "Zugewiesen an",
|
||||
"fk_target": {"db": "poweron_app", "table": "User"},
|
||||
"fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
|
||||
},
|
||||
)
|
||||
status: str = Field(
|
||||
|
|
@ -484,7 +474,7 @@ class AutoTask(PowerOnModel):
|
|||
expiresAt: Optional[float] = Field(
|
||||
default=None,
|
||||
description="Expiration timestamp for the task",
|
||||
json_schema_extra={"frontend_type": "datetime", "frontend_required": False, "label": "Läuft ab am"},
|
||||
json_schema_extra={"frontend_type": "timestamp", "frontend_required": False, "label": "Läuft ab am"},
|
||||
)
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -32,7 +32,7 @@ class DataNeutraliserConfig(PowerOnModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": True,
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
featureInstanceId: str = Field(
|
||||
|
|
@ -42,7 +42,7 @@ class DataNeutraliserConfig(PowerOnModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": True,
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
userId: str = Field(
|
||||
|
|
@ -52,7 +52,7 @@ class DataNeutraliserConfig(PowerOnModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": True,
|
||||
"fk_target": {"db": "poweron_app", "table": "User"},
|
||||
"fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
|
||||
},
|
||||
)
|
||||
enabled: bool = Field(
|
||||
|
|
@ -107,7 +107,7 @@ class DataNeutralizerAttributes(BaseModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": True,
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
featureInstanceId: str = Field(
|
||||
|
|
@ -117,7 +117,7 @@ class DataNeutralizerAttributes(BaseModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": True,
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
userId: str = Field(
|
||||
|
|
@ -127,7 +127,7 @@ class DataNeutralizerAttributes(BaseModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": True,
|
||||
"fk_target": {"db": "poweron_app", "table": "User"},
|
||||
"fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
|
||||
},
|
||||
)
|
||||
originalText: str = Field(
|
||||
|
|
@ -142,7 +142,7 @@ class DataNeutralizerAttributes(BaseModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"fk_target": {"db": "poweron_management", "table": "FileItem"},
|
||||
"fk_target": {"db": "poweron_management", "table": "FileItem", "labelField": "fileName"},
|
||||
},
|
||||
)
|
||||
patternType: str = Field(
|
||||
|
|
@ -160,16 +160,16 @@ class DataNeutralizationSnapshot(BaseModel):
|
|||
)
|
||||
mandateId: str = Field(
|
||||
description="Mandate scope",
|
||||
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate"}},
|
||||
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}},
|
||||
)
|
||||
featureInstanceId: str = Field(
|
||||
default="",
|
||||
description="Feature instance scope",
|
||||
json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
|
||||
json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}},
|
||||
)
|
||||
userId: str = Field(
|
||||
description="User who triggered neutralization",
|
||||
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "User"}},
|
||||
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"}},
|
||||
)
|
||||
sourceLabel: str = Field(
|
||||
description="Human label, e.g. 'Prompt', 'Kontext', 'Nachricht 3'",
|
||||
|
|
|
|||
|
|
@ -288,7 +288,7 @@ class Kanton(PowerOnModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": False,
|
||||
"frontend_required": False,
|
||||
"fk_target": {"db": "poweron_realestate", "table": "Land"},
|
||||
"fk_target": {"db": "poweron_realestate", "table": "Land", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
abk: Optional[str] = Field(
|
||||
|
|
@ -348,7 +348,7 @@ class Gemeinde(BaseModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": False,
|
||||
"frontend_required": False,
|
||||
"fk_target": {"db": "poweron_realestate", "table": "Kanton"},
|
||||
"fk_target": {"db": "poweron_realestate", "table": "Kanton", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
plz: Optional[str] = Field(
|
||||
|
|
@ -398,7 +398,7 @@ class Parzelle(PowerOnModel):
|
|||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"label": "Mandats-ID",
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
featureInstanceId: str = Field(
|
||||
|
|
@ -408,7 +408,7 @@ class Parzelle(PowerOnModel):
|
|||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"label": "Feature-Instanz-ID",
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
|
||||
|
|
@ -472,7 +472,7 @@ class Parzelle(PowerOnModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": False,
|
||||
"frontend_required": False,
|
||||
"fk_target": {"db": "poweron_realestate", "table": "Gemeinde"},
|
||||
"fk_target": {"db": "poweron_realestate", "table": "Gemeinde", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
|
||||
|
|
@ -638,7 +638,7 @@ class Projekt(PowerOnModel):
|
|||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"label": "Mandats-ID",
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
featureInstanceId: str = Field(
|
||||
|
|
@ -648,7 +648,7 @@ class Projekt(PowerOnModel):
|
|||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"label": "Feature-Instanz-ID",
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
label: str = Field(
|
||||
|
|
|
|||
|
|
@ -228,31 +228,27 @@ def get_projects(
|
|||
recordFilter = {"featureInstanceId": instanceId}
|
||||
|
||||
if mode in ("filterValues", "ids"):
|
||||
from modules.routes.routeHelpers import handleFilterValuesInMemory, handleIdsInMemory
|
||||
from modules.routes.routeHelpers import handleFilterValuesInMemory, handleIdsInMemory, enrichRowsWithFkLabels
|
||||
items = interface.getProjekte(recordFilter=recordFilter)
|
||||
itemDicts = [i.model_dump() if hasattr(i, 'model_dump') else i for i in items]
|
||||
if mode == "filterValues":
|
||||
if not column:
|
||||
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
|
||||
enrichRowsWithFkLabels(itemDicts, Projekt)
|
||||
return handleFilterValuesInMemory(itemDicts, column, pagination)
|
||||
return handleIdsInMemory(itemDicts, pagination)
|
||||
|
||||
items = interface.getProjekte(recordFilter=recordFilter)
|
||||
paginationParams = _parsePagination(pagination)
|
||||
if paginationParams:
|
||||
if paginationParams.sort:
|
||||
for sort_field in reversed(paginationParams.sort):
|
||||
field_name = sort_field.field
|
||||
direction = sort_field.direction.lower()
|
||||
items.sort(
|
||||
key=lambda x: getattr(x, field_name, None),
|
||||
reverse=(direction == "desc")
|
||||
)
|
||||
total_items = len(items)
|
||||
from modules.routes.routeHelpers import applyFiltersAndSort
|
||||
itemDicts = [i.model_dump() if hasattr(i, 'model_dump') else i for i in items]
|
||||
filtered = applyFiltersAndSort(itemDicts, paginationParams)
|
||||
total_items = len(filtered)
|
||||
total_pages = (total_items + paginationParams.pageSize - 1) // paginationParams.pageSize
|
||||
start_idx = (paginationParams.page - 1) * paginationParams.pageSize
|
||||
end_idx = start_idx + paginationParams.pageSize
|
||||
paginated_items = items[start_idx:end_idx]
|
||||
paginated_items = filtered[start_idx:end_idx]
|
||||
return PaginatedResponse(
|
||||
items=paginated_items,
|
||||
pagination=PaginationMetadata(
|
||||
|
|
@ -373,31 +369,27 @@ def get_parcels(
|
|||
recordFilter = {"featureInstanceId": instanceId}
|
||||
|
||||
if mode in ("filterValues", "ids"):
|
||||
from modules.routes.routeHelpers import handleFilterValuesInMemory, handleIdsInMemory
|
||||
from modules.routes.routeHelpers import handleFilterValuesInMemory, handleIdsInMemory, enrichRowsWithFkLabels
|
||||
items = interface.getParzellen(recordFilter=recordFilter)
|
||||
itemDicts = [i.model_dump() if hasattr(i, 'model_dump') else i for i in items]
|
||||
if mode == "filterValues":
|
||||
if not column:
|
||||
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
|
||||
enrichRowsWithFkLabels(itemDicts, Parzelle)
|
||||
return handleFilterValuesInMemory(itemDicts, column, pagination)
|
||||
return handleIdsInMemory(itemDicts, pagination)
|
||||
|
||||
items = interface.getParzellen(recordFilter=recordFilter)
|
||||
paginationParams = _parsePagination(pagination)
|
||||
if paginationParams:
|
||||
if paginationParams.sort:
|
||||
for sort_field in reversed(paginationParams.sort):
|
||||
field_name = sort_field.field
|
||||
direction = sort_field.direction.lower()
|
||||
items.sort(
|
||||
key=lambda x: getattr(x, field_name, None),
|
||||
reverse=(direction == "desc")
|
||||
)
|
||||
total_items = len(items)
|
||||
from modules.routes.routeHelpers import applyFiltersAndSort
|
||||
itemDicts = [i.model_dump() if hasattr(i, 'model_dump') else i for i in items]
|
||||
filtered = applyFiltersAndSort(itemDicts, paginationParams)
|
||||
total_items = len(filtered)
|
||||
total_pages = (total_items + paginationParams.pageSize - 1) // paginationParams.pageSize
|
||||
start_idx = (paginationParams.page - 1) * paginationParams.pageSize
|
||||
end_idx = start_idx + paginationParams.pageSize
|
||||
paginated_items = items[start_idx:end_idx]
|
||||
paginated_items = filtered[start_idx:end_idx]
|
||||
return PaginatedResponse(
|
||||
items=paginated_items,
|
||||
pagination=PaginationMetadata(
|
||||
|
|
|
|||
|
|
@ -75,7 +75,7 @@ class RedmineInstanceConfig(PowerOnModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": True,
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
mandateId: Optional[str] = Field(
|
||||
|
|
@ -86,7 +86,7 @@ class RedmineInstanceConfig(PowerOnModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
baseUrl: str = Field(
|
||||
|
|
@ -195,7 +195,7 @@ class RedmineTicketMirror(PowerOnModel):
|
|||
featureInstanceId: str = Field(
|
||||
description="FK -> FeatureInstance.id",
|
||||
json_schema_extra={"label": "Feature-Instanz", "frontend_type": "text", "frontend_readonly": True, "frontend_required": True,
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}},
|
||||
)
|
||||
mandateId: Optional[str] = Field(
|
||||
default=None,
|
||||
|
|
@ -226,14 +226,14 @@ class RedmineTicketMirror(PowerOnModel):
|
|||
closedOnTs: Optional[float] = Field(
|
||||
default=None,
|
||||
description="Best-effort UTC epoch when the ticket transitioned to a closed status. Approximated as updatedOnTs for closed tickets at sync time; used by Stats to render the open-vs-total snapshot chart.",
|
||||
json_schema_extra={"label": "closedOn (epoch)", "frontend_type": "number", "frontend_readonly": True, "frontend_required": False, "frontend_hidden": True},
|
||||
json_schema_extra={"label": "closedOn (epoch)", "frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False, "frontend_hidden": True},
|
||||
)
|
||||
createdOn: Optional[str] = Field(default=None, json_schema_extra={"label": "Erstellt am (Redmine)", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
|
||||
updatedOn: Optional[str] = Field(default=None, json_schema_extra={"label": "Geaendert am (Redmine)", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
|
||||
createdOnTs: Optional[float] = Field(default=None, description="UTC epoch parsed from createdOn (for SQL filtering)",
|
||||
json_schema_extra={"label": "createdOn (epoch)", "frontend_type": "number", "frontend_readonly": True, "frontend_required": False, "frontend_hidden": True})
|
||||
json_schema_extra={"label": "createdOn (epoch)", "frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False, "frontend_hidden": True})
|
||||
updatedOnTs: Optional[float] = Field(default=None, description="UTC epoch parsed from updatedOn (for SQL filtering)",
|
||||
json_schema_extra={"label": "updatedOn (epoch)", "frontend_type": "number", "frontend_readonly": True, "frontend_required": False, "frontend_hidden": True})
|
||||
json_schema_extra={"label": "updatedOn (epoch)", "frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False, "frontend_hidden": True})
|
||||
customFields: Optional[List[Dict[str, Any]]] = Field(
|
||||
default=None,
|
||||
description="List of {id,name,value} as returned by Redmine; stored as JSON",
|
||||
|
|
@ -270,7 +270,7 @@ class RedmineRelationMirror(PowerOnModel):
|
|||
featureInstanceId: str = Field(
|
||||
description="FK -> FeatureInstance.id",
|
||||
json_schema_extra={"label": "Feature-Instanz", "frontend_type": "text", "frontend_readonly": True, "frontend_required": True,
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}},
|
||||
)
|
||||
redmineRelationId: int = Field(
|
||||
description="Redmine relation id (unique per feature instance)",
|
||||
|
|
@ -468,17 +468,17 @@ class RedmineSyncResultDto(BaseModel):
|
|||
ticketsUpserted: int = 0
|
||||
relationsUpserted: int = 0
|
||||
durationMs: int = 0
|
||||
lastSyncAt: float
|
||||
lastSyncAt: float = Field(json_schema_extra={"frontend_type": "timestamp"})
|
||||
error: Optional[str] = None
|
||||
|
||||
|
||||
class RedmineSyncStatusDto(BaseModel):
|
||||
instanceId: str
|
||||
lastSyncAt: Optional[float] = None
|
||||
lastFullSyncAt: Optional[float] = None
|
||||
lastSyncAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
|
||||
lastFullSyncAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
|
||||
lastSyncDurationMs: Optional[int] = None
|
||||
lastSyncTicketCount: Optional[int] = None
|
||||
lastSyncErrorAt: Optional[float] = None
|
||||
lastSyncErrorAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
|
||||
lastSyncErrorMessage: Optional[str] = None
|
||||
mirroredTicketCount: int = 0
|
||||
mirroredRelationCount: int = 0
|
||||
|
|
@ -513,11 +513,11 @@ class RedmineConfigDto(BaseModel):
|
|||
rootTrackerName: str = "Userstory"
|
||||
defaultPeriodValue: Optional[Dict[str, Any]] = None
|
||||
schemaCacheTtlSeconds: int = 24 * 60 * 60
|
||||
schemaCachedAt: Optional[float] = None
|
||||
schemaCachedAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
|
||||
isActive: bool = True
|
||||
lastConnectedAt: Optional[float] = None
|
||||
lastSyncAt: Optional[float] = None
|
||||
lastFullSyncAt: Optional[float] = None
|
||||
lastConnectedAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
|
||||
lastSyncAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
|
||||
lastFullSyncAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
|
||||
lastSyncTicketCount: Optional[int] = None
|
||||
lastSyncErrorMessage: Optional[str] = None
|
||||
|
||||
|
|
|
|||
|
|
@ -91,8 +91,8 @@ class TeamsbotSession(PowerOnModel):
|
|||
meetingLink: str = Field(description="Teams meeting join link")
|
||||
botName: str = Field(default="AI Assistant", description="Display name of the bot in the meeting")
|
||||
status: TeamsbotSessionStatus = Field(default=TeamsbotSessionStatus.PENDING, description="Current session status")
|
||||
startedAt: Optional[str] = Field(default=None, description="ISO timestamp when session started")
|
||||
endedAt: Optional[str] = Field(default=None, description="ISO timestamp when session ended")
|
||||
startedAt: Optional[float] = Field(default=None, description="UTC unix timestamp when session started", json_schema_extra={"frontend_type": "timestamp"})
|
||||
endedAt: Optional[float] = Field(default=None, description="UTC unix timestamp when session ended", json_schema_extra={"frontend_type": "timestamp"})
|
||||
startedByUserId: str = Field(description="User ID who started the session")
|
||||
bridgeSessionId: Optional[str] = Field(default=None, description="Session ID on the .NET Media Bridge")
|
||||
meetingChatId: Optional[str] = Field(default=None, description="Teams meeting chat ID for Graph API messages")
|
||||
|
|
@ -109,7 +109,7 @@ class TeamsbotTranscript(PowerOnModel):
|
|||
sessionId: str = Field(description="Session ID (FK)")
|
||||
speaker: Optional[str] = Field(default=None, description="Speaker name or identifier")
|
||||
text: str = Field(description="Transcribed text")
|
||||
timestamp: str = Field(description="ISO timestamp of the speech segment")
|
||||
timestamp: float = Field(description="UTC unix timestamp of the speech segment", json_schema_extra={"frontend_type": "timestamp"})
|
||||
confidence: float = Field(default=0.0, ge=0.0, le=1.0, description="STT confidence score")
|
||||
language: Optional[str] = Field(default=None, description="Detected language code (e.g., de-DE)")
|
||||
isFinal: bool = Field(default=True, description="Whether this is a final or interim result")
|
||||
|
|
@ -128,7 +128,7 @@ class TeamsbotBotResponse(PowerOnModel):
|
|||
modelName: Optional[str] = Field(default=None, description="AI model used for this response")
|
||||
processingTime: float = Field(default=0.0, description="Processing time in seconds")
|
||||
priceCHF: float = Field(default=0.0, description="Cost of this AI call in CHF")
|
||||
timestamp: Optional[str] = Field(default=None, description="ISO timestamp of the response")
|
||||
timestamp: Optional[float] = Field(default=None, description="UTC unix timestamp of the response", json_schema_extra={"frontend_type": "timestamp"})
|
||||
|
||||
|
||||
# ============================================================================
|
||||
|
|
@ -315,8 +315,8 @@ class TeamsbotDirectorPrompt(PowerOnModel):
|
|||
fileIds: List[str] = Field(default_factory=list, description="UDB-selected file/object IDs to attach as RAG context")
|
||||
status: TeamsbotDirectorPromptStatus = Field(default=TeamsbotDirectorPromptStatus.QUEUED, description="Lifecycle status")
|
||||
statusMessage: Optional[str] = Field(default=None, description="Optional error or status detail")
|
||||
createdAt: str = Field(default_factory=lambda: datetime.now(timezone.utc).isoformat(), description="ISO timestamp when created")
|
||||
consumedAt: Optional[str] = Field(default=None, description="ISO timestamp when consumed (one-shot) or marked done")
|
||||
createdAt: float = Field(default_factory=lambda: datetime.now(timezone.utc).timestamp(), description="UTC unix timestamp when created", json_schema_extra={"frontend_type": "timestamp"})
|
||||
consumedAt: Optional[float] = Field(default=None, description="UTC unix timestamp when consumed (one-shot) or marked done", json_schema_extra={"frontend_type": "timestamp"})
|
||||
agentRunId: Optional[str] = Field(default=None, description="Reference to the agent run that processed this prompt")
|
||||
responseText: Optional[str] = Field(default=None, description="Final agent text delivered to the meeting")
|
||||
|
||||
|
|
|
|||
|
|
@ -87,7 +87,7 @@ class TeamsbotObjects:
|
|||
if not includeEnded:
|
||||
records = [r for r in records if r.get("status") != TeamsbotSessionStatus.ENDED.value]
|
||||
# Sort by startedAt descending
|
||||
records.sort(key=lambda r: r.get("startedAt") or "", reverse=True)
|
||||
records.sort(key=lambda r: r.get("startedAt") or 0, reverse=True)
|
||||
return records
|
||||
|
||||
def getActiveSessions(self, instanceId: str) -> List[Dict[str, Any]]:
|
||||
|
|
@ -133,7 +133,7 @@ class TeamsbotObjects:
|
|||
TeamsbotTranscript,
|
||||
recordFilter={"sessionId": sessionId},
|
||||
)
|
||||
records.sort(key=lambda r: r.get("timestamp") or "")
|
||||
records.sort(key=lambda r: r.get("timestamp") or 0)
|
||||
if offset:
|
||||
records = records[offset:]
|
||||
if limit:
|
||||
|
|
@ -146,7 +146,7 @@ class TeamsbotObjects:
|
|||
TeamsbotTranscript,
|
||||
recordFilter={"sessionId": sessionId},
|
||||
)
|
||||
records.sort(key=lambda r: r.get("timestamp") or "")
|
||||
records.sort(key=lambda r: r.get("timestamp") or 0)
|
||||
return records[-count:]
|
||||
|
||||
def createTranscript(self, transcriptData: Dict[str, Any]) -> Dict[str, Any]:
|
||||
|
|
@ -176,7 +176,7 @@ class TeamsbotObjects:
|
|||
TeamsbotBotResponse,
|
||||
recordFilter={"sessionId": sessionId},
|
||||
)
|
||||
records.sort(key=lambda r: r.get("timestamp") or "")
|
||||
records.sort(key=lambda r: r.get("timestamp") or 0)
|
||||
return records
|
||||
|
||||
def createBotResponse(self, responseData: Dict[str, Any]) -> Dict[str, Any]:
|
||||
|
|
@ -293,7 +293,7 @@ class TeamsbotObjects:
|
|||
if operatorUserId:
|
||||
recordFilter["operatorUserId"] = operatorUserId
|
||||
records = self.db.getRecordset(TeamsbotDirectorPrompt, recordFilter=recordFilter)
|
||||
records.sort(key=lambda r: r.get("createdAt") or "")
|
||||
records.sort(key=lambda r: r.get("createdAt") or 0)
|
||||
return records
|
||||
|
||||
def getActivePersistentPrompts(self, sessionId: str) -> List[Dict[str, Any]]:
|
||||
|
|
@ -310,7 +310,7 @@ class TeamsbotObjects:
|
|||
TeamsbotDirectorPromptStatus.FAILED.value,
|
||||
}
|
||||
active = [r for r in records if r.get("status") not in terminal]
|
||||
active.sort(key=lambda r: r.get("createdAt") or "")
|
||||
active.sort(key=lambda r: r.get("createdAt") or 0)
|
||||
return active
|
||||
|
||||
def updateDirectorPrompt(self, promptId: str, updates: Dict[str, Any]) -> Optional[Dict[str, Any]]:
|
||||
|
|
|
|||
|
|
@ -11,13 +11,14 @@ import re
|
|||
import asyncio
|
||||
import time
|
||||
import base64
|
||||
from datetime import datetime, timezone
|
||||
from typing import Optional, Dict, Any, List, Callable
|
||||
|
||||
from fastapi import WebSocket
|
||||
|
||||
from modules.datamodels.datamodelUam import User
|
||||
from modules.datamodels.datamodelAi import AiCallRequest, AiCallOptions, OperationTypeEnum, PriorityEnum
|
||||
from modules.shared.timeUtils import getUtcTimestamp, getIsoTimestamp
|
||||
from modules.shared.timeUtils import getUtcTimestamp
|
||||
from modules.serviceCenter import getService as _getServiceCenterService
|
||||
from modules.serviceCenter.context import ServiceCenterContext
|
||||
|
||||
|
|
@ -554,7 +555,7 @@ async def _emitSessionEvent(sessionId: str, eventType: str, data: Any):
|
|||
Creates the queue on-demand so events are never silently dropped."""
|
||||
if sessionId not in sessionEvents:
|
||||
sessionEvents[sessionId] = asyncio.Queue()
|
||||
await sessionEvents[sessionId].put({"type": eventType, "data": data, "timestamp": getIsoTimestamp()})
|
||||
await sessionEvents[sessionId].put({"type": eventType, "data": data, "timestamp": getUtcTimestamp()})
|
||||
|
||||
|
||||
def _normalizeGatewayHostForBotWs(host: str) -> str:
|
||||
|
|
@ -780,7 +781,7 @@ class TeamsbotService:
|
|||
|
||||
interface.updateSession(sessionId, {
|
||||
"status": TeamsbotSessionStatus.ENDED.value,
|
||||
"endedAt": getIsoTimestamp(),
|
||||
"endedAt": getUtcTimestamp(),
|
||||
})
|
||||
await _emitSessionEvent(sessionId, "statusChange", {"status": "ended"})
|
||||
|
||||
|
|
@ -794,7 +795,7 @@ class TeamsbotService:
|
|||
interface.updateSession(sessionId, {
|
||||
"status": TeamsbotSessionStatus.ERROR.value,
|
||||
"errorMessage": str(e),
|
||||
"endedAt": getIsoTimestamp(),
|
||||
"endedAt": getUtcTimestamp(),
|
||||
})
|
||||
|
||||
# Cleanup event queue
|
||||
|
|
@ -855,7 +856,7 @@ class TeamsbotService:
|
|||
try:
|
||||
await _emitSessionEvent(sessionId, "botConnectionState", {
|
||||
"connected": True,
|
||||
"timestamp": getIsoTimestamp(),
|
||||
"timestamp": getUtcTimestamp(),
|
||||
})
|
||||
except Exception:
|
||||
pass
|
||||
|
|
@ -1029,7 +1030,7 @@ class TeamsbotService:
|
|||
"status": f"playback_{status}",
|
||||
"hasWebSocket": True,
|
||||
"message": ackMessage,
|
||||
"timestamp": playback.get("timestamp") or getIsoTimestamp(),
|
||||
"timestamp": playback.get("timestamp") or getUtcTimestamp(),
|
||||
"format": playback.get("format"),
|
||||
"bytesBase64": playback.get("bytesBase64"),
|
||||
})
|
||||
|
|
@ -1045,7 +1046,7 @@ class TeamsbotService:
|
|||
"mfaType": mfaType,
|
||||
"displayNumber": displayNumber,
|
||||
"prompt": prompt,
|
||||
"timestamp": getIsoTimestamp(),
|
||||
"timestamp": getUtcTimestamp(),
|
||||
})
|
||||
|
||||
from .routeFeatureTeamsbot import mfaCodeQueues, mfaWaitTasks
|
||||
|
|
@ -1094,7 +1095,7 @@ class TeamsbotService:
|
|||
"reason": reason,
|
||||
"message": errorData.get("message", "Chat message could not be sent"),
|
||||
"text": failedText,
|
||||
"timestamp": getIsoTimestamp(),
|
||||
"timestamp": getUtcTimestamp(),
|
||||
})
|
||||
|
||||
elif msgType == "mfaResolved":
|
||||
|
|
@ -1107,7 +1108,7 @@ class TeamsbotService:
|
|||
mfaCodeQueues.pop(sessionId, None)
|
||||
await _emitSessionEvent(sessionId, "mfaResolved", {
|
||||
"success": success,
|
||||
"timestamp": getIsoTimestamp(),
|
||||
"timestamp": getUtcTimestamp(),
|
||||
})
|
||||
|
||||
except Exception as e:
|
||||
|
|
@ -1122,7 +1123,7 @@ class TeamsbotService:
|
|||
try:
|
||||
await _emitSessionEvent(sessionId, "botConnectionState", {
|
||||
"connected": False,
|
||||
"timestamp": getIsoTimestamp(),
|
||||
"timestamp": getUtcTimestamp(),
|
||||
})
|
||||
except Exception:
|
||||
pass
|
||||
|
|
@ -1156,9 +1157,9 @@ class TeamsbotService:
|
|||
if errorMessage:
|
||||
updates["errorMessage"] = errorMessage
|
||||
if dbStatus == TeamsbotSessionStatus.ACTIVE.value:
|
||||
updates["startedAt"] = getIsoTimestamp()
|
||||
updates["startedAt"] = getUtcTimestamp()
|
||||
elif dbStatus in [TeamsbotSessionStatus.ENDED.value, TeamsbotSessionStatus.ERROR.value]:
|
||||
updates["endedAt"] = getIsoTimestamp()
|
||||
updates["endedAt"] = getUtcTimestamp()
|
||||
|
||||
interface.updateSession(sessionId, updates)
|
||||
await _emitSessionEvent(sessionId, "statusChange", {"status": status, "errorMessage": errorMessage})
|
||||
|
|
@ -1350,7 +1351,7 @@ class TeamsbotService:
|
|||
sessionId=sessionId,
|
||||
speaker=speaker,
|
||||
text=text,
|
||||
timestamp=getIsoTimestamp(),
|
||||
timestamp=getUtcTimestamp(),
|
||||
confidence=1.0,
|
||||
language=self.config.language,
|
||||
isFinal=True,
|
||||
|
|
@ -1363,7 +1364,7 @@ class TeamsbotService:
|
|||
"speaker": speaker,
|
||||
"text": text,
|
||||
"confidence": 1.0,
|
||||
"timestamp": getIsoTimestamp(),
|
||||
"timestamp": getUtcTimestamp(),
|
||||
"isContinuation": False,
|
||||
"source": "chatHistory",
|
||||
"isHistory": True,
|
||||
|
|
@ -1407,7 +1408,7 @@ class TeamsbotService:
|
|||
sessionId=sessionId,
|
||||
speaker=speaker,
|
||||
text=text,
|
||||
timestamp=getIsoTimestamp(),
|
||||
timestamp=getUtcTimestamp(),
|
||||
confidence=1.0,
|
||||
language=self.config.language,
|
||||
isFinal=isFinal,
|
||||
|
|
@ -1450,7 +1451,7 @@ class TeamsbotService:
|
|||
"speaker": speaker,
|
||||
"text": displayText,
|
||||
"confidence": 1.0,
|
||||
"timestamp": getIsoTimestamp(),
|
||||
"timestamp": getUtcTimestamp(),
|
||||
"isContinuation": isMerge,
|
||||
"source": source,
|
||||
"speakerResolvedFromHint": (
|
||||
|
|
@ -1690,7 +1691,7 @@ class TeamsbotService:
|
|||
await _emitSessionEvent(sessionId, "speechCancelled", {
|
||||
"reason": reason,
|
||||
"generation": gen,
|
||||
"timestamp": getIsoTimestamp(),
|
||||
"timestamp": getUtcTimestamp(),
|
||||
})
|
||||
except Exception:
|
||||
pass
|
||||
|
|
@ -2079,7 +2080,7 @@ class TeamsbotService:
|
|||
try:
|
||||
await _emitSessionEvent(sessionId, "quickAck", {
|
||||
"text": ackText,
|
||||
"timestamp": getIsoTimestamp(),
|
||||
"timestamp": getUtcTimestamp(),
|
||||
})
|
||||
cancelHook = self._makeAnswerCancelHook()
|
||||
async with self._meetingTtsLock:
|
||||
|
|
@ -2387,7 +2388,7 @@ class TeamsbotService:
|
|||
"status": "requested",
|
||||
"hasWebSocket": websocket is not None,
|
||||
"message": "TTS generation requested",
|
||||
"timestamp": getIsoTimestamp(),
|
||||
"timestamp": getUtcTimestamp(),
|
||||
})
|
||||
logger.info(
|
||||
f"Session {sessionId}: TTS requested (websocket_available={websocket is not None})"
|
||||
|
|
@ -2400,7 +2401,7 @@ class TeamsbotService:
|
|||
"status": "unavailable",
|
||||
"hasWebSocket": False,
|
||||
"message": "TTS skipped — bot websocket unavailable",
|
||||
"timestamp": getIsoTimestamp(),
|
||||
"timestamp": getUtcTimestamp(),
|
||||
})
|
||||
if not sendChat:
|
||||
sendChat = True
|
||||
|
|
@ -2428,7 +2429,7 @@ class TeamsbotService:
|
|||
"hasWebSocket": True,
|
||||
"chunks": ttsOutcome.get("chunks"),
|
||||
"played": ttsOutcome.get("played"),
|
||||
"timestamp": getIsoTimestamp(),
|
||||
"timestamp": getUtcTimestamp(),
|
||||
})
|
||||
else:
|
||||
logger.warning(
|
||||
|
|
@ -2440,7 +2441,7 @@ class TeamsbotService:
|
|||
"chunks": ttsOutcome.get("chunks"),
|
||||
"played": ttsOutcome.get("played"),
|
||||
"message": ttsOutcome.get("error"),
|
||||
"timestamp": getIsoTimestamp(),
|
||||
"timestamp": getUtcTimestamp(),
|
||||
})
|
||||
if not sendChat:
|
||||
sendChat = True # Fallback to chat if voice-only and TTS failed
|
||||
|
|
@ -2469,7 +2470,7 @@ class TeamsbotService:
|
|||
modelName=response.modelName,
|
||||
processingTime=response.processingTime,
|
||||
priceCHF=response.priceCHF,
|
||||
timestamp=getIsoTimestamp(),
|
||||
timestamp=getUtcTimestamp(),
|
||||
).model_dump()
|
||||
|
||||
createdResponse = interface.createBotResponse(botResponseData)
|
||||
|
|
@ -2501,7 +2502,7 @@ class TeamsbotService:
|
|||
sessionId=sessionId,
|
||||
speaker=self.config.botName,
|
||||
text=storedText,
|
||||
timestamp=getIsoTimestamp(),
|
||||
timestamp=getUtcTimestamp(),
|
||||
confidence=1.0,
|
||||
language=self.config.language,
|
||||
isFinal=True,
|
||||
|
|
@ -2520,7 +2521,7 @@ class TeamsbotService:
|
|||
"speaker": self.config.botName,
|
||||
"text": storedText,
|
||||
"confidence": 1.0,
|
||||
"timestamp": getIsoTimestamp(),
|
||||
"timestamp": getUtcTimestamp(),
|
||||
"isContinuation": False,
|
||||
"source": "botResponse",
|
||||
"speakerResolvedFromHint": False,
|
||||
|
|
@ -2557,7 +2558,7 @@ class TeamsbotService:
|
|||
modelName=response.modelName,
|
||||
processingTime=response.processingTime,
|
||||
priceCHF=response.priceCHF,
|
||||
timestamp=getIsoTimestamp(),
|
||||
timestamp=getUtcTimestamp(),
|
||||
).model_dump()
|
||||
createdResponse = interface.createBotResponse(botResponseData)
|
||||
await _emitSessionEvent(sessionId, "botResponse", {
|
||||
|
|
@ -2707,7 +2708,7 @@ class TeamsbotService:
|
|||
sessionId=sessionId,
|
||||
speaker=self.config.botName,
|
||||
text=chatText,
|
||||
timestamp=getIsoTimestamp(),
|
||||
timestamp=getUtcTimestamp(),
|
||||
confidence=1.0,
|
||||
language=self.config.language,
|
||||
isFinal=True,
|
||||
|
|
@ -2732,7 +2733,7 @@ class TeamsbotService:
|
|||
"speaker": self.config.botName,
|
||||
"text": chatText,
|
||||
"confidence": 1.0,
|
||||
"timestamp": getIsoTimestamp(),
|
||||
"timestamp": getUtcTimestamp(),
|
||||
"isContinuation": False,
|
||||
"source": "chat",
|
||||
"speakerResolvedFromHint": False,
|
||||
|
|
@ -2749,13 +2750,15 @@ class TeamsbotService:
|
|||
from . import interfaceFeatureTeamsbot as interfaceDb
|
||||
interface = interfaceDb.getInterface(self.currentUser, self.mandateId, self.instanceId)
|
||||
transcripts = interface.getTranscripts(sessionId)
|
||||
fromDt = params.get("fromdatetime") or params.get("fromDateTime")
|
||||
toDt = params.get("todatetime") or params.get("toDateTime")
|
||||
fromDtRaw = params.get("fromdatetime") or params.get("fromDateTime")
|
||||
toDtRaw = params.get("todatetime") or params.get("toDateTime")
|
||||
fromTs = datetime.fromisoformat(fromDtRaw).replace(tzinfo=timezone.utc).timestamp() if fromDtRaw else None
|
||||
toTs = datetime.fromisoformat(toDtRaw).replace(tzinfo=timezone.utc).timestamp() if toDtRaw else None
|
||||
chatOnly = [t for t in transcripts if t.get("source") in ("chat", "chatHistory")]
|
||||
if fromDt:
|
||||
chatOnly = [t for t in chatOnly if (t.get("timestamp") or "") >= fromDt]
|
||||
if toDt:
|
||||
chatOnly = [t for t in chatOnly if (t.get("timestamp") or "") <= toDt]
|
||||
if fromTs is not None:
|
||||
chatOnly = [t for t in chatOnly if (t.get("timestamp") or 0) >= fromTs]
|
||||
if toTs is not None:
|
||||
chatOnly = [t for t in chatOnly if (t.get("timestamp") or 0) <= toTs]
|
||||
summary = "\n".join(f"[{t.get('speaker', '?')}]: {t.get('text', '')}" for t in chatOnly[-20:])
|
||||
if not summary:
|
||||
summary = "Keine Chat-Nachrichten im angegebenen Zeitraum."
|
||||
|
|
@ -3002,7 +3005,7 @@ class TeamsbotService:
|
|||
"text": (prompt.get("text") or "").strip(),
|
||||
"fileIds": list(prompt.get("fileIds") or []),
|
||||
"note": (internalNote or meetingText or "").strip(),
|
||||
"recordedAt": getIsoTimestamp(),
|
||||
"recordedAt": getUtcTimestamp(),
|
||||
})
|
||||
if len(self._recentDirectorBriefings) > _RECENT_DIRECTOR_BRIEFINGS_MAX:
|
||||
self._recentDirectorBriefings = self._recentDirectorBriefings[
|
||||
|
|
@ -3066,7 +3069,7 @@ class TeamsbotService:
|
|||
return False
|
||||
interface.updateDirectorPrompt(promptId, {
|
||||
"status": TeamsbotDirectorPromptStatus.CONSUMED.value,
|
||||
"consumedAt": getIsoTimestamp(),
|
||||
"consumedAt": getUtcTimestamp(),
|
||||
"statusMessage": "Removed by operator",
|
||||
})
|
||||
self._activePersistentPrompts = [
|
||||
|
|
@ -3187,7 +3190,7 @@ class TeamsbotService:
|
|||
}
|
||||
if not isPersistent:
|
||||
updates["status"] = TeamsbotDirectorPromptStatus.CONSUMED.value
|
||||
updates["consumedAt"] = getIsoTimestamp()
|
||||
updates["consumedAt"] = getUtcTimestamp()
|
||||
interface.updateDirectorPrompt(promptId, updates)
|
||||
await _emitSessionEvent(sessionId, "directorPrompt", {
|
||||
"id": promptId,
|
||||
|
|
@ -3300,7 +3303,7 @@ class TeamsbotService:
|
|||
await _emitSessionEvent(sessionId, "agentRun", {
|
||||
"status": "interimNotice",
|
||||
"message": text,
|
||||
"timestamp": getIsoTimestamp(),
|
||||
"timestamp": getUtcTimestamp(),
|
||||
})
|
||||
|
||||
async def _runAgentForMeeting(
|
||||
|
|
@ -3352,7 +3355,7 @@ class TeamsbotService:
|
|||
"source": sourceLabel,
|
||||
"promptId": promptId,
|
||||
"status": "started",
|
||||
"timestamp": getIsoTimestamp(),
|
||||
"timestamp": getUtcTimestamp(),
|
||||
})
|
||||
|
||||
# Director prompts run silently by default — no spontaneous "moment please"
|
||||
|
|
@ -3577,7 +3580,7 @@ class TeamsbotService:
|
|||
"chunks": ttsOutcome.get("chunks"),
|
||||
"played": ttsOutcome.get("played"),
|
||||
"error": ttsOutcome.get("error"),
|
||||
"timestamp": getIsoTimestamp(),
|
||||
"timestamp": getUtcTimestamp(),
|
||||
})
|
||||
if not ttsOutcome.get("success"):
|
||||
logger.warning(
|
||||
|
|
@ -3615,7 +3618,7 @@ class TeamsbotService:
|
|||
modelName="agent",
|
||||
processingTime=0.0,
|
||||
priceCHF=0.0,
|
||||
timestamp=getIsoTimestamp(),
|
||||
timestamp=getUtcTimestamp(),
|
||||
).model_dump()
|
||||
createdResponse = interface.createBotResponse(botResponseData)
|
||||
|
||||
|
|
@ -3635,7 +3638,7 @@ class TeamsbotService:
|
|||
sessionId=sessionId,
|
||||
speaker=self.config.botName,
|
||||
text=text,
|
||||
timestamp=getIsoTimestamp(),
|
||||
timestamp=getUtcTimestamp(),
|
||||
confidence=1.0,
|
||||
language=self.config.language,
|
||||
isFinal=True,
|
||||
|
|
@ -3661,7 +3664,7 @@ class TeamsbotService:
|
|||
"speaker": self.config.botName,
|
||||
"text": text,
|
||||
"confidence": 1.0,
|
||||
"timestamp": getIsoTimestamp(),
|
||||
"timestamp": getUtcTimestamp(),
|
||||
"isContinuation": False,
|
||||
"source": "botResponse",
|
||||
"speakerResolvedFromHint": False,
|
||||
|
|
@ -3710,7 +3713,7 @@ class TeamsbotService:
|
|||
modelName="agent",
|
||||
processingTime=0.0,
|
||||
priceCHF=0.0,
|
||||
timestamp=getIsoTimestamp(),
|
||||
timestamp=getUtcTimestamp(),
|
||||
).model_dump()
|
||||
createdResponse = interface.createBotResponse(botResponseData)
|
||||
|
||||
|
|
@ -3828,7 +3831,7 @@ class TeamsbotService:
|
|||
"status": "requested",
|
||||
"hasWebSocket": True,
|
||||
"message": "Greeting TTS requested",
|
||||
"timestamp": getIsoTimestamp(),
|
||||
"timestamp": getUtcTimestamp(),
|
||||
})
|
||||
cancelHook = self._makeAnswerCancelHook()
|
||||
async with self._meetingTtsLock:
|
||||
|
|
@ -3851,7 +3854,7 @@ class TeamsbotService:
|
|||
"hasWebSocket": True,
|
||||
"chunks": ttsOutcome.get("chunks"),
|
||||
"played": ttsOutcome.get("played"),
|
||||
"timestamp": getIsoTimestamp(),
|
||||
"timestamp": getUtcTimestamp(),
|
||||
})
|
||||
else:
|
||||
logger.warning(
|
||||
|
|
@ -3861,7 +3864,7 @@ class TeamsbotService:
|
|||
"status": "failed",
|
||||
"hasWebSocket": True,
|
||||
"message": ttsOutcome.get("error"),
|
||||
"timestamp": getIsoTimestamp(),
|
||||
"timestamp": getUtcTimestamp(),
|
||||
})
|
||||
|
||||
if sendToChat:
|
||||
|
|
@ -3881,7 +3884,7 @@ class TeamsbotService:
|
|||
sessionId=sessionId,
|
||||
speaker=self.config.botName,
|
||||
text=greetingText,
|
||||
timestamp=getIsoTimestamp(),
|
||||
timestamp=getUtcTimestamp(),
|
||||
confidence=1.0,
|
||||
language=greetingLang,
|
||||
isFinal=True,
|
||||
|
|
@ -3905,14 +3908,14 @@ class TeamsbotService:
|
|||
"responseType": TeamsbotResponseType.AUDIO.value,
|
||||
"detectedIntent": "greeting",
|
||||
"reasoning": "Automatic join greeting",
|
||||
"timestamp": getIsoTimestamp(),
|
||||
"timestamp": getUtcTimestamp(),
|
||||
})
|
||||
await _emitSessionEvent(sessionId, "transcript", {
|
||||
"id": greetingTranscript.get("id"),
|
||||
"speaker": self.config.botName,
|
||||
"text": greetingText,
|
||||
"confidence": 1.0,
|
||||
"timestamp": getIsoTimestamp(),
|
||||
"timestamp": getUtcTimestamp(),
|
||||
"isContinuation": False,
|
||||
"source": "botResponse",
|
||||
"speakerResolvedFromHint": False,
|
||||
|
|
|
|||
|
|
@ -8,6 +8,7 @@ Encapsulates: config loading -> connector resolution -> duplicate check -> push
|
|||
import json
|
||||
import logging
|
||||
import time
|
||||
from datetime import datetime as _dt, timezone as _tz
|
||||
from typing import List, Dict, Any, Optional
|
||||
|
||||
from .accountingConnectorBase import (
|
||||
|
|
@ -103,9 +104,12 @@ class AccountingBridge:
|
|||
costCenter=position.get("costCenter"),
|
||||
))
|
||||
|
||||
valutaTs = position.get("valuta")
|
||||
bookingDateStr = _dt.fromtimestamp(valutaTs, tz=_tz.utc).strftime("%Y-%m-%d") if valutaTs else ""
|
||||
|
||||
return AccountingBooking(
|
||||
reference=position.get("bookingReference") or position.get("id", ""),
|
||||
bookingDate=position.get("valuta") or "",
|
||||
bookingDate=bookingDateStr,
|
||||
description=position.get("desc", ""),
|
||||
lines=lines,
|
||||
)
|
||||
|
|
|
|||
|
|
@ -21,6 +21,7 @@ import logging
|
|||
import os
|
||||
import time
|
||||
from collections import defaultdict
|
||||
from datetime import datetime as _dt, timezone as _tz
|
||||
from pathlib import Path
|
||||
from typing import Callable, Dict, Any, List, Optional, Type
|
||||
|
||||
|
|
@ -33,6 +34,23 @@ logger = logging.getLogger(__name__)
|
|||
_HEARTBEAT_EVERY = 500
|
||||
|
||||
|
||||
def _isoDateToTimestamp(raw: Any) -> Optional[float]:
|
||||
"""Convert an ISO date string (``YYYY-MM-DD`` or datetime) to a UTC
|
||||
midnight unix timestamp. Returns ``None`` only when *raw* is
|
||||
falsy/None. Raises ``ValueError`` for non-empty but unparseable
|
||||
values so import errors are never silently swallowed.
|
||||
"""
|
||||
if raw is None or raw == "":
|
||||
return None
|
||||
s = str(raw).split("T")[0].strip()[:10]
|
||||
if not s:
|
||||
return None
|
||||
try:
|
||||
return _dt.strptime(s, "%Y-%m-%d").replace(tzinfo=_tz.utc).timestamp()
|
||||
except ValueError:
|
||||
raise ValueError(f"Cannot parse bookingDate '{raw}' as YYYY-MM-DD")
|
||||
|
||||
|
||||
def _isIncomeStatementAccount(accountNumber: str) -> bool:
|
||||
"""Swiss KMU-Kontenrahmen heuristic: 1xxx + 2xxx -> balance sheet
|
||||
(cumulative carry-over across years); 3xxx..9xxx -> income statement
|
||||
|
|
@ -360,8 +378,8 @@ class AccountingDataSync:
|
|||
logger.exception(f"AccountingDataSync: failed to write core lastSync* fields for cfg {cfgId}: {coreErr}")
|
||||
summary["errors"].append(f"Persist lastSync core: {coreErr}")
|
||||
extPayload = {
|
||||
"lastSyncDateFrom": dateFrom,
|
||||
"lastSyncDateTo": dateTo,
|
||||
"lastSyncDateFrom": _isoDateToTimestamp(dateFrom),
|
||||
"lastSyncDateTo": _isoDateToTimestamp(dateTo),
|
||||
"lastSyncCounts": {
|
||||
"accounts": int(summary.get("accounts", 0)),
|
||||
"journalEntries": int(summary.get("journalEntries", 0)),
|
||||
|
|
@ -432,18 +450,19 @@ class AccountingDataSync:
|
|||
newestDate: Optional[str] = None
|
||||
for raw in rawEntries:
|
||||
entryId = str(_uuid.uuid4())
|
||||
bookingDate = raw.get("bookingDate")
|
||||
if bookingDate:
|
||||
normalized = str(bookingDate).split("T")[0][:10]
|
||||
if normalized:
|
||||
if oldestDate is None or normalized < oldestDate:
|
||||
oldestDate = normalized
|
||||
if newestDate is None or normalized > newestDate:
|
||||
newestDate = normalized
|
||||
rawDate = raw.get("bookingDate")
|
||||
bookingTs = _isoDateToTimestamp(rawDate)
|
||||
if rawDate:
|
||||
isoDay = str(rawDate).split("T")[0][:10]
|
||||
if isoDay:
|
||||
if oldestDate is None or isoDay < oldestDate:
|
||||
oldestDate = isoDay
|
||||
if newestDate is None or isoDay > newestDate:
|
||||
newestDate = isoDay
|
||||
entryRows.append({
|
||||
"id": entryId,
|
||||
"externalId": raw.get("externalId"),
|
||||
"bookingDate": bookingDate,
|
||||
"bookingDate": bookingTs,
|
||||
"reference": raw.get("reference"),
|
||||
"description": raw.get("description", ""),
|
||||
"currency": raw.get("currency", "CHF"),
|
||||
|
|
@ -501,17 +520,14 @@ class AccountingDataSync:
|
|||
"""Persist account balances per (account, period) into ``TrusteeDataAccountBalance``.
|
||||
|
||||
Source of truth (``source="connector"``): the list returned by
|
||||
``BaseAccountingConnector.getAccountBalances`` is persisted 1:1.
|
||||
``BaseAccountingConnector.getAccountBalances`` is persisted with
|
||||
``openingBalance``/``closingBalance`` from the connector. If the
|
||||
connector doesn't supply ``debitTotal``/``creditTotal`` (e.g. RMA's
|
||||
``/gl/saldo`` only returns net balance), those fields are enriched
|
||||
from the already-imported journal lines.
|
||||
|
||||
Fallback (``source="local-fallback"``): aggregate the just-persisted
|
||||
journal lines into **cumulative** balances. Unlike the previous
|
||||
implementation, this version (a) carries the cumulative balance
|
||||
forward across months/years for balance-sheet accounts, (b) resets
|
||||
income-statement accounts at fiscal-year start, and (c) computes
|
||||
``openingBalance`` correctly as the previous period's
|
||||
``closingBalance``. ``openingBalance`` of the very first imported
|
||||
period stays at 0 (no prior data available -- by design; see plan
|
||||
document for rationale).
|
||||
journal lines into **cumulative** balances.
|
||||
"""
|
||||
t0 = time.time()
|
||||
self._bulkClear(modelBalance, featureInstanceId)
|
||||
|
|
@ -519,6 +535,9 @@ class AccountingDataSync:
|
|||
|
||||
if connectorBalances:
|
||||
rows = [_balanceModelToRow(b, scope) for b in connectorBalances]
|
||||
movements = self._aggregateJournalMovements(featureInstanceId, modelEntry, modelLine)
|
||||
if movements:
|
||||
self._enrichRowsWithMovements(rows, movements)
|
||||
n = self._bulkCreate(modelBalance, rows)
|
||||
logger.info(
|
||||
f"Persisted {n} balances for {featureInstanceId} in {time.time() - t0:.1f}s "
|
||||
|
|
@ -534,19 +553,19 @@ class AccountingDataSync:
|
|||
)
|
||||
return n
|
||||
|
||||
def _buildLocalBalanceFallback(
|
||||
def _aggregateJournalMovements(
|
||||
self,
|
||||
featureInstanceId: str,
|
||||
modelEntry: Type,
|
||||
modelLine: Type,
|
||||
scope: Dict[str, Any],
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""Aggregate ``TrusteeDataJournalLine`` rows into cumulative period balances.
|
||||
) -> Dict[tuple, Dict[str, float]]:
|
||||
"""Aggregate debit/credit movements per ``(accountNumber, year, month)``
|
||||
from the already-persisted journal lines.
|
||||
|
||||
Returns rows ready for ``_bulkCreate``. Walks every account
|
||||
chronologically through all years observed in the journal so the
|
||||
cumulative balance and per-period opening are exact (within the
|
||||
bounds of the imported window).
|
||||
Returns ``{(accNo, year, month): {"debit": float, "credit": float}}``.
|
||||
Used by both the local-fallback balance builder and the connector-balance
|
||||
enrichment (RMA's ``/gl/saldo`` delivers net balance but no debit/credit
|
||||
breakdown).
|
||||
"""
|
||||
entries = self._if.db.getRecordset(
|
||||
modelEntry, recordFilter={"featureInstanceId": featureInstanceId},
|
||||
|
|
@ -563,8 +582,6 @@ class AccountingDataSync:
|
|||
) or []
|
||||
|
||||
movements: Dict[tuple, Dict[str, float]] = defaultdict(lambda: {"debit": 0.0, "credit": 0.0})
|
||||
observedYears: set = set()
|
||||
observedAccounts: set = set()
|
||||
for ln in lines:
|
||||
if isinstance(ln, dict):
|
||||
jeid = ln.get("journalEntryId", "")
|
||||
|
|
@ -577,19 +594,71 @@ class AccountingDataSync:
|
|||
debit = float(getattr(ln, "debitAmount", 0))
|
||||
credit = float(getattr(ln, "creditAmount", 0))
|
||||
|
||||
bdate = entryDates.get(jeid, "")
|
||||
bdate = entryDates.get(jeid)
|
||||
if not accNo or not bdate:
|
||||
continue
|
||||
parts = str(bdate).split("-")
|
||||
if len(parts) < 2:
|
||||
continue
|
||||
try:
|
||||
year = int(parts[0])
|
||||
month = int(parts[1])
|
||||
except ValueError:
|
||||
dt = _dt.fromtimestamp(float(bdate), tz=_tz.utc)
|
||||
year = dt.year
|
||||
month = dt.month
|
||||
except (ValueError, TypeError, OSError):
|
||||
continue
|
||||
movements[(accNo, year, month)]["debit"] += debit
|
||||
movements[(accNo, year, month)]["credit"] += credit
|
||||
return movements
|
||||
|
||||
@staticmethod
|
||||
def _enrichRowsWithMovements(
|
||||
rows: List[Dict[str, Any]],
|
||||
movements: Dict[tuple, Dict[str, float]],
|
||||
) -> None:
|
||||
"""Patch ``debitTotal`` / ``creditTotal`` on balance rows from journal movements.
|
||||
|
||||
For monthly rows: use the exact month's movement.
|
||||
For annual rows (``periodMonth=0``): sum all 12 months of that year+account.
|
||||
Only overwrites if the existing value is 0 (connector didn't provide it).
|
||||
"""
|
||||
for row in rows:
|
||||
if row.get("debitTotal", 0) != 0 or row.get("creditTotal", 0) != 0:
|
||||
continue
|
||||
accNo = row.get("accountNumber", "")
|
||||
year = row.get("periodYear", 0)
|
||||
month = row.get("periodMonth", 0)
|
||||
if month > 0:
|
||||
mov = movements.get((accNo, year, month))
|
||||
if mov:
|
||||
row["debitTotal"] = round(mov["debit"], 2)
|
||||
row["creditTotal"] = round(mov["credit"], 2)
|
||||
else:
|
||||
yearDebit = 0.0
|
||||
yearCredit = 0.0
|
||||
for m in range(1, 13):
|
||||
mov = movements.get((accNo, year, m))
|
||||
if mov:
|
||||
yearDebit += mov["debit"]
|
||||
yearCredit += mov["credit"]
|
||||
if yearDebit or yearCredit:
|
||||
row["debitTotal"] = round(yearDebit, 2)
|
||||
row["creditTotal"] = round(yearCredit, 2)
|
||||
|
||||
def _buildLocalBalanceFallback(
|
||||
self,
|
||||
featureInstanceId: str,
|
||||
modelEntry: Type,
|
||||
modelLine: Type,
|
||||
scope: Dict[str, Any],
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""Aggregate ``TrusteeDataJournalLine`` rows into cumulative period balances.
|
||||
|
||||
Returns rows ready for ``_bulkCreate``. Walks every account
|
||||
chronologically through all years observed in the journal so the
|
||||
cumulative balance and per-period opening are exact (within the
|
||||
bounds of the imported window).
|
||||
"""
|
||||
movements = self._aggregateJournalMovements(featureInstanceId, modelEntry, modelLine)
|
||||
observedYears: set = set()
|
||||
observedAccounts: set = set()
|
||||
for (accNo, year, month) in movements:
|
||||
observedYears.add(year)
|
||||
observedAccounts.add(accNo)
|
||||
|
||||
|
|
|
|||
|
|
@ -46,7 +46,7 @@ class TrusteeOrganisation(PowerOnModel):
|
|||
description="Mandate ID (system-level organisation)",
|
||||
json_schema_extra={
|
||||
"label": "Mandat",
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False
|
||||
|
|
@ -57,7 +57,7 @@ class TrusteeOrganisation(PowerOnModel):
|
|||
description="Feature Instance ID for instance-level isolation",
|
||||
json_schema_extra={
|
||||
"label": "Feature-Instanz",
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False
|
||||
|
|
@ -92,7 +92,7 @@ class TrusteeRole(PowerOnModel):
|
|||
description="Mandate ID",
|
||||
json_schema_extra={
|
||||
"label": "Mandat",
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False
|
||||
|
|
@ -103,7 +103,7 @@ class TrusteeRole(PowerOnModel):
|
|||
description="Feature Instance ID for instance-level isolation",
|
||||
json_schema_extra={
|
||||
"label": "Feature-Instanz",
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False
|
||||
|
|
@ -132,7 +132,7 @@ class TrusteeAccess(PowerOnModel):
|
|||
"frontend_readonly": False,
|
||||
"frontend_required": True,
|
||||
"frontend_options": "/api/trustee/{instanceId}/organisations/options",
|
||||
"fk_target": {"db": "poweron_trustee", "table": "TrusteeOrganisation"},
|
||||
"fk_target": {"db": "poweron_trustee", "table": "TrusteeOrganisation", "labelField": "label"},
|
||||
}
|
||||
)
|
||||
roleId: str = Field(
|
||||
|
|
@ -143,7 +143,7 @@ class TrusteeAccess(PowerOnModel):
|
|||
"frontend_readonly": False,
|
||||
"frontend_required": True,
|
||||
"frontend_options": "/api/trustee/{instanceId}/roles/options",
|
||||
"fk_target": {"db": "poweron_trustee", "table": "TrusteeRole"},
|
||||
"fk_target": {"db": "poweron_trustee", "table": "TrusteeRole", "labelField": "desc"},
|
||||
}
|
||||
)
|
||||
userId: str = Field(
|
||||
|
|
@ -154,7 +154,7 @@ class TrusteeAccess(PowerOnModel):
|
|||
"frontend_readonly": False,
|
||||
"frontend_required": True,
|
||||
"frontend_options": "/api/users/options",
|
||||
"fk_target": {"db": "poweron_app", "table": "User"},
|
||||
"fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
|
||||
}
|
||||
)
|
||||
contractId: Optional[str] = Field(
|
||||
|
|
@ -167,7 +167,7 @@ class TrusteeAccess(PowerOnModel):
|
|||
"frontend_required": False,
|
||||
"frontend_options": "/api/trustee/{instanceId}/contracts/options",
|
||||
"frontend_depends_on": "organisationId",
|
||||
"fk_target": {"db": "poweron_trustee", "table": "TrusteeContract"},
|
||||
"fk_target": {"db": "poweron_trustee", "table": "TrusteeContract", "labelField": "label"},
|
||||
}
|
||||
)
|
||||
mandateId: Optional[str] = Field(
|
||||
|
|
@ -175,7 +175,7 @@ class TrusteeAccess(PowerOnModel):
|
|||
description="Mandate ID",
|
||||
json_schema_extra={
|
||||
"label": "Mandat",
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False
|
||||
|
|
@ -186,7 +186,7 @@ class TrusteeAccess(PowerOnModel):
|
|||
description="Feature Instance ID for instance-level isolation",
|
||||
json_schema_extra={
|
||||
"label": "Feature-Instanz",
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False
|
||||
|
|
@ -215,7 +215,7 @@ class TrusteeContract(PowerOnModel):
|
|||
"frontend_readonly": False, # Editable at creation, then readonly
|
||||
"frontend_required": True,
|
||||
"frontend_options": "/api/trustee/{instanceId}/organisations/options",
|
||||
"fk_target": {"db": "poweron_trustee", "table": "TrusteeOrganisation"},
|
||||
"fk_target": {"db": "poweron_trustee", "table": "TrusteeOrganisation", "labelField": "label"},
|
||||
}
|
||||
)
|
||||
label: str = Field(
|
||||
|
|
@ -242,7 +242,7 @@ class TrusteeContract(PowerOnModel):
|
|||
description="Mandate ID",
|
||||
json_schema_extra={
|
||||
"label": "Mandat",
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False
|
||||
|
|
@ -253,7 +253,7 @@ class TrusteeContract(PowerOnModel):
|
|||
description="Feature Instance ID for instance-level isolation",
|
||||
json_schema_extra={
|
||||
"label": "Feature-Instanz",
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False
|
||||
|
|
@ -311,7 +311,7 @@ class TrusteeDocument(PowerOnModel):
|
|||
"frontend_type": "file_reference",
|
||||
"frontend_readonly": False,
|
||||
"frontend_required": False,
|
||||
"fk_target": {"db": "poweron_management", "table": "FileItem"},
|
||||
"fk_target": {"db": "poweron_management", "table": "FileItem", "labelField": "fileName"},
|
||||
}
|
||||
)
|
||||
documentName: str = Field(
|
||||
|
|
@ -359,7 +359,7 @@ class TrusteeDocument(PowerOnModel):
|
|||
description="Mandate ID (auto-set from context)",
|
||||
json_schema_extra={
|
||||
"label": "Mandat",
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
|
|
@ -371,7 +371,7 @@ class TrusteeDocument(PowerOnModel):
|
|||
description="Feature Instance ID for instance-level isolation (auto-set from context)",
|
||||
json_schema_extra={
|
||||
"label": "Feature-Instanz",
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
|
|
@ -439,7 +439,7 @@ class TrusteePosition(PowerOnModel):
|
|||
"frontend_readonly": False,
|
||||
"frontend_required": False,
|
||||
"frontend_options": "/api/trustee/{instanceId}/documents/options",
|
||||
"fk_target": {"db": "poweron_trustee", "table": "TrusteeDocument"},
|
||||
"fk_target": {"db": "poweron_trustee", "table": "TrusteeDocument", "labelField": "documentName"},
|
||||
}
|
||||
)
|
||||
bankDocumentId: Optional[str] = Field(
|
||||
|
|
@ -451,12 +451,12 @@ class TrusteePosition(PowerOnModel):
|
|||
"frontend_readonly": False,
|
||||
"frontend_required": False,
|
||||
"frontend_options": "/api/trustee/{instanceId}/documents/options",
|
||||
"fk_target": {"db": "poweron_trustee", "table": "TrusteeDocument"},
|
||||
"fk_target": {"db": "poweron_trustee", "table": "TrusteeDocument", "labelField": "documentName"},
|
||||
}
|
||||
)
|
||||
valuta: Optional[str] = Field(
|
||||
valuta: Optional[float] = Field(
|
||||
default=None,
|
||||
description="Value date (ISO format: YYYY-MM-DD)",
|
||||
description="Value date (UTC midnight unix timestamp)",
|
||||
json_schema_extra={
|
||||
"label": "Valutadatum",
|
||||
"frontend_type": "date",
|
||||
|
|
@ -684,9 +684,9 @@ class TrusteePosition(PowerOnModel):
|
|||
"frontend_required": False
|
||||
}
|
||||
)
|
||||
dueDate: Optional[str] = Field(
|
||||
dueDate: Optional[float] = Field(
|
||||
default=None,
|
||||
description="Payment due date (ISO format: YYYY-MM-DD)",
|
||||
description="Payment due date (UTC midnight unix timestamp)",
|
||||
json_schema_extra={
|
||||
"label": "Fälligkeitsdatum",
|
||||
"frontend_type": "date",
|
||||
|
|
@ -699,7 +699,7 @@ class TrusteePosition(PowerOnModel):
|
|||
description="Mandate ID (auto-set from context)",
|
||||
json_schema_extra={
|
||||
"label": "Mandat",
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
|
|
@ -711,7 +711,7 @@ class TrusteePosition(PowerOnModel):
|
|||
description="Feature Instance ID for instance-level isolation (auto-set from context)",
|
||||
json_schema_extra={
|
||||
"label": "Feature-Instanz",
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
|
|
@ -742,15 +742,15 @@ class TrusteeDataAccount(PowerOnModel):
|
|||
accountGroup: Optional[str] = Field(default=None, description="Account group/category", json_schema_extra={"label": "Gruppe"})
|
||||
currency: str = Field(default="CHF", description="Account currency", json_schema_extra={"label": "Währung"})
|
||||
isActive: bool = Field(default=True, json_schema_extra={"label": "Aktiv"})
|
||||
mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat", "fk_target": {"db": "poweron_app", "table": "Mandate"}})
|
||||
featureInstanceId: Optional[str] = Field(default=None, json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}})
|
||||
mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}})
|
||||
featureInstanceId: Optional[str] = Field(default=None, json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}})
|
||||
|
||||
@i18nModel("Buchung (Sync)")
|
||||
class TrusteeDataJournalEntry(PowerOnModel):
|
||||
"""Journal entry header synced from external accounting system."""
|
||||
id: str = Field(default_factory=lambda: str(uuid.uuid4()), json_schema_extra={"label": "ID"})
|
||||
externalId: Optional[str] = Field(default=None, description="ID in the source system", json_schema_extra={"label": "Externe ID"})
|
||||
bookingDate: Optional[str] = Field(default=None, description="Booking date (YYYY-MM-DD)", json_schema_extra={"label": "Datum"})
|
||||
bookingDate: Optional[float] = Field(default=None, description="Booking date (UTC unix timestamp)", json_schema_extra={"label": "Datum", "frontend_type": "timestamp"})
|
||||
reference: Optional[str] = Field(default=None, description="Booking reference / voucher number", json_schema_extra={"label": "Referenz"})
|
||||
description: str = Field(default="", description="Booking text", json_schema_extra={"label": "Beschreibung"})
|
||||
currency: str = Field(default="CHF", json_schema_extra={"label": "Währung"})
|
||||
|
|
@ -763,14 +763,14 @@ class TrusteeDataJournalEntry(PowerOnModel):
|
|||
"frontend_format": "R:#'###.00",
|
||||
},
|
||||
)
|
||||
mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat", "fk_target": {"db": "poweron_app", "table": "Mandate"}})
|
||||
featureInstanceId: Optional[str] = Field(default=None, json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}})
|
||||
mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}})
|
||||
featureInstanceId: Optional[str] = Field(default=None, json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}})
|
||||
|
||||
@i18nModel("Buchungszeile (Sync)")
|
||||
class TrusteeDataJournalLine(PowerOnModel):
|
||||
"""Journal entry line (debit/credit) synced from external accounting system."""
|
||||
id: str = Field(default_factory=lambda: str(uuid.uuid4()), json_schema_extra={"label": "ID"})
|
||||
journalEntryId: str = Field(description="FK → TrusteeDataJournalEntry.id", json_schema_extra={"label": "Buchung", "fk_target": {"db": "poweron_trustee", "table": "TrusteeDataJournalEntry"}})
|
||||
journalEntryId: str = Field(description="FK → TrusteeDataJournalEntry.id", json_schema_extra={"label": "Buchung", "fk_target": {"db": "poweron_trustee", "table": "TrusteeDataJournalEntry", "labelField": "reference"}})
|
||||
accountNumber: str = Field(description="Account number", json_schema_extra={"label": "Konto"})
|
||||
debitAmount: float = Field(default=0.0, json_schema_extra={"label": "Soll", "frontend_format": "R:#'###.00"})
|
||||
creditAmount: float = Field(default=0.0, json_schema_extra={"label": "Haben", "frontend_format": "R:#'###.00"})
|
||||
|
|
@ -778,8 +778,8 @@ class TrusteeDataJournalLine(PowerOnModel):
|
|||
taxCode: Optional[str] = Field(default=None, json_schema_extra={"label": "Steuercode"})
|
||||
costCenter: Optional[str] = Field(default=None, json_schema_extra={"label": "Kostenstelle"})
|
||||
description: str = Field(default="", json_schema_extra={"label": "Beschreibung"})
|
||||
mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat", "fk_target": {"db": "poweron_app", "table": "Mandate"}})
|
||||
featureInstanceId: Optional[str] = Field(default=None, json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}})
|
||||
mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}})
|
||||
featureInstanceId: Optional[str] = Field(default=None, json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}})
|
||||
|
||||
@i18nModel("Kontakt (Sync)")
|
||||
class TrusteeDataContact(PowerOnModel):
|
||||
|
|
@ -796,8 +796,8 @@ class TrusteeDataContact(PowerOnModel):
|
|||
email: Optional[str] = Field(default=None, json_schema_extra={"label": "E-Mail"})
|
||||
phone: Optional[str] = Field(default=None, json_schema_extra={"label": "Telefon"})
|
||||
vatNumber: Optional[str] = Field(default=None, json_schema_extra={"label": "MWST-Nr."})
|
||||
mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat", "fk_target": {"db": "poweron_app", "table": "Mandate"}})
|
||||
featureInstanceId: Optional[str] = Field(default=None, json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}})
|
||||
mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}})
|
||||
featureInstanceId: Optional[str] = Field(default=None, json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}})
|
||||
|
||||
@i18nModel("Kontosaldo (Sync)")
|
||||
class TrusteeDataAccountBalance(PowerOnModel):
|
||||
|
|
@ -811,8 +811,8 @@ class TrusteeDataAccountBalance(PowerOnModel):
|
|||
creditTotal: float = Field(default=0.0, json_schema_extra={"label": "Haben-Umsatz", "frontend_format": "R:#'###.00"})
|
||||
closingBalance: float = Field(default=0.0, json_schema_extra={"label": "Schlusssaldo", "frontend_format": "R:#'###.00"})
|
||||
currency: str = Field(default="CHF", json_schema_extra={"label": "Währung"})
|
||||
mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat", "fk_target": {"db": "poweron_app", "table": "Mandate"}})
|
||||
featureInstanceId: Optional[str] = Field(default=None, json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}})
|
||||
mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}})
|
||||
featureInstanceId: Optional[str] = Field(default=None, json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}})
|
||||
|
||||
@i18nModel("Buchhaltungs-Konfiguration")
|
||||
class TrusteeAccountingConfig(PowerOnModel):
|
||||
|
|
@ -822,20 +822,20 @@ class TrusteeAccountingConfig(PowerOnModel):
|
|||
Credentials are stored encrypted (decrypted at runtime by the AccountingBridge).
|
||||
"""
|
||||
id: str = Field(default_factory=lambda: str(uuid.uuid4()), json_schema_extra={"label": "ID"})
|
||||
featureInstanceId: str = Field(description="FK -> FeatureInstance.id (1:1)", json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}})
|
||||
featureInstanceId: str = Field(description="FK -> FeatureInstance.id (1:1)", json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}})
|
||||
connectorType: str = Field(description="Connector type key, e.g. 'rma', 'bexio', 'abacus'", json_schema_extra={"label": "System"})
|
||||
displayLabel: str = Field(default="", description="User-visible label for this integration", json_schema_extra={"label": "Bezeichnung"})
|
||||
encryptedConfig: str = Field(default="", description="Encrypted JSON blob with connector credentials", json_schema_extra={"label": "Verschlüsselte Konfiguration"})
|
||||
isActive: bool = Field(default=True, json_schema_extra={"label": "Aktiv"})
|
||||
lastSyncAt: Optional[float] = Field(default=None, description="Timestamp of last sync attempt", json_schema_extra={"label": "Letzte Synchronisation"})
|
||||
lastSyncAt: Optional[float] = Field(default=None, description="Timestamp of last sync attempt", json_schema_extra={"label": "Letzte Synchronisation", "frontend_type": "timestamp"})
|
||||
lastSyncStatus: Optional[str] = Field(default=None, description="Last sync result: success, error, partial", json_schema_extra={"label": "Status"})
|
||||
lastSyncErrorMessage: Optional[str] = Field(default=None, description="Error message when lastSyncStatus is error", json_schema_extra={"label": "Fehlermeldung"})
|
||||
lastSyncDateFrom: Optional[str] = Field(default=None, description="dateFrom (ISO date) of the last data import window", json_schema_extra={"label": "Letztes Import-Fenster von"})
|
||||
lastSyncDateTo: Optional[str] = Field(default=None, description="dateTo (ISO date) of the last data import window", json_schema_extra={"label": "Letztes Import-Fenster bis"})
|
||||
lastSyncDateFrom: Optional[float] = Field(default=None, description="dateFrom (UTC midnight unix timestamp) of the last data import window", json_schema_extra={"label": "Letztes Import-Fenster von", "frontend_type": "date"})
|
||||
lastSyncDateTo: Optional[float] = Field(default=None, description="dateTo (UTC midnight unix timestamp) of the last data import window", json_schema_extra={"label": "Letztes Import-Fenster bis", "frontend_type": "date"})
|
||||
lastSyncCounts: Optional[Dict[str, Any]] = Field(default=None, description="Last import summary: per-entity counts (accounts, journalEntries, journalLines, contacts, accountBalances) plus oldestBookingDate / newestBookingDate (ISO YYYY-MM-DD) for completeness verification", json_schema_extra={"label": "Letzte Import-Zaehler"})
|
||||
cachedChartOfAccounts: Optional[str] = Field(default=None, description="JSON-serialised chart of accounts cache (list of {accountNumber, label, accountType})", json_schema_extra={"label": "Cached Kontoplan"})
|
||||
chartCachedAt: Optional[float] = Field(default=None, description="Timestamp when cachedChartOfAccounts was last refreshed", json_schema_extra={"label": "Kontoplan-Cache-Zeitpunkt"})
|
||||
mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat", "fk_target": {"db": "poweron_app", "table": "Mandate"}})
|
||||
chartCachedAt: Optional[float] = Field(default=None, description="Timestamp when cachedChartOfAccounts was last refreshed", json_schema_extra={"label": "Kontoplan-Cache-Zeitpunkt", "frontend_type": "timestamp"})
|
||||
mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}})
|
||||
|
||||
@i18nModel("Buchhaltungs-Synchronisation")
|
||||
class TrusteeAccountingSync(PowerOnModel):
|
||||
|
|
@ -846,16 +846,16 @@ class TrusteeAccountingSync(PowerOnModel):
|
|||
id: str = Field(default_factory=lambda: str(uuid.uuid4()), json_schema_extra={"label": "ID"})
|
||||
positionId: str = Field(
|
||||
description="FK -> TrusteePosition.id",
|
||||
json_schema_extra={"label": "Position", "fk_target": {"db": "poweron_trustee", "table": "TrusteePosition"}},
|
||||
json_schema_extra={"label": "Position", "fk_target": {"db": "poweron_trustee", "table": "TrusteePosition", "labelField": None}},
|
||||
)
|
||||
featureInstanceId: str = Field(description="FK -> FeatureInstance.id", json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}})
|
||||
featureInstanceId: str = Field(description="FK -> FeatureInstance.id", json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}})
|
||||
connectorType: str = Field(description="Connector type at time of sync", json_schema_extra={"label": "System"})
|
||||
externalId: Optional[str] = Field(default=None, description="ID assigned by the external system", json_schema_extra={"label": "Externe ID"})
|
||||
externalReference: Optional[str] = Field(default=None, description="Reference in the external system", json_schema_extra={"label": "Externe Referenz"})
|
||||
syncStatus: str = Field(default="pending", description="pending | synced | error | cancelled", json_schema_extra={"label": "Status"})
|
||||
syncDirection: str = Field(default="push", description="push (local->ext) or pull (ext->local)", json_schema_extra={"label": "Richtung"})
|
||||
syncedAt: Optional[float] = Field(default=None, description="Timestamp of successful sync", json_schema_extra={"label": "Synchronisiert am"})
|
||||
syncedAt: Optional[float] = Field(default=None, description="Timestamp of successful sync", json_schema_extra={"label": "Synchronisiert am", "frontend_type": "timestamp"})
|
||||
errorMessage: Optional[str] = Field(default=None, json_schema_extra={"label": "Fehler"})
|
||||
bookingPayload: Optional[dict] = Field(default=None, description="Payload sent to the external system (audit)", json_schema_extra={"label": "Buchungs-Payload"})
|
||||
mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat", "fk_target": {"db": "poweron_app", "table": "Mandate"}})
|
||||
mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}})
|
||||
|
||||
|
|
|
|||
|
|
@ -126,13 +126,11 @@ def _sanitisePositionPayload(data: Dict[str, Any]) -> Dict[str, Any]:
|
|||
"""Failsafe normalisation for TrusteePosition payloads before DB writes."""
|
||||
safeData = dict(data or {})
|
||||
|
||||
isoValuta = _normaliseIsoDate(safeData.get("valuta"))
|
||||
safeData["valuta"] = isoValuta
|
||||
valutaTs = _normaliseTimestamp(safeData.get("valuta"))
|
||||
safeData["valuta"] = valutaTs
|
||||
|
||||
safeData["transactionDateTime"] = _normaliseTimestamp(
|
||||
safeData.get("transactionDateTime"),
|
||||
fallbackIsoDate=isoValuta,
|
||||
)
|
||||
txTs = _normaliseTimestamp(safeData.get("transactionDateTime"))
|
||||
safeData["transactionDateTime"] = txTs if txTs is not None else valutaTs
|
||||
|
||||
safeData["bookingAmount"] = _toSafeFloat(safeData.get("bookingAmount"), defaultValue=0.0)
|
||||
safeData["originalAmount"] = _toSafeFloat(
|
||||
|
|
@ -148,7 +146,7 @@ def _sanitisePositionPayload(data: Dict[str, Any]) -> Dict[str, Any]:
|
|||
safeData["originalCurrency"] = str(originalCurrency).upper()
|
||||
|
||||
if "dueDate" in safeData and safeData["dueDate"]:
|
||||
safeData["dueDate"] = _normaliseIsoDate(safeData["dueDate"])
|
||||
safeData["dueDate"] = _normaliseTimestamp(safeData["dueDate"])
|
||||
|
||||
_VALID_DOC_TYPES = {"invoice", "expense_receipt", "bank_document", "contract", "unknown"}
|
||||
docType = safeData.get("documentType")
|
||||
|
|
|
|||
|
|
@ -393,9 +393,10 @@ def get_position_options(
|
|||
items = result.items if hasattr(result, 'items') else result
|
||||
|
||||
def _makePositionLabel(p: TrusteePosition) -> str:
|
||||
from datetime import datetime as _dt, timezone as _tz
|
||||
parts = []
|
||||
if p.valuta:
|
||||
parts.append(str(p.valuta)[:10]) # Datum ohne Zeit
|
||||
parts.append(_dt.fromtimestamp(p.valuta, tz=_tz.utc).strftime("%Y-%m-%d"))
|
||||
if p.company:
|
||||
parts.append(p.company[:30])
|
||||
if p.desc:
|
||||
|
|
@ -978,33 +979,27 @@ def get_documents(
|
|||
|
||||
def _handleDocumentMode(instanceId, mandateId, mode, column, pagination, context):
|
||||
"""Handle mode=filterValues and mode=ids for trustee documents."""
|
||||
from modules.routes.routeHelpers import handleFilterValuesInMemory, handleIdsInMemory
|
||||
from modules.routes.routeHelpers import handleIdsInMemory
|
||||
if mode == "filterValues":
|
||||
if not column:
|
||||
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
|
||||
try:
|
||||
from modules.interfaces.interfaceRbac import getDistinctColumnValuesWithRBAC
|
||||
interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
|
||||
from modules.routes.routeHelpers import parseCrossFilterPagination
|
||||
crossFilterPagination = parseCrossFilterPagination(column, pagination)
|
||||
from fastapi.responses import JSONResponse
|
||||
values = getDistinctColumnValuesWithRBAC(
|
||||
connector=interface.db,
|
||||
modelClass=TrusteeDocument,
|
||||
column=column,
|
||||
currentUser=interface.currentUser,
|
||||
pagination=crossFilterPagination,
|
||||
recordFilter=None,
|
||||
mandateId=interface.mandateId,
|
||||
featureInstanceId=interface.featureInstanceId,
|
||||
featureCode=interface.FEATURE_CODE
|
||||
)
|
||||
return JSONResponse(content=sorted(values, key=lambda v: str(v).lower()))
|
||||
except Exception:
|
||||
interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
|
||||
result = interface.getAllDocuments(None)
|
||||
items = [r.model_dump() if hasattr(r, 'model_dump') else r for r in (result.items if hasattr(result, 'items') else result)]
|
||||
return handleFilterValuesInMemory(items, column, pagination)
|
||||
from modules.interfaces.interfaceRbac import getDistinctColumnValuesWithRBAC
|
||||
from modules.routes.routeHelpers import parseCrossFilterPagination
|
||||
from fastapi.responses import JSONResponse
|
||||
interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
|
||||
crossFilterPagination = parseCrossFilterPagination(column, pagination)
|
||||
values = getDistinctColumnValuesWithRBAC(
|
||||
connector=interface.db,
|
||||
modelClass=TrusteeDocument,
|
||||
column=column,
|
||||
currentUser=interface.currentUser,
|
||||
pagination=crossFilterPagination,
|
||||
recordFilter=None,
|
||||
mandateId=interface.mandateId,
|
||||
featureInstanceId=interface.featureInstanceId,
|
||||
featureCode=interface.FEATURE_CODE
|
||||
)
|
||||
return JSONResponse(content=sorted(values, key=lambda v: str(v).lower()))
|
||||
if mode == "ids":
|
||||
interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
|
||||
result = interface.getAllDocuments(None)
|
||||
|
|
@ -1227,33 +1222,27 @@ def get_positions(
|
|||
|
||||
def _handlePositionMode(instanceId, mandateId, mode, column, pagination, context):
|
||||
"""Handle mode=filterValues and mode=ids for trustee positions."""
|
||||
from modules.routes.routeHelpers import handleFilterValuesInMemory, handleIdsInMemory
|
||||
from modules.routes.routeHelpers import handleIdsInMemory
|
||||
if mode == "filterValues":
|
||||
if not column:
|
||||
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
|
||||
try:
|
||||
from modules.interfaces.interfaceRbac import getDistinctColumnValuesWithRBAC
|
||||
interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
|
||||
from modules.routes.routeHelpers import parseCrossFilterPagination
|
||||
crossFilterPagination = parseCrossFilterPagination(column, pagination)
|
||||
from fastapi.responses import JSONResponse
|
||||
values = getDistinctColumnValuesWithRBAC(
|
||||
connector=interface.db,
|
||||
modelClass=TrusteePosition,
|
||||
column=column,
|
||||
currentUser=interface.currentUser,
|
||||
pagination=crossFilterPagination,
|
||||
recordFilter=None,
|
||||
mandateId=interface.mandateId,
|
||||
featureInstanceId=interface.featureInstanceId,
|
||||
featureCode=interface.FEATURE_CODE
|
||||
)
|
||||
return JSONResponse(content=sorted(values, key=lambda v: str(v).lower()))
|
||||
except Exception:
|
||||
interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
|
||||
result = interface.getAllPositions(None)
|
||||
items = [r.model_dump() if hasattr(r, 'model_dump') else r for r in (result.items if hasattr(result, 'items') else result)]
|
||||
return handleFilterValuesInMemory(items, column, pagination)
|
||||
from modules.interfaces.interfaceRbac import getDistinctColumnValuesWithRBAC
|
||||
from modules.routes.routeHelpers import parseCrossFilterPagination
|
||||
from fastapi.responses import JSONResponse
|
||||
interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
|
||||
crossFilterPagination = parseCrossFilterPagination(column, pagination)
|
||||
values = getDistinctColumnValuesWithRBAC(
|
||||
connector=interface.db,
|
||||
modelClass=TrusteePosition,
|
||||
column=column,
|
||||
currentUser=interface.currentUser,
|
||||
pagination=crossFilterPagination,
|
||||
recordFilter=None,
|
||||
mandateId=interface.mandateId,
|
||||
featureInstanceId=interface.featureInstanceId,
|
||||
featureCode=interface.FEATURE_CODE
|
||||
)
|
||||
return JSONResponse(content=sorted(values, key=lambda v: str(v).lower()))
|
||||
if mode == "ids":
|
||||
interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
|
||||
result = interface.getAllPositions(None)
|
||||
|
|
@ -2338,6 +2327,63 @@ def delete_instance_role_rule(
|
|||
# (Unified Filter API: mode=filterValues / mode=ids).
|
||||
|
||||
|
||||
def _buildFeatureInternalResolvers(modelClass, db) -> Dict[str, Any]:
|
||||
"""Build ``extraResolvers`` for FK fields that point to other Trustee models.
|
||||
|
||||
The builtin ``enrichRowsWithFkLabels`` only covers Mandate / FeatureInstance /
|
||||
User / Role. Feature-internal FKs (e.g. ``journalEntryId`` -> ``TrusteeDataJournalEntry``)
|
||||
need a resolver that queries the Trustee DB. This function discovers such fields
|
||||
from the Pydantic model's ``fk_target`` annotations and creates a resolver per field.
|
||||
|
||||
Label strategy per target model:
|
||||
- ``TrusteeDataJournalEntry``: ``"<externalId> | <bookingDate>"``
|
||||
- Generic fallback: ``"<externalId>"`` or ``"<id[:8]>"``
|
||||
"""
|
||||
resolvers: Dict[str, Any] = {}
|
||||
for name, fieldInfo in modelClass.model_fields.items():
|
||||
extra = fieldInfo.json_schema_extra
|
||||
if not extra or not isinstance(extra, dict):
|
||||
continue
|
||||
tgt = extra.get("fk_target")
|
||||
if not isinstance(tgt, dict):
|
||||
continue
|
||||
tableName = tgt.get("table", "")
|
||||
if tableName not in _TRUSTEE_ENTITY_MODELS:
|
||||
continue
|
||||
targetModel = _TRUSTEE_ENTITY_MODELS[tableName]
|
||||
|
||||
def _makeResolver(model, field=name):
|
||||
def _resolve(ids: List[str]) -> Dict[str, Optional[str]]:
|
||||
result: Dict[str, Optional[str]] = {i: None for i in ids}
|
||||
try:
|
||||
recs = db.getRecordset(model, recordFilter={"id": list(set(ids))}) or []
|
||||
except Exception:
|
||||
return result
|
||||
for r in recs:
|
||||
row = r if isinstance(r, dict) else r.model_dump() if hasattr(r, "model_dump") else {}
|
||||
rid = row.get("id", "")
|
||||
parts = []
|
||||
for col in ("externalId", "reference", "bookingDate", "label", "name", "accountNumber"):
|
||||
val = row.get(col)
|
||||
if val is not None and val != "":
|
||||
if col == "bookingDate" and isinstance(val, (int, float)):
|
||||
from datetime import datetime, timezone
|
||||
try:
|
||||
parts.append(datetime.fromtimestamp(val, tz=timezone.utc).strftime("%Y-%m-%d"))
|
||||
except Exception:
|
||||
parts.append(str(val))
|
||||
else:
|
||||
parts.append(str(val))
|
||||
if len(parts) >= 2:
|
||||
break
|
||||
result[rid] = " | ".join(parts) if parts else rid[:8]
|
||||
return result
|
||||
return _resolve
|
||||
|
||||
resolvers[name] = _makeResolver(targetModel)
|
||||
return resolvers
|
||||
|
||||
|
||||
def _paginatedReadEndpoint(
|
||||
*,
|
||||
instanceId: str,
|
||||
|
|
@ -2359,7 +2405,6 @@ def _paginatedReadEndpoint(
|
|||
getDistinctColumnValuesWithRBAC,
|
||||
)
|
||||
from modules.routes.routeHelpers import (
|
||||
handleFilterValuesInMemory,
|
||||
handleIdsInMemory,
|
||||
parseCrossFilterPagination,
|
||||
enrichRowsWithFkLabels,
|
||||
|
|
@ -2372,34 +2417,19 @@ def _paginatedReadEndpoint(
|
|||
if mode == "filterValues":
|
||||
if not column:
|
||||
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
|
||||
try:
|
||||
crossFilterPagination = parseCrossFilterPagination(column, pagination)
|
||||
values = getDistinctColumnValuesWithRBAC(
|
||||
connector=interface.db,
|
||||
modelClass=modelClass,
|
||||
column=column,
|
||||
currentUser=interface.currentUser,
|
||||
pagination=crossFilterPagination,
|
||||
recordFilter=None,
|
||||
mandateId=interface.mandateId,
|
||||
featureInstanceId=interface.featureInstanceId,
|
||||
featureCode=interface.FEATURE_CODE,
|
||||
)
|
||||
return JSONResponse(content=sorted(values, key=lambda v: str(v).lower()))
|
||||
except Exception:
|
||||
result = getRecordsetPaginatedWithRBAC(
|
||||
connector=interface.db,
|
||||
modelClass=modelClass,
|
||||
currentUser=interface.currentUser,
|
||||
pagination=None,
|
||||
recordFilter=None,
|
||||
mandateId=interface.mandateId,
|
||||
featureInstanceId=interface.featureInstanceId,
|
||||
featureCode=interface.FEATURE_CODE,
|
||||
)
|
||||
items = result.items if hasattr(result, "items") else result
|
||||
items = [r.model_dump() if hasattr(r, "model_dump") else r for r in items]
|
||||
return handleFilterValuesInMemory(items, column, pagination)
|
||||
crossFilterPagination = parseCrossFilterPagination(column, pagination)
|
||||
values = getDistinctColumnValuesWithRBAC(
|
||||
connector=interface.db,
|
||||
modelClass=modelClass,
|
||||
column=column,
|
||||
currentUser=interface.currentUser,
|
||||
pagination=crossFilterPagination,
|
||||
recordFilter=None,
|
||||
mandateId=interface.mandateId,
|
||||
featureInstanceId=interface.featureInstanceId,
|
||||
featureCode=interface.FEATURE_CODE,
|
||||
)
|
||||
return JSONResponse(content=sorted(values, key=lambda v: str(v).lower()))
|
||||
|
||||
if mode == "ids":
|
||||
result = getRecordsetPaginatedWithRBAC(
|
||||
|
|
@ -2431,8 +2461,13 @@ def _paginatedReadEndpoint(
|
|||
def _itemsToDicts(rawItems):
|
||||
return [r.model_dump() if hasattr(r, "model_dump") else r for r in rawItems]
|
||||
|
||||
featureResolvers = _buildFeatureInternalResolvers(modelClass, interface.db)
|
||||
|
||||
if paginationParams and hasattr(result, "items"):
|
||||
enriched = enrichRowsWithFkLabels(_itemsToDicts(result.items), modelClass)
|
||||
enriched = enrichRowsWithFkLabels(
|
||||
_itemsToDicts(result.items), modelClass,
|
||||
extraResolvers=featureResolvers or None,
|
||||
)
|
||||
return {
|
||||
"items": enriched,
|
||||
"pagination": PaginationMetadata(
|
||||
|
|
@ -2445,7 +2480,10 @@ def _paginatedReadEndpoint(
|
|||
).model_dump(),
|
||||
}
|
||||
items = result.items if hasattr(result, "items") else result
|
||||
enriched = enrichRowsWithFkLabels(_itemsToDicts(items), modelClass)
|
||||
enriched = enrichRowsWithFkLabels(
|
||||
_itemsToDicts(items), modelClass,
|
||||
extraResolvers=featureResolvers or None,
|
||||
)
|
||||
return {"items": enriched, "pagination": None}
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -24,7 +24,7 @@ class WorkspaceUserSettings(PowerOnModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": True,
|
||||
"fk_target": {"db": "poweron_app", "table": "User"},
|
||||
"fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
|
||||
},
|
||||
)
|
||||
mandateId: str = Field(
|
||||
|
|
@ -34,7 +34,7 @@ class WorkspaceUserSettings(PowerOnModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": True,
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
featureInstanceId: str = Field(
|
||||
|
|
@ -44,7 +44,7 @@ class WorkspaceUserSettings(PowerOnModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": True,
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
maxAgentRounds: Optional[int] = Field(
|
||||
|
|
|
|||
|
|
@ -1599,18 +1599,19 @@ class AppObjects:
|
|||
from datetime import datetime, timezone, timedelta
|
||||
|
||||
now = datetime.now(timezone.utc)
|
||||
nowTs = now.timestamp()
|
||||
targetStatus = SubscriptionStatusEnum.TRIALING if plan.trialDays else SubscriptionStatusEnum.ACTIVE
|
||||
subscription = MandateSubscription(
|
||||
mandateId=mandateId,
|
||||
planKey=planKey,
|
||||
status=targetStatus,
|
||||
startedAt=now.isoformat(),
|
||||
currentPeriodStart=now.isoformat(),
|
||||
startedAt=nowTs,
|
||||
currentPeriodStart=nowTs,
|
||||
)
|
||||
if plan.trialDays:
|
||||
trialEnd = now + timedelta(days=plan.trialDays)
|
||||
subscription.trialEndsAt = trialEnd.isoformat()
|
||||
subscription.currentPeriodEnd = trialEnd.isoformat()
|
||||
subscription.trialEndsAt = trialEnd.timestamp()
|
||||
subscription.currentPeriodEnd = trialEnd.timestamp()
|
||||
|
||||
subInterface = _getSubRoot()
|
||||
subInterface.createSubscription(subscription)
|
||||
|
|
@ -1716,19 +1717,19 @@ class AppObjects:
|
|||
|
||||
targetStatus = SubscriptionStatusEnum.TRIALING if plan and plan.trialDays else SubscriptionStatusEnum.ACTIVE
|
||||
additionalData = {
|
||||
"currentPeriodStart": now.isoformat(),
|
||||
"currentPeriodStart": now.timestamp(),
|
||||
}
|
||||
|
||||
if plan and plan.trialDays:
|
||||
trialEnd = now + timedelta(days=plan.trialDays)
|
||||
additionalData["trialEndsAt"] = trialEnd.isoformat()
|
||||
additionalData["currentPeriodEnd"] = trialEnd.isoformat()
|
||||
additionalData["trialEndsAt"] = trialEnd.timestamp()
|
||||
additionalData["currentPeriodEnd"] = trialEnd.timestamp()
|
||||
elif plan and plan.billingPeriod:
|
||||
from modules.datamodels.datamodelSubscription import BillingPeriodEnum
|
||||
if plan.billingPeriod == BillingPeriodEnum.MONTHLY:
|
||||
additionalData["currentPeriodEnd"] = (now + timedelta(days=30)).isoformat()
|
||||
additionalData["currentPeriodEnd"] = (now + timedelta(days=30)).timestamp()
|
||||
elif plan.billingPeriod == BillingPeriodEnum.YEARLY:
|
||||
additionalData["currentPeriodEnd"] = (now + timedelta(days=365)).isoformat()
|
||||
additionalData["currentPeriodEnd"] = (now + timedelta(days=365)).timestamp()
|
||||
|
||||
try:
|
||||
subInterface.transitionStatus(
|
||||
|
|
|
|||
|
|
@ -884,9 +884,10 @@ class BillingObjects:
|
|||
periodStartAt = periodStartAt.replace(tzinfo=timezone.utc)
|
||||
else:
|
||||
periodStartAt = periodStartAt.astimezone(timezone.utc)
|
||||
periodStartTs = periodStartAt.timestamp()
|
||||
settings = self.getOrCreateSettings(mandateId)
|
||||
prev = self._parseSettingsDateTime(settings.get("storagePeriodStartAt"))
|
||||
if prev is not None and abs((prev - periodStartAt).total_seconds()) < 2:
|
||||
prev = settings.get("storagePeriodStartAt")
|
||||
if prev is not None and abs(prev - periodStartTs) < 2:
|
||||
return
|
||||
from modules.interfaces.interfaceDbSubscription import getRootInterface as _getSubRoot
|
||||
|
||||
|
|
@ -896,7 +897,7 @@ class BillingObjects:
|
|||
{
|
||||
"storageHighWatermarkMB": usedMB,
|
||||
"storageBilledUpToMB": 0.0,
|
||||
"storagePeriodStartAt": periodStartAt,
|
||||
"storagePeriodStartAt": periodStartTs,
|
||||
},
|
||||
)
|
||||
logger.info(
|
||||
|
|
@ -1044,18 +1045,9 @@ class BillingObjects:
|
|||
if not periodStart or not periodEnd:
|
||||
return None
|
||||
|
||||
if isinstance(periodStart, str):
|
||||
periodStart = datetime.fromisoformat(periodStart)
|
||||
if isinstance(periodEnd, str):
|
||||
periodEnd = datetime.fromisoformat(periodEnd)
|
||||
if periodStart.tzinfo is None:
|
||||
periodStart = periodStart.replace(tzinfo=timezone.utc)
|
||||
if periodEnd.tzinfo is None:
|
||||
periodEnd = periodEnd.replace(tzinfo=timezone.utc)
|
||||
|
||||
now = datetime.now(timezone.utc)
|
||||
totalSeconds = (periodEnd - periodStart).total_seconds()
|
||||
remainingSeconds = max((periodEnd - now).total_seconds(), 0)
|
||||
nowTs = datetime.now(timezone.utc).timestamp()
|
||||
totalSeconds = periodEnd - periodStart
|
||||
remainingSeconds = max(periodEnd - nowTs, 0)
|
||||
proRataFraction = remainingSeconds / totalSeconds if totalSeconds > 0 else 0
|
||||
|
||||
amount = round(abs(delta) * plan.budgetAiPerUserCHF * proRataFraction, 2)
|
||||
|
|
@ -1488,7 +1480,7 @@ class BillingObjects:
|
|||
@staticmethod
|
||||
def _mapPaginationColumns(pagination: PaginationParams) -> PaginationParams:
|
||||
"""Remap frontend column names to DB column names in filters and sort."""
|
||||
_COL_MAP = {"createdAt": "sysCreatedAt"}
|
||||
_COL_MAP: dict = {}
|
||||
_ENRICHED_COLS = {"mandateName", "userName", "mandateId", "userId"}
|
||||
import copy
|
||||
p = copy.deepcopy(pagination)
|
||||
|
|
@ -1974,7 +1966,6 @@ class BillingObjects:
|
|||
) -> List[str]:
|
||||
"""SQL DISTINCT for filter-values on BillingTransaction, scoped by mandates."""
|
||||
_COLUMN_MAP = {
|
||||
"createdAt": "sysCreatedAt",
|
||||
"mandateId": "accountId",
|
||||
"mandateName": "accountId",
|
||||
}
|
||||
|
|
|
|||
|
|
@ -224,7 +224,7 @@ class SubscriptionObjects:
|
|||
|
||||
updateData = {"status": toStatus.value}
|
||||
if toStatus in TERMINAL_STATUSES and not (additionalData or {}).get("endedAt"):
|
||||
updateData["endedAt"] = datetime.now(timezone.utc).isoformat()
|
||||
updateData["endedAt"] = datetime.now(timezone.utc).timestamp()
|
||||
if additionalData:
|
||||
updateData.update(additionalData)
|
||||
|
||||
|
|
@ -244,7 +244,7 @@ class SubscriptionObjects:
|
|||
|
||||
result = self.db.recordModify(MandateSubscription, subscriptionId, {
|
||||
"status": SubscriptionStatusEnum.EXPIRED.value,
|
||||
"endedAt": datetime.now(timezone.utc).isoformat(),
|
||||
"endedAt": datetime.now(timezone.utc).timestamp(),
|
||||
})
|
||||
logger.info("Force-expired subscription %s (was %s)", subscriptionId, currentStatus)
|
||||
return result
|
||||
|
|
|
|||
|
|
@ -25,6 +25,7 @@ GROUP-Berechtigung:
|
|||
import logging
|
||||
import json
|
||||
import math
|
||||
import re
|
||||
from typing import List, Dict, Any, Optional, Type, Union
|
||||
from pydantic import BaseModel
|
||||
from modules.datamodels.datamodelRbac import AccessRuleContext
|
||||
|
|
@ -35,6 +36,138 @@ from modules.security.rootAccess import getRootDbAppConnector
|
|||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
_ISO_DATE_RE = re.compile(r"^\d{4}-\d{2}-\d{2}$")
|
||||
|
||||
|
||||
def _rbacAppendPaginationDictFilter(
|
||||
key: str,
|
||||
val: Dict[str, Any],
|
||||
colType: str,
|
||||
whereConditions: List[str],
|
||||
whereValues: List[Any],
|
||||
) -> None:
|
||||
"""Append SQL for one pagination ``filters`` dict entry (operator + value).
|
||||
|
||||
Mirrors ``connectorDbPostgre._buildPaginationClauses`` semantics so numeric
|
||||
comparisons use ``::double precision`` instead of lexicographic ``::TEXT``.
|
||||
"""
|
||||
op = val.get("operator", "equals")
|
||||
v = val.get("value", "")
|
||||
isNumericCol = colType in ("INTEGER", "DOUBLE PRECISION")
|
||||
|
||||
if op in ("equals", "eq"):
|
||||
if colType == "BOOLEAN":
|
||||
whereConditions.append(f'COALESCE("{key}", FALSE) = %s')
|
||||
whereValues.append(str(v).lower() == "true")
|
||||
elif isNumericCol:
|
||||
try:
|
||||
whereConditions.append(f'"{key}"::double precision = %s')
|
||||
whereValues.append(float(v))
|
||||
except (ValueError, TypeError):
|
||||
whereConditions.append(f'"{key}"::TEXT = %s')
|
||||
whereValues.append(str(v))
|
||||
else:
|
||||
whereConditions.append(f'"{key}"::TEXT = %s')
|
||||
whereValues.append(str(v))
|
||||
return
|
||||
|
||||
if op == "contains":
|
||||
whereConditions.append(f'"{key}"::TEXT ILIKE %s')
|
||||
whereValues.append(f"%{v}%")
|
||||
return
|
||||
if op == "startsWith":
|
||||
whereConditions.append(f'"{key}"::TEXT ILIKE %s')
|
||||
whereValues.append(f"{v}%")
|
||||
return
|
||||
if op == "endsWith":
|
||||
whereConditions.append(f'"{key}"::TEXT ILIKE %s')
|
||||
whereValues.append(f"%{v}")
|
||||
return
|
||||
|
||||
if op in ("gt", "gte", "lt", "lte"):
|
||||
sqlOp = {"gt": ">", "gte": ">=", "lt": "<", "lte": "<="}[op]
|
||||
if isNumericCol:
|
||||
try:
|
||||
whereConditions.append(f'"{key}"::double precision {sqlOp} %s')
|
||||
whereValues.append(float(v))
|
||||
except (ValueError, TypeError):
|
||||
whereConditions.append(f'"{key}"::TEXT {sqlOp} %s')
|
||||
whereValues.append(str(v))
|
||||
else:
|
||||
whereConditions.append(f'"{key}"::TEXT {sqlOp} %s')
|
||||
whereValues.append(str(v))
|
||||
return
|
||||
|
||||
if op == "between" and isinstance(v, dict):
|
||||
fromVal = v.get("from", "")
|
||||
toVal = v.get("to", "")
|
||||
if not fromVal and not toVal:
|
||||
return
|
||||
isDateVal = bool(fromVal and _ISO_DATE_RE.match(str(fromVal))) or bool(
|
||||
toVal and _ISO_DATE_RE.match(str(toVal))
|
||||
)
|
||||
if isNumericCol and isDateVal:
|
||||
from datetime import datetime as _dt, timezone as _tz
|
||||
if fromVal and toVal:
|
||||
fromTs = _dt.strptime(str(fromVal), "%Y-%m-%d").replace(tzinfo=_tz.utc).timestamp()
|
||||
toTs = _dt.strptime(str(toVal), "%Y-%m-%d").replace(
|
||||
hour=23, minute=59, second=59, tzinfo=_tz.utc
|
||||
).timestamp()
|
||||
whereConditions.append(f'"{key}" >= %s AND "{key}" <= %s')
|
||||
whereValues.extend([fromTs, toTs])
|
||||
elif fromVal:
|
||||
fromTs = _dt.strptime(str(fromVal), "%Y-%m-%d").replace(tzinfo=_tz.utc).timestamp()
|
||||
whereConditions.append(f'"{key}" >= %s')
|
||||
whereValues.append(fromTs)
|
||||
else:
|
||||
toTs = _dt.strptime(str(toVal), "%Y-%m-%d").replace(
|
||||
hour=23, minute=59, second=59, tzinfo=_tz.utc
|
||||
).timestamp()
|
||||
whereConditions.append(f'"{key}" <= %s')
|
||||
whereValues.append(toTs)
|
||||
elif isNumericCol:
|
||||
try:
|
||||
if fromVal and toVal:
|
||||
whereConditions.append(
|
||||
f'"{key}"::double precision >= %s AND "{key}"::double precision <= %s'
|
||||
)
|
||||
whereValues.extend([float(fromVal), float(toVal)])
|
||||
elif fromVal:
|
||||
whereConditions.append(f'"{key}"::double precision >= %s')
|
||||
whereValues.append(float(fromVal))
|
||||
elif toVal:
|
||||
whereConditions.append(f'"{key}"::double precision <= %s')
|
||||
whereValues.append(float(toVal))
|
||||
except (ValueError, TypeError):
|
||||
pass
|
||||
else:
|
||||
if fromVal and toVal:
|
||||
whereConditions.append(f'"{key}"::TEXT >= %s AND "{key}"::TEXT <= %s')
|
||||
whereValues.extend([str(fromVal), str(toVal)])
|
||||
elif fromVal:
|
||||
whereConditions.append(f'"{key}"::TEXT >= %s')
|
||||
whereValues.append(str(fromVal))
|
||||
elif toVal:
|
||||
whereConditions.append(f'"{key}"::TEXT <= %s')
|
||||
whereValues.append(str(toVal))
|
||||
return
|
||||
|
||||
if op == "in" and isinstance(v, list):
|
||||
if not v:
|
||||
whereConditions.append("1 = 0")
|
||||
else:
|
||||
whereConditions.append(f'"{key}"::TEXT = ANY(%s)')
|
||||
whereValues.append([str(x) for x in v])
|
||||
return
|
||||
if op == "notIn" and isinstance(v, list):
|
||||
if v:
|
||||
whereConditions.append(f'NOT ("{key}"::TEXT = ANY(%s))')
|
||||
whereValues.append([str(x) for x in v])
|
||||
return
|
||||
|
||||
whereConditions.append(f'"{key}"::TEXT ILIKE %s')
|
||||
whereValues.append(str(v))
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Namespace-Mapping für statische Tabellen
|
||||
|
|
@ -401,36 +534,10 @@ def getRecordsetPaginatedWithRBAC(
|
|||
whereConditions.append(f'("{key}" IS NULL OR "{key}"::TEXT = \'\')')
|
||||
continue
|
||||
if isinstance(val, dict):
|
||||
op = val.get("operator", "equals")
|
||||
v = val.get("value", "")
|
||||
if op in ("equals", "eq"):
|
||||
whereConditions.append(f'"{key}"::TEXT = %s')
|
||||
whereValues.append(str(v))
|
||||
elif op == "contains":
|
||||
whereConditions.append(f'"{key}"::TEXT ILIKE %s')
|
||||
whereValues.append(f"%{v}%")
|
||||
elif op == "startsWith":
|
||||
whereConditions.append(f'"{key}"::TEXT ILIKE %s')
|
||||
whereValues.append(f"{v}%")
|
||||
elif op == "endsWith":
|
||||
whereConditions.append(f'"{key}"::TEXT ILIKE %s')
|
||||
whereValues.append(f"%{v}")
|
||||
elif op in ("gt", "gte", "lt", "lte"):
|
||||
sqlOp = {"gt": ">", "gte": ">=", "lt": "<", "lte": "<="}[op]
|
||||
whereConditions.append(f'"{key}"::TEXT {sqlOp} %s')
|
||||
whereValues.append(str(v))
|
||||
elif op == "between":
|
||||
fromVal = v.get("from", "") if isinstance(v, dict) else ""
|
||||
toVal = v.get("to", "") if isinstance(v, dict) else ""
|
||||
if fromVal and toVal:
|
||||
whereConditions.append(f'"{key}"::TEXT >= %s AND "{key}"::TEXT <= %s')
|
||||
whereValues.extend([str(fromVal), str(toVal)])
|
||||
elif fromVal:
|
||||
whereConditions.append(f'"{key}"::TEXT >= %s')
|
||||
whereValues.append(str(fromVal))
|
||||
elif toVal:
|
||||
whereConditions.append(f'"{key}"::TEXT <= %s')
|
||||
whereValues.append(str(toVal))
|
||||
colType = fields.get(key, "TEXT")
|
||||
_rbacAppendPaginationDictFilter(
|
||||
key, val, colType, whereConditions, whereValues
|
||||
)
|
||||
else:
|
||||
whereConditions.append(f'"{key}"::TEXT ILIKE %s')
|
||||
whereValues.append(str(val))
|
||||
|
|
@ -587,29 +694,10 @@ def getDistinctColumnValuesWithRBAC(
|
|||
whereConditions.append(f'("{key}" IS NULL OR "{key}"::TEXT = \'\')')
|
||||
continue
|
||||
if isinstance(val, dict):
|
||||
op = val.get("operator", "equals")
|
||||
v = val.get("value", "")
|
||||
if op in ("equals", "eq"):
|
||||
whereConditions.append(f'"{key}"::TEXT = %s')
|
||||
whereValues.append(str(v))
|
||||
elif op == "contains":
|
||||
whereConditions.append(f'"{key}"::TEXT ILIKE %s')
|
||||
whereValues.append(f"%{v}%")
|
||||
elif op == "between":
|
||||
fromVal = v.get("from", "") if isinstance(v, dict) else ""
|
||||
toVal = v.get("to", "") if isinstance(v, dict) else ""
|
||||
if fromVal and toVal:
|
||||
whereConditions.append(f'"{key}"::TEXT >= %s AND "{key}"::TEXT <= %s')
|
||||
whereValues.extend([str(fromVal), str(toVal)])
|
||||
elif fromVal:
|
||||
whereConditions.append(f'"{key}"::TEXT >= %s')
|
||||
whereValues.append(str(fromVal))
|
||||
elif toVal:
|
||||
whereConditions.append(f'"{key}"::TEXT <= %s')
|
||||
whereValues.append(str(toVal))
|
||||
else:
|
||||
whereConditions.append(f'"{key}"::TEXT ILIKE %s')
|
||||
whereValues.append(str(v) if isinstance(v, str) else str(val))
|
||||
colType = fields.get(key, "TEXT")
|
||||
_rbacAppendPaginationDictFilter(
|
||||
key, val, colType, whereConditions, whereValues
|
||||
)
|
||||
else:
|
||||
whereConditions.append(f'"{key}"::TEXT ILIKE %s')
|
||||
whereValues.append(str(val))
|
||||
|
|
|
|||
|
|
@ -475,6 +475,9 @@ def list_feature_instances(
|
|||
if mode == "filterValues":
|
||||
if not column:
|
||||
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
|
||||
from modules.routes.routeHelpers import enrichRowsWithFkLabels
|
||||
from modules.datamodels.datamodelFeatures import FeatureInstance
|
||||
enrichRowsWithFkLabels(items, FeatureInstance)
|
||||
return handleFilterValuesInMemory(items, column, pagination)
|
||||
|
||||
if mode == "ids":
|
||||
|
|
|
|||
|
|
@ -929,42 +929,17 @@ def list_roles(
|
|||
if mode == "filterValues":
|
||||
if not column:
|
||||
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
|
||||
from modules.routes.routeHelpers import handleFilterValuesInMemory
|
||||
from modules.routes.routeHelpers import handleFilterValuesInMemory, enrichRowsWithFkLabels
|
||||
enrichRowsWithFkLabels(result, Role)
|
||||
return handleFilterValuesInMemory(result, column, pagination)
|
||||
|
||||
if mode == "ids":
|
||||
from modules.routes.routeHelpers import handleIdsInMemory
|
||||
return handleIdsInMemory(result, pagination)
|
||||
|
||||
# Apply search, filtering and sorting if pagination requested
|
||||
if paginationParams:
|
||||
# Apply search (if search term provided in filters)
|
||||
searchTerm = paginationParams.filters.get("search", "").lower() if paginationParams.filters else ""
|
||||
if searchTerm:
|
||||
searchedResult = []
|
||||
for item in result:
|
||||
roleLabel = (item.get("roleLabel") or "").lower()
|
||||
descText = (item.get("description") or "").lower()
|
||||
scopeType = (item.get("scopeType") or "").lower()
|
||||
|
||||
if searchTerm in roleLabel or searchTerm in descText or searchTerm in scopeType:
|
||||
searchedResult.append(item)
|
||||
result = searchedResult
|
||||
|
||||
# Apply filtering (if filters provided)
|
||||
if paginationParams.filters:
|
||||
# Use the interface's filter method
|
||||
filteredResult = interface._applyFilters(result, paginationParams.filters)
|
||||
else:
|
||||
filteredResult = result
|
||||
|
||||
# Apply sorting (in order of sortFields)
|
||||
if paginationParams.sort:
|
||||
sortedResult = interface._applySorting(filteredResult, paginationParams.sort)
|
||||
else:
|
||||
sortedResult = filteredResult
|
||||
|
||||
# Apply pagination
|
||||
from modules.routes.routeHelpers import applyFiltersAndSort
|
||||
sortedResult = applyFiltersAndSort(result, paginationParams)
|
||||
totalItems = len(sortedResult)
|
||||
totalPages = math.ceil(totalItems / paginationParams.pageSize) if totalItems > 0 else 0
|
||||
startIdx = (paginationParams.page - 1) * paginationParams.pageSize
|
||||
|
|
|
|||
|
|
@ -36,37 +36,47 @@ def _applySortFilterSearch(
|
|||
search: Optional[str] = None,
|
||||
searchableKeys: Optional[List[str]] = None,
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""Apply sort, filter and search to a list of dicts in-memory."""
|
||||
"""Apply sort, filter and search to a list of dicts in-memory.
|
||||
|
||||
Delegates to the shared ``applyFiltersAndSort`` from routeHelpers so that
|
||||
date-range filters (``between`` operator) and null/empty filters work
|
||||
consistently across all in-memory routes.
|
||||
"""
|
||||
from modules.routes.routeHelpers import applyFiltersAndSort
|
||||
from modules.datamodels.datamodelPagination import PaginationParams, SortField
|
||||
|
||||
filtersDict: Optional[Dict[str, Any]] = None
|
||||
if filtersJson:
|
||||
try:
|
||||
filters = json.loads(filtersJson) if isinstance(filtersJson, str) else filtersJson
|
||||
if isinstance(filters, dict):
|
||||
for key, val in filters.items():
|
||||
if val is None or val == "":
|
||||
continue
|
||||
if isinstance(val, list):
|
||||
items = [r for r in items if str(r.get(key, "")) in [str(v) for v in val]]
|
||||
else:
|
||||
items = [r for r in items if str(r.get(key, "")).lower() == str(val).lower()]
|
||||
filtersDict = json.loads(filtersJson) if isinstance(filtersJson, str) else filtersJson
|
||||
except (json.JSONDecodeError, TypeError):
|
||||
pass
|
||||
|
||||
if search and searchableKeys:
|
||||
needle = search.lower()
|
||||
items = [r for r in items if any(needle in str(r.get(k, "")).lower() for k in searchableKeys)]
|
||||
if filtersDict is None:
|
||||
filtersDict = {}
|
||||
filtersDict["search"] = search
|
||||
|
||||
sortList = None
|
||||
if sortJson:
|
||||
try:
|
||||
sortList = json.loads(sortJson) if isinstance(sortJson, str) else sortJson
|
||||
if isinstance(sortList, list):
|
||||
for sortDef in reversed(sortList):
|
||||
field = sortDef.get("field", "")
|
||||
desc = sortDef.get("direction", "asc") == "desc"
|
||||
items.sort(key=lambda r, f=field: (r.get(f) is None, r.get(f, "")), reverse=desc)
|
||||
raw = json.loads(sortJson) if isinstance(sortJson, str) else sortJson
|
||||
if isinstance(raw, list):
|
||||
sortList = raw
|
||||
except (json.JSONDecodeError, TypeError):
|
||||
pass
|
||||
|
||||
return items
|
||||
if not filtersDict and not sortList:
|
||||
return items
|
||||
|
||||
sortFields = [SortField(**s) for s in sortList] if sortList else []
|
||||
params = PaginationParams.model_construct(
|
||||
page=1,
|
||||
pageSize=len(items) or 1,
|
||||
filters=filtersDict or {},
|
||||
sort=sortFields,
|
||||
)
|
||||
return applyFiltersAndSort(items, params)
|
||||
|
||||
|
||||
def _distinctColumnValues(items: List[Dict[str, Any]], column: str) -> List[Optional[str]]:
|
||||
|
|
|
|||
|
|
@ -244,7 +244,7 @@ class TransactionResponse(BaseModel):
|
|||
aicoreProvider: Optional[str]
|
||||
aicoreModel: Optional[str] = None
|
||||
createdByUserId: Optional[str] = None
|
||||
createdAt: Optional[datetime]
|
||||
sysCreatedAt: Optional[datetime] = None
|
||||
mandateId: Optional[str] = None
|
||||
mandateName: Optional[str] = None
|
||||
|
||||
|
|
@ -311,7 +311,7 @@ class UserTransactionResponse(BaseModel):
|
|||
aicoreProvider: Optional[str]
|
||||
aicoreModel: Optional[str] = None
|
||||
createdByUserId: Optional[str] = None
|
||||
createdAt: Optional[datetime]
|
||||
sysCreatedAt: Optional[datetime] = None
|
||||
mandateId: Optional[str] = None
|
||||
mandateName: Optional[str] = None
|
||||
userId: Optional[str] = None
|
||||
|
|
@ -515,7 +515,7 @@ def getTransactions(
|
|||
aicoreProvider=t.get("aicoreProvider"),
|
||||
aicoreModel=t.get("aicoreModel"),
|
||||
createdByUserId=t.get("createdByUserId"),
|
||||
createdAt=t.get("sysCreatedAt"),
|
||||
sysCreatedAt=t.get("sysCreatedAt"),
|
||||
mandateId=t.get("mandateId"),
|
||||
mandateName=t.get("mandateName")
|
||||
))
|
||||
|
|
@ -1073,13 +1073,9 @@ def handleSubscriptionCheckoutCompleted(session, eventId: str) -> None:
|
|||
stripeSub = stripeToDict(stripe.Subscription.retrieve(stripeSubId, expand=["items"]))
|
||||
|
||||
if stripeSub.get("current_period_start"):
|
||||
stripeData["currentPeriodStart"] = datetime.fromtimestamp(
|
||||
stripeSub["current_period_start"], tz=timezone.utc
|
||||
).isoformat()
|
||||
stripeData["currentPeriodStart"] = float(stripeSub["current_period_start"])
|
||||
if stripeSub.get("current_period_end"):
|
||||
stripeData["currentPeriodEnd"] = datetime.fromtimestamp(
|
||||
stripeSub["current_period_end"], tz=timezone.utc
|
||||
).isoformat()
|
||||
stripeData["currentPeriodEnd"] = float(stripeSub["current_period_end"])
|
||||
|
||||
from modules.serviceCenter.services.serviceSubscription.stripeBootstrap import getStripePricesForPlan
|
||||
priceMapping = getStripePricesForPlan(planKey)
|
||||
|
|
@ -1211,13 +1207,9 @@ def _handleSubscriptionWebhook(event) -> None:
|
|||
|
||||
periodData: Dict[str, Any] = {}
|
||||
if obj.get("current_period_start"):
|
||||
periodData["currentPeriodStart"] = datetime.fromtimestamp(
|
||||
obj["current_period_start"], tz=timezone.utc
|
||||
).isoformat()
|
||||
periodData["currentPeriodStart"] = float(obj["current_period_start"])
|
||||
if obj.get("current_period_end"):
|
||||
periodData["currentPeriodEnd"] = datetime.fromtimestamp(
|
||||
obj["current_period_end"], tz=timezone.utc
|
||||
).isoformat()
|
||||
periodData["currentPeriodEnd"] = float(obj["current_period_end"])
|
||||
if periodData:
|
||||
subInterface.updateFields(subId, periodData)
|
||||
|
||||
|
|
@ -1462,7 +1454,7 @@ def _enrichTransactionRows(transactions) -> List[Dict[str, Any]]:
|
|||
aicoreProvider=t.get("aicoreProvider"),
|
||||
aicoreModel=t.get("aicoreModel"),
|
||||
createdByUserId=t.get("createdByUserId"),
|
||||
createdAt=t.get("sysCreatedAt")
|
||||
sysCreatedAt=t.get("sysCreatedAt")
|
||||
)
|
||||
result.append(row.model_dump())
|
||||
|
||||
|
|
@ -1588,7 +1580,7 @@ def getMandateViewTransactions(
|
|||
aicoreProvider=t.get("aicoreProvider"),
|
||||
aicoreModel=t.get("aicoreModel"),
|
||||
createdByUserId=t.get("createdByUserId"),
|
||||
createdAt=t.get("sysCreatedAt"),
|
||||
sysCreatedAt=t.get("sysCreatedAt"),
|
||||
mandateId=t.get("mandateId"),
|
||||
mandateName=t.get("mandateName")
|
||||
))
|
||||
|
|
@ -1879,7 +1871,7 @@ def getUserViewTransactions(
|
|||
aicoreProvider=d.get("aicoreProvider"),
|
||||
aicoreModel=d.get("aicoreModel"),
|
||||
createdByUserId=d.get("createdByUserId"),
|
||||
createdAt=d.get("sysCreatedAt") or d.get("createdAt"),
|
||||
sysCreatedAt=d.get("sysCreatedAt"),
|
||||
mandateId=d.get("mandateId"),
|
||||
mandateName=d.get("mandateName"),
|
||||
userId=d.get("userId"),
|
||||
|
|
|
|||
|
|
@ -179,7 +179,9 @@ async def get_connections(
|
|||
if not column:
|
||||
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
|
||||
try:
|
||||
return handleFilterValuesInMemory(_buildEnhancedItems(), column, pagination)
|
||||
items = _buildEnhancedItems()
|
||||
enrichRowsWithFkLabels(items, UserConnection)
|
||||
return handleFilterValuesInMemory(items, column, pagination)
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting filter values for connections: {str(e)}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
|
|
|||
|
|
@ -259,7 +259,6 @@ def get_files(
|
|||
)
|
||||
|
||||
from modules.routes.routeHelpers import (
|
||||
handleFilterValuesInMemory,
|
||||
handleIdsMode,
|
||||
parseCrossFilterPagination,
|
||||
)
|
||||
|
|
@ -275,16 +274,11 @@ def get_files(
|
|||
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
|
||||
crossPagination = parseCrossFilterPagination(column, pagination)
|
||||
recordFilter = {"sysCreatedBy": managementInterface.userId}
|
||||
try:
|
||||
from fastapi.responses import JSONResponse
|
||||
values = managementInterface.db.getDistinctColumnValues(
|
||||
FileItem, column, crossPagination, recordFilter
|
||||
)
|
||||
return JSONResponse(content=sorted(values, key=lambda v: str(v).lower()))
|
||||
except Exception:
|
||||
result = managementInterface.getAllFiles(pagination=None)
|
||||
items = [r.model_dump() if hasattr(r, 'model_dump') else r for r in result]
|
||||
return handleFilterValuesInMemory(items, column, pagination)
|
||||
from fastapi.responses import JSONResponse
|
||||
values = managementInterface.db.getDistinctColumnValues(
|
||||
FileItem, column, crossPagination, recordFilter
|
||||
)
|
||||
return JSONResponse(content=sorted(values, key=lambda v: str(v).lower()))
|
||||
|
||||
if mode == "ids":
|
||||
recordFilter = {"sysCreatedBy": managementInterface.userId}
|
||||
|
|
|
|||
|
|
@ -140,15 +140,9 @@ def get_mandates(
|
|||
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
|
||||
if isPlatformAdmin:
|
||||
crossPagination = parseCrossFilterPagination(column, pagination)
|
||||
try:
|
||||
from fastapi.responses import JSONResponse
|
||||
values = appInterface.db.getDistinctColumnValues(Mandate, column, crossPagination)
|
||||
return JSONResponse(content=sorted(values, key=lambda v: str(v).lower()))
|
||||
except Exception:
|
||||
result = appInterface.getAllMandates(pagination=None)
|
||||
items = result if isinstance(result, list) else (result.items if hasattr(result, 'items') else result)
|
||||
items = [i.model_dump() if hasattr(i, 'model_dump') else i for i in items]
|
||||
return handleFilterValuesInMemory(items, column, pagination)
|
||||
from fastapi.responses import JSONResponse
|
||||
values = appInterface.db.getDistinctColumnValues(Mandate, column, crossPagination)
|
||||
return JSONResponse(content=sorted(values, key=lambda v: str(v).lower()))
|
||||
else:
|
||||
mandateItems = []
|
||||
for mid in adminMandateIds:
|
||||
|
|
@ -325,18 +319,19 @@ def create_mandate(
|
|||
plan = BUILTIN_PLANS.get(planKey)
|
||||
if plan:
|
||||
now = datetime.now(timezone.utc)
|
||||
nowTs = now.timestamp()
|
||||
targetStatus = SubscriptionStatusEnum.TRIALING if plan.trialDays else SubscriptionStatusEnum.ACTIVE
|
||||
sub = MandateSubscription(
|
||||
mandateId=str(newMandate.id),
|
||||
planKey=planKey,
|
||||
status=targetStatus,
|
||||
recurring=plan.autoRenew and not plan.trialDays,
|
||||
startedAt=now,
|
||||
currentPeriodStart=now,
|
||||
startedAt=nowTs,
|
||||
currentPeriodStart=nowTs,
|
||||
)
|
||||
if plan.trialDays:
|
||||
sub.trialEndsAt = now + timedelta(days=plan.trialDays)
|
||||
sub.currentPeriodEnd = now + timedelta(days=plan.trialDays)
|
||||
sub.trialEndsAt = (now + timedelta(days=plan.trialDays)).timestamp()
|
||||
sub.currentPeriodEnd = (now + timedelta(days=plan.trialDays)).timestamp()
|
||||
subInterface = _getSubRoot()
|
||||
subInterface.createSubscription(sub)
|
||||
logger.info(f"Created {targetStatus.value} subscription ({planKey}) for mandate {newMandate.id}")
|
||||
|
|
|
|||
|
|
@ -100,14 +100,9 @@ def _getUserFilterOrIds(context, paginationJson, column=None, idsMode=False):
|
|||
if idsMode:
|
||||
return handleIdsMode(rootInterface.db, UserInDB, paginationJson)
|
||||
crossPagination = parseCrossFilterPagination(column, paginationJson)
|
||||
try:
|
||||
from fastapi.responses import JSONResponse
|
||||
values = rootInterface.db.getDistinctColumnValues(UserInDB, column, crossPagination)
|
||||
return JSONResponse(content=sorted(values, key=lambda v: v.lower()))
|
||||
except Exception:
|
||||
users = appInterface.getAllUsers()
|
||||
items = [u.model_dump() if hasattr(u, 'model_dump') else u for u in users]
|
||||
return handleFilterValuesInMemory(items, column, paginationJson, requestLang)
|
||||
from fastapi.responses import JSONResponse
|
||||
values = rootInterface.db.getDistinctColumnValues(UserInDB, column, crossPagination)
|
||||
return JSONResponse(content=sorted(values, key=lambda v: v.lower()))
|
||||
|
||||
rootInterface = getRootInterface()
|
||||
userMandates = rootInterface.getUserMandates(str(context.user.id))
|
||||
|
|
|
|||
|
|
@ -111,27 +111,28 @@ def resolveRoleLabels(ids: List[str]) -> Dict[str, Optional[str]]:
|
|||
_BUILTIN_FK_RESOLVERS: Dict[str, Callable[[List[str]], Dict[str, str]]] = {
|
||||
"Mandate": resolveMandateLabels,
|
||||
"FeatureInstance": resolveInstanceLabels,
|
||||
"User": resolveUserLabels,
|
||||
"UserInDB": resolveUserLabels,
|
||||
"Role": resolveRoleLabels,
|
||||
}
|
||||
|
||||
|
||||
def _buildLabelResolversFromModel(modelClass: type) -> Dict[str, Callable[[List[str]], Dict[str, str]]]:
|
||||
"""
|
||||
Auto-build labelResolvers dict from fk_model / fk_target annotations on a Pydantic model.
|
||||
Maps field names to resolver functions for all fields that have a known FK target.
|
||||
Unlike ``_get_fk_sort_meta`` this does NOT require ``fk_label_field`` — the
|
||||
builtin resolvers already know which column to read.
|
||||
Auto-build labelResolvers dict from ``json_schema_extra.fk_target`` on a Pydantic model.
|
||||
Maps field names to resolver functions when the target table has a registered builtin
|
||||
resolver and ``fk_target.labelField`` is set (non-None).
|
||||
"""
|
||||
resolvers: Dict[str, Callable[[List[str]], Dict[str, str]]] = {}
|
||||
for name, fieldInfo in modelClass.model_fields.items():
|
||||
extra = fieldInfo.json_schema_extra
|
||||
if not extra or not isinstance(extra, dict):
|
||||
continue
|
||||
fkModel = extra.get("fk_model")
|
||||
tgt = extra.get("fk_target")
|
||||
if not fkModel and isinstance(tgt, dict):
|
||||
fkModel = tgt.get("table")
|
||||
if not isinstance(tgt, dict):
|
||||
continue
|
||||
if tgt.get("labelField") is None:
|
||||
continue
|
||||
fkModel = tgt.get("table")
|
||||
if fkModel and fkModel in _BUILTIN_FK_RESOLVERS:
|
||||
resolvers[name] = _BUILTIN_FK_RESOLVERS[fkModel]
|
||||
return resolvers
|
||||
|
|
@ -147,7 +148,7 @@ def enrichRowsWithFkLabels(
|
|||
"""Add ``{field}Label`` columns to each row for every FK field that has a
|
||||
registered resolver.
|
||||
|
||||
``modelClass`` — if provided, resolvers are auto-built from ``fk_model``
|
||||
``modelClass`` — if provided, resolvers are auto-built from ``fk_target``
|
||||
annotations on the Pydantic model (via ``_buildLabelResolversFromModel``).
|
||||
|
||||
``labelResolvers`` — explicit resolver map that overrides auto-built ones.
|
||||
|
|
@ -354,7 +355,14 @@ def applyFiltersAndSort(
|
|||
operator = "equals"
|
||||
value = filterValue
|
||||
|
||||
if value is None or value == "":
|
||||
if value is None:
|
||||
result = [
|
||||
item for item in result
|
||||
if item.get(field) is None or item.get(field) == ""
|
||||
]
|
||||
continue
|
||||
|
||||
if value == "":
|
||||
continue
|
||||
|
||||
result = [
|
||||
|
|
@ -455,6 +463,19 @@ def _matchesBetween(itemValue: Any, itemStr: str, value: Any) -> bool:
|
|||
if toTs is not None:
|
||||
return itemNum <= toTs
|
||||
except (ValueError, TypeError):
|
||||
# Numeric range (e.g. FormGeneratorTable column filters on INTEGER/FLOAT)
|
||||
try:
|
||||
itemNum = float(itemValue)
|
||||
fromNum = float(fromVal) if fromVal not in (None, "") else None
|
||||
toNum = float(toVal) if toVal not in (None, "") else None
|
||||
if fromNum is not None and toNum is not None:
|
||||
return fromNum <= itemNum <= toNum
|
||||
if fromNum is not None:
|
||||
return itemNum >= fromNum
|
||||
if toNum is not None:
|
||||
return itemNum <= toNum
|
||||
except (ValueError, TypeError):
|
||||
pass
|
||||
fromStr = str(fromVal).lower() if fromVal else ""
|
||||
toStr = str(toVal).lower() if toVal else ""
|
||||
if fromStr and toStr:
|
||||
|
|
@ -470,13 +491,42 @@ def _extractDistinctValues(
|
|||
items: List[Dict[str, Any]],
|
||||
columnKey: str,
|
||||
requestLang: Optional[str] = None,
|
||||
) -> List[Optional[str]]:
|
||||
) -> list:
|
||||
"""Extract sorted distinct display values for a column from enriched items.
|
||||
|
||||
When the items contain a ``{columnKey}Label`` field (FK enrichment convention),
|
||||
returns ``{value, label}`` objects so the frontend shows human-readable
|
||||
labels in filter dropdowns. Otherwise returns plain strings.
|
||||
|
||||
Includes ``None`` as the last entry when at least one row has a null/empty
|
||||
value — this enables the "(Leer)" filter option in the frontend.
|
||||
"""
|
||||
_MISSING = object()
|
||||
labelKey = f"{columnKey}Label"
|
||||
hasFkLabels = any(labelKey in item for item in items[:20])
|
||||
|
||||
if hasFkLabels:
|
||||
byVal: Dict[str, str] = {}
|
||||
hasEmpty = False
|
||||
for item in items:
|
||||
val = item.get(columnKey, _MISSING)
|
||||
if val is _MISSING:
|
||||
continue
|
||||
if val is None or val == "":
|
||||
hasEmpty = True
|
||||
continue
|
||||
strVal = str(val)
|
||||
if strVal not in byVal:
|
||||
label = item.get(labelKey)
|
||||
byVal[strVal] = str(label) if label else f"NA({strVal[:8]})"
|
||||
result: list = sorted(
|
||||
[{"value": v, "label": l} for v, l in byVal.items()],
|
||||
key=lambda x: x["label"].lower(),
|
||||
)
|
||||
if hasEmpty:
|
||||
result.append(None)
|
||||
return result
|
||||
|
||||
values = set()
|
||||
hasEmpty = False
|
||||
for item in items:
|
||||
|
|
@ -496,7 +546,7 @@ def _extractDistinctValues(
|
|||
values.add(text)
|
||||
else:
|
||||
values.add(str(val))
|
||||
result: List[Optional[str]] = sorted(values, key=lambda v: v.lower())
|
||||
result = sorted(values, key=lambda v: v.lower())
|
||||
if hasEmpty:
|
||||
result.append(None)
|
||||
return result
|
||||
|
|
|
|||
|
|
@ -85,8 +85,8 @@ class InvitationResponse(BaseModel):
|
|||
roleIds: List[str]
|
||||
targetUsername: Optional[str]
|
||||
email: Optional[str]
|
||||
createdBy: str
|
||||
createdAt: float
|
||||
sysCreatedBy: str
|
||||
sysCreatedAt: float
|
||||
expiresAt: float
|
||||
usedBy: Optional[str]
|
||||
usedAt: Optional[float]
|
||||
|
|
@ -227,8 +227,8 @@ def create_invitation(
|
|||
roleIds=data.roleIds,
|
||||
targetUsername=target_username_val,
|
||||
email=email_val,
|
||||
createdBy=str(context.user.id),
|
||||
createdAt=currentTime,
|
||||
sysCreatedBy=str(context.user.id),
|
||||
sysCreatedAt=currentTime,
|
||||
expiresAt=expiresAt,
|
||||
usedBy=None,
|
||||
usedAt=None,
|
||||
|
|
@ -250,8 +250,8 @@ def create_invitation(
|
|||
roleIds=data.roleIds,
|
||||
targetUsername=target_username_val,
|
||||
email=email_val,
|
||||
createdBy=str(context.user.id),
|
||||
createdAt=currentTime,
|
||||
sysCreatedBy=str(context.user.id),
|
||||
sysCreatedAt=currentTime,
|
||||
expiresAt=expiresAt,
|
||||
usedBy=None,
|
||||
usedAt=None,
|
||||
|
|
@ -268,7 +268,6 @@ def create_invitation(
|
|||
roleIds=data.roleIds,
|
||||
targetUsername=target_username_val,
|
||||
email=email_val,
|
||||
createdBy=str(context.user.id),
|
||||
expiresAt=expiresAt,
|
||||
maxUses=data.maxUses
|
||||
)
|
||||
|
|
@ -368,8 +367,6 @@ def create_invitation(
|
|||
f"to {target_desc}, expires in {data.expiresInHours}h"
|
||||
)
|
||||
|
||||
# Invitation extends PowerOnModel: recordCreate/_saveRecord set sysCreatedAt and sysCreatedBy automatically.
|
||||
# API response uses createdAt/createdBy; map from the system fields (no separate createdAt column on model).
|
||||
return InvitationResponse(
|
||||
id=str(createdRecord.get("id")),
|
||||
token=str(createdRecord.get("token")),
|
||||
|
|
@ -378,8 +375,8 @@ def create_invitation(
|
|||
roleIds=createdRecord.get("roleIds", []),
|
||||
targetUsername=createdRecord.get("targetUsername"),
|
||||
email=createdRecord.get("email"),
|
||||
createdBy=str(createdRecord["sysCreatedBy"]),
|
||||
createdAt=float(createdRecord["sysCreatedAt"]),
|
||||
sysCreatedBy=str(createdRecord["sysCreatedBy"]),
|
||||
sysCreatedAt=float(createdRecord["sysCreatedAt"]),
|
||||
expiresAt=createdRecord.get("expiresAt"),
|
||||
usedBy=createdRecord.get("usedBy"),
|
||||
usedAt=createdRecord.get("usedAt"),
|
||||
|
|
@ -470,7 +467,9 @@ def list_invitations(
|
|||
if not column:
|
||||
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
|
||||
try:
|
||||
return handleFilterValuesInMemory(_buildInvitationItems(), column, pagination)
|
||||
items = _buildInvitationItems()
|
||||
enrichRowsWithFkLabels(items, Invitation)
|
||||
return handleFilterValuesInMemory(items, column, pagination)
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting filter values for invitations: {e}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
|
|
|||
|
|
@ -106,11 +106,11 @@ def _autoActivatePending(subInterface, pendingSub: Dict[str, Any]) -> None:
|
|||
now = datetime.now(timezone.utc)
|
||||
targetStatus = SubscriptionStatusEnum.TRIALING if plan and plan.trialDays else SubscriptionStatusEnum.ACTIVE
|
||||
|
||||
additionalData = {"currentPeriodStart": now.isoformat()}
|
||||
additionalData = {"currentPeriodStart": now.timestamp()}
|
||||
if plan and plan.trialDays:
|
||||
trialEnd = now + timedelta(days=plan.trialDays)
|
||||
additionalData["trialEndsAt"] = trialEnd.isoformat()
|
||||
additionalData["currentPeriodEnd"] = trialEnd.isoformat()
|
||||
additionalData["trialEndsAt"] = trialEnd.timestamp()
|
||||
additionalData["currentPeriodEnd"] = trialEnd.timestamp()
|
||||
|
||||
try:
|
||||
subInterface.transitionStatus(
|
||||
|
|
|
|||
|
|
@ -486,7 +486,11 @@ def getAllSubscriptions(
|
|||
if mode == "filterValues":
|
||||
if not column:
|
||||
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
|
||||
return handleFilterValuesInMemory(_buildEnrichedSubscriptions(), column, pagination)
|
||||
from modules.routes.routeHelpers import enrichRowsWithFkLabels
|
||||
from modules.datamodels.datamodelSubscription import MandateSubscription
|
||||
items = _buildEnrichedSubscriptions()
|
||||
enrichRowsWithFkLabels(items, MandateSubscription)
|
||||
return handleFilterValuesInMemory(items, column, pagination)
|
||||
|
||||
if mode == "ids":
|
||||
return handleIdsInMemory(_buildEnrichedSubscriptions(), pagination)
|
||||
|
|
|
|||
|
|
@ -581,12 +581,9 @@ def _buildIntegrationsOverviewPayload(userId: str, user=None) -> Dict[str, Any]:
|
|||
|
||||
# --- Extractors (registered extensions, unique + per-class rows) ---
|
||||
try:
|
||||
from modules.serviceCenter.services.serviceExtraction.mainServiceExtraction import ExtractionService
|
||||
from modules.serviceCenter.services.serviceExtraction.subRegistry import ExtractorRegistry
|
||||
from modules.serviceCenter.services.serviceExtraction.subRegistry import getExtractorRegistry
|
||||
|
||||
if ExtractionService._sharedExtractorRegistry is None:
|
||||
ExtractionService._sharedExtractorRegistry = ExtractorRegistry()
|
||||
reg = ExtractionService._sharedExtractorRegistry
|
||||
reg = getExtractorRegistry()
|
||||
ext_map = reg.getExtensionToMimeMap()
|
||||
uniq = sorted({str(k).upper() for k in ext_map.keys() if k and "." not in str(k)})
|
||||
out["extractorExtensions"] = uniq
|
||||
|
|
|
|||
|
|
@ -132,7 +132,7 @@ def _updateJob(jobId: str, fields: Dict[str, Any]) -> None:
|
|||
def _markStarted(jobId: str) -> None:
|
||||
_updateJob(jobId, {
|
||||
"status": BackgroundJobStatusEnum.RUNNING.value,
|
||||
"startedAt": datetime.now(timezone.utc),
|
||||
"startedAt": datetime.now(timezone.utc).timestamp(),
|
||||
})
|
||||
|
||||
|
||||
|
|
@ -141,7 +141,7 @@ def _markSuccess(jobId: str, result: Optional[Dict[str, Any]]) -> None:
|
|||
"status": BackgroundJobStatusEnum.SUCCESS.value,
|
||||
"result": result or {},
|
||||
"progress": 100,
|
||||
"finishedAt": datetime.now(timezone.utc),
|
||||
"finishedAt": datetime.now(timezone.utc).timestamp(),
|
||||
})
|
||||
|
||||
|
||||
|
|
@ -150,7 +150,7 @@ def _markError(jobId: str, errorMessage: str) -> None:
|
|||
_updateJob(jobId, {
|
||||
"status": BackgroundJobStatusEnum.ERROR.value,
|
||||
"errorMessage": truncated,
|
||||
"finishedAt": datetime.now(timezone.utc),
|
||||
"finishedAt": datetime.now(timezone.utc).timestamp(),
|
||||
})
|
||||
|
||||
|
||||
|
|
@ -211,7 +211,7 @@ def listJobs(
|
|||
out = [r for r in out if r.get("featureInstanceId") == featureInstanceId]
|
||||
if jobType is not None:
|
||||
out = [r for r in out if r.get("jobType") == jobType]
|
||||
out.sort(key=lambda r: r.get("createdAt") or "", reverse=True)
|
||||
out.sort(key=lambda r: r.get("createdAt") or 0, reverse=True)
|
||||
return out[:limit]
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -142,6 +142,7 @@ class SubscriptionService:
|
|||
self._cleanupPreparatorySubscriptions(mid)
|
||||
|
||||
now = datetime.now(timezone.utc)
|
||||
nowTs = now.timestamp()
|
||||
if plan.trialDays:
|
||||
initialStatus = SubscriptionStatusEnum.TRIALING
|
||||
elif isPaid:
|
||||
|
|
@ -154,19 +155,19 @@ class SubscriptionService:
|
|||
planKey=planKey,
|
||||
status=initialStatus,
|
||||
recurring=plan.autoRenew and not plan.trialDays,
|
||||
startedAt=now,
|
||||
currentPeriodStart=now,
|
||||
startedAt=nowTs,
|
||||
currentPeriodStart=nowTs,
|
||||
snapshotPricePerUserCHF=plan.pricePerUserCHF,
|
||||
snapshotPricePerInstanceCHF=plan.pricePerFeatureInstanceCHF,
|
||||
)
|
||||
|
||||
if plan.trialDays:
|
||||
sub.trialEndsAt = now + timedelta(days=plan.trialDays)
|
||||
sub.trialEndsAt = (now + timedelta(days=plan.trialDays)).timestamp()
|
||||
|
||||
if plan.billingPeriod == BillingPeriodEnum.MONTHLY:
|
||||
sub.currentPeriodEnd = now + timedelta(days=30)
|
||||
sub.currentPeriodEnd = (now + timedelta(days=30)).timestamp()
|
||||
elif plan.billingPeriod == BillingPeriodEnum.YEARLY:
|
||||
sub.currentPeriodEnd = now + timedelta(days=365)
|
||||
sub.currentPeriodEnd = (now + timedelta(days=365)).timestamp()
|
||||
|
||||
created = self._interface.createSubscription(sub)
|
||||
|
||||
|
|
@ -310,11 +311,8 @@ class SubscriptionService:
|
|||
)
|
||||
if currentOperative and currentOperative.get("currentPeriodEnd") and not isTrialPredecessor:
|
||||
periodEnd = currentOperative["currentPeriodEnd"]
|
||||
if isinstance(periodEnd, str):
|
||||
periodEnd = datetime.fromisoformat(periodEnd)
|
||||
trialEndTs = int(periodEnd.timestamp())
|
||||
subscriptionData["trial_end"] = trialEndTs
|
||||
self._interface.updateFields(subRecord["id"], {"effectiveFrom": periodEnd.isoformat()})
|
||||
subscriptionData["trial_end"] = int(periodEnd)
|
||||
self._interface.updateFields(subRecord["id"], {"effectiveFrom": periodEnd})
|
||||
|
||||
session = None
|
||||
for attempt in range(2):
|
||||
|
|
@ -509,9 +507,7 @@ class SubscriptionService:
|
|||
|
||||
periodEnd = sub.get("currentPeriodEnd")
|
||||
if periodEnd:
|
||||
if isinstance(periodEnd, str):
|
||||
periodEnd = datetime.fromisoformat(periodEnd)
|
||||
if periodEnd <= datetime.now(timezone.utc):
|
||||
if periodEnd <= datetime.now(timezone.utc).timestamp():
|
||||
raise ValueError("Cannot reactivate — period has already ended")
|
||||
|
||||
stripeSubId = sub.get("stripeSubscriptionId")
|
||||
|
|
|
|||
|
|
@ -18,6 +18,7 @@ StripePlanPrice is updated. Other stale active Prices on the same Product
|
|||
"""
|
||||
|
||||
import logging
|
||||
from concurrent.futures import ThreadPoolExecutor, as_completed
|
||||
from typing import Dict, Optional
|
||||
|
||||
from modules.connectors.connectorDbPostgre import DatabaseConnector
|
||||
|
|
@ -242,8 +243,142 @@ def _validateStripeIdsExist(stripe, mapping: StripePlanPrice) -> bool:
|
|||
return False
|
||||
|
||||
|
||||
def _processOnePlan(
|
||||
stripe,
|
||||
planKey: str,
|
||||
plan: SubscriptionPlan,
|
||||
existingMapping: Optional[StripePlanPrice],
|
||||
) -> None:
|
||||
"""Reconcile or provision Stripe Products/Prices for a single plan.
|
||||
|
||||
Each call uses its own DB connection so it is safe to run in a thread pool.
|
||||
"""
|
||||
stripePeriod = _PERIOD_TO_STRIPE.get(plan.billingPeriod)
|
||||
if not stripePeriod:
|
||||
return
|
||||
|
||||
interval = stripePeriod["interval"]
|
||||
intervalCount = int(stripePeriod.get("interval_count") or 1)
|
||||
db = _getBillingDb()
|
||||
|
||||
if existingMapping:
|
||||
mapping = existingMapping
|
||||
hasAllPrices = mapping.stripePriceIdUsers and mapping.stripePriceIdInstances
|
||||
hasAllProducts = mapping.stripeProductIdUsers and mapping.stripeProductIdInstances
|
||||
if hasAllPrices and hasAllProducts:
|
||||
if _validateStripeIdsExist(stripe, mapping):
|
||||
changed = False
|
||||
reconciledUsers = _reconcilePrice(
|
||||
stripe, mapping.stripeProductIdUsers, mapping.stripePriceIdUsers,
|
||||
plan.pricePerUserCHF, interval, f"{planKey} — Benutzer-Lizenz",
|
||||
intervalCount,
|
||||
)
|
||||
if reconciledUsers != mapping.stripePriceIdUsers:
|
||||
changed = True
|
||||
|
||||
reconciledInstances = _reconcilePrice(
|
||||
stripe, mapping.stripeProductIdInstances, mapping.stripePriceIdInstances,
|
||||
plan.pricePerFeatureInstanceCHF, interval, f"{planKey} — Modul",
|
||||
intervalCount,
|
||||
)
|
||||
if reconciledInstances != mapping.stripePriceIdInstances:
|
||||
changed = True
|
||||
|
||||
_archiveOtherRecurringPrices(
|
||||
stripe, mapping.stripeProductIdUsers, reconciledUsers, interval, intervalCount,
|
||||
)
|
||||
_archiveOtherRecurringPrices(
|
||||
stripe, mapping.stripeProductIdInstances, reconciledInstances, interval, intervalCount,
|
||||
)
|
||||
|
||||
if changed:
|
||||
db.recordModify(StripePlanPrice, mapping.id, {
|
||||
"stripePriceIdUsers": reconciledUsers,
|
||||
"stripePriceIdInstances": reconciledInstances,
|
||||
})
|
||||
logger.info(
|
||||
"Reconciled Stripe prices for plan %s to catalog (CHF): users=%s, instances=%s",
|
||||
planKey, reconciledUsers, reconciledInstances,
|
||||
)
|
||||
else:
|
||||
logger.debug("Stripe prices up-to-date for plan %s", planKey)
|
||||
return
|
||||
else:
|
||||
logger.warning(
|
||||
"Stored Stripe IDs for plan %s reference unknown objects "
|
||||
"(likely wrong Stripe account or copied DB) — re-provisioning.",
|
||||
planKey,
|
||||
)
|
||||
|
||||
productIdUsers = None
|
||||
productIdInstances = None
|
||||
priceIdUsers = None
|
||||
priceIdInstances = None
|
||||
|
||||
if plan.pricePerUserCHF > 0:
|
||||
productIdUsers = _findStripeProduct(stripe, planKey, "users")
|
||||
if not productIdUsers:
|
||||
productIdUsers = _createStripeProduct(
|
||||
stripe, "Benutzer-Lizenzen", f"Benutzer-Lizenzen für {plan.title or planKey}",
|
||||
planKey, "users",
|
||||
)
|
||||
userCents = int(round(plan.pricePerUserCHF * 100))
|
||||
priceIdUsers = _findExistingStripePrice(
|
||||
stripe, productIdUsers, userCents, interval, intervalCount,
|
||||
)
|
||||
if not priceIdUsers:
|
||||
priceIdUsers = _createStripePrice(
|
||||
stripe, productIdUsers, plan.pricePerUserCHF, interval, f"{planKey} — Benutzer-Lizenz",
|
||||
intervalCount,
|
||||
)
|
||||
_archiveOtherRecurringPrices(stripe, productIdUsers, priceIdUsers, interval, intervalCount)
|
||||
|
||||
if plan.pricePerFeatureInstanceCHF > 0:
|
||||
productIdInstances = _findStripeProduct(stripe, planKey, "instances")
|
||||
if not productIdInstances:
|
||||
productIdInstances = _createStripeProduct(
|
||||
stripe, "Module", f"Module für {plan.title or planKey}",
|
||||
planKey, "instances",
|
||||
)
|
||||
instCents = int(round(plan.pricePerFeatureInstanceCHF * 100))
|
||||
priceIdInstances = _findExistingStripePrice(
|
||||
stripe, productIdInstances, instCents, interval, intervalCount,
|
||||
)
|
||||
if not priceIdInstances:
|
||||
priceIdInstances = _createStripePrice(
|
||||
stripe, productIdInstances, plan.pricePerFeatureInstanceCHF, interval,
|
||||
f"{planKey} — Modul",
|
||||
intervalCount,
|
||||
)
|
||||
_archiveOtherRecurringPrices(
|
||||
stripe, productIdInstances, priceIdInstances, interval, intervalCount,
|
||||
)
|
||||
|
||||
persistData = {
|
||||
"stripeProductId": "",
|
||||
"stripeProductIdUsers": productIdUsers,
|
||||
"stripeProductIdInstances": productIdInstances,
|
||||
"stripePriceIdUsers": priceIdUsers,
|
||||
"stripePriceIdInstances": priceIdInstances,
|
||||
}
|
||||
|
||||
if existingMapping:
|
||||
db.recordModify(StripePlanPrice, existingMapping.id, persistData)
|
||||
else:
|
||||
db.recordCreate(StripePlanPrice, StripePlanPrice(planKey=planKey, **persistData).model_dump())
|
||||
|
||||
logger.info(
|
||||
"Stripe bootstrapped for %s: users=%s/%s, instances=%s/%s",
|
||||
planKey, productIdUsers, priceIdUsers, productIdInstances, priceIdInstances,
|
||||
)
|
||||
|
||||
|
||||
def bootstrapStripePrices() -> None:
|
||||
"""Ensure all paid plans have separate Stripe Products for users and instances."""
|
||||
"""Ensure all paid plans have separate Stripe Products for users and instances.
|
||||
|
||||
Plans are processed in parallel (one thread per plan) to reduce boot time.
|
||||
Each thread uses its own DB connection; Stripe SDK is thread-safe.
|
||||
"""
|
||||
try:
|
||||
from modules.shared.stripeClient import getStripeClient
|
||||
stripe = getStripeClient()
|
||||
|
|
@ -251,132 +386,29 @@ def bootstrapStripePrices() -> None:
|
|||
logger.error("Stripe not configured — cannot bootstrap subscription prices: %s", e)
|
||||
return
|
||||
|
||||
db = _getBillingDb()
|
||||
existing = _loadExistingMappings(db)
|
||||
existing = _loadExistingMappings(_getBillingDb())
|
||||
|
||||
for planKey, plan in BUILTIN_PLANS.items():
|
||||
if plan.billingPeriod == BillingPeriodEnum.NONE:
|
||||
continue
|
||||
if plan.pricePerUserCHF == 0 and plan.pricePerFeatureInstanceCHF == 0:
|
||||
continue
|
||||
plans = [
|
||||
(planKey, plan)
|
||||
for planKey, plan in BUILTIN_PLANS.items()
|
||||
if plan.billingPeriod != BillingPeriodEnum.NONE
|
||||
and (plan.pricePerUserCHF > 0 or plan.pricePerFeatureInstanceCHF > 0)
|
||||
]
|
||||
|
||||
stripePeriod = _PERIOD_TO_STRIPE.get(plan.billingPeriod)
|
||||
if not stripePeriod:
|
||||
continue
|
||||
if not plans:
|
||||
return
|
||||
|
||||
interval = stripePeriod["interval"]
|
||||
intervalCount = int(stripePeriod.get("interval_count") or 1)
|
||||
|
||||
if planKey in existing:
|
||||
mapping = existing[planKey]
|
||||
hasAllPrices = mapping.stripePriceIdUsers and mapping.stripePriceIdInstances
|
||||
hasAllProducts = mapping.stripeProductIdUsers and mapping.stripeProductIdInstances
|
||||
if hasAllPrices and hasAllProducts:
|
||||
if _validateStripeIdsExist(stripe, mapping):
|
||||
changed = False
|
||||
reconciledUsers = _reconcilePrice(
|
||||
stripe, mapping.stripeProductIdUsers, mapping.stripePriceIdUsers,
|
||||
plan.pricePerUserCHF, interval, f"{planKey} — Benutzer-Lizenz",
|
||||
intervalCount,
|
||||
)
|
||||
if reconciledUsers != mapping.stripePriceIdUsers:
|
||||
changed = True
|
||||
|
||||
reconciledInstances = _reconcilePrice(
|
||||
stripe, mapping.stripeProductIdInstances, mapping.stripePriceIdInstances,
|
||||
plan.pricePerFeatureInstanceCHF, interval, f"{planKey} — Modul",
|
||||
intervalCount,
|
||||
)
|
||||
if reconciledInstances != mapping.stripePriceIdInstances:
|
||||
changed = True
|
||||
|
||||
_archiveOtherRecurringPrices(
|
||||
stripe, mapping.stripeProductIdUsers, reconciledUsers, interval, intervalCount,
|
||||
)
|
||||
_archiveOtherRecurringPrices(
|
||||
stripe, mapping.stripeProductIdInstances, reconciledInstances, interval, intervalCount,
|
||||
)
|
||||
|
||||
if changed:
|
||||
db.recordModify(StripePlanPrice, mapping.id, {
|
||||
"stripePriceIdUsers": reconciledUsers,
|
||||
"stripePriceIdInstances": reconciledInstances,
|
||||
})
|
||||
logger.info(
|
||||
"Reconciled Stripe prices for plan %s to catalog (CHF): users=%s, instances=%s",
|
||||
planKey, reconciledUsers, reconciledInstances,
|
||||
)
|
||||
else:
|
||||
logger.debug("Stripe prices up-to-date for plan %s", planKey)
|
||||
continue
|
||||
else:
|
||||
logger.warning(
|
||||
"Stored Stripe IDs for plan %s reference unknown objects "
|
||||
"(likely wrong Stripe account or copied DB) — re-provisioning.",
|
||||
planKey,
|
||||
)
|
||||
|
||||
productIdUsers = None
|
||||
productIdInstances = None
|
||||
priceIdUsers = None
|
||||
priceIdInstances = None
|
||||
|
||||
if plan.pricePerUserCHF > 0:
|
||||
productIdUsers = _findStripeProduct(stripe, planKey, "users")
|
||||
if not productIdUsers:
|
||||
productIdUsers = _createStripeProduct(
|
||||
stripe, "Benutzer-Lizenzen", f"Benutzer-Lizenzen für {plan.title or planKey}",
|
||||
planKey, "users",
|
||||
)
|
||||
userCents = int(round(plan.pricePerUserCHF * 100))
|
||||
priceIdUsers = _findExistingStripePrice(
|
||||
stripe, productIdUsers, userCents, interval, intervalCount,
|
||||
)
|
||||
if not priceIdUsers:
|
||||
priceIdUsers = _createStripePrice(
|
||||
stripe, productIdUsers, plan.pricePerUserCHF, interval, f"{planKey} — Benutzer-Lizenz",
|
||||
intervalCount,
|
||||
)
|
||||
_archiveOtherRecurringPrices(stripe, productIdUsers, priceIdUsers, interval, intervalCount)
|
||||
|
||||
if plan.pricePerFeatureInstanceCHF > 0:
|
||||
productIdInstances = _findStripeProduct(stripe, planKey, "instances")
|
||||
if not productIdInstances:
|
||||
productIdInstances = _createStripeProduct(
|
||||
stripe, "Module", f"Module für {plan.title or planKey}",
|
||||
planKey, "instances",
|
||||
)
|
||||
instCents = int(round(plan.pricePerFeatureInstanceCHF * 100))
|
||||
priceIdInstances = _findExistingStripePrice(
|
||||
stripe, productIdInstances, instCents, interval, intervalCount,
|
||||
)
|
||||
if not priceIdInstances:
|
||||
priceIdInstances = _createStripePrice(
|
||||
stripe, productIdInstances, plan.pricePerFeatureInstanceCHF, interval,
|
||||
f"{planKey} — Modul",
|
||||
intervalCount,
|
||||
)
|
||||
_archiveOtherRecurringPrices(
|
||||
stripe, productIdInstances, priceIdInstances, interval, intervalCount,
|
||||
)
|
||||
|
||||
persistData = {
|
||||
"stripeProductId": "",
|
||||
"stripeProductIdUsers": productIdUsers,
|
||||
"stripeProductIdInstances": productIdInstances,
|
||||
"stripePriceIdUsers": priceIdUsers,
|
||||
"stripePriceIdInstances": priceIdInstances,
|
||||
with ThreadPoolExecutor(max_workers=len(plans)) as executor:
|
||||
futures = {
|
||||
executor.submit(_processOnePlan, stripe, planKey, plan, existing.get(planKey)): planKey
|
||||
for planKey, plan in plans
|
||||
}
|
||||
|
||||
if planKey in existing:
|
||||
db.recordModify(StripePlanPrice, existing[planKey].id, persistData)
|
||||
else:
|
||||
db.recordCreate(StripePlanPrice, StripePlanPrice(planKey=planKey, **persistData).model_dump())
|
||||
|
||||
logger.info(
|
||||
"Stripe bootstrapped for %s: users=%s/%s, instances=%s/%s",
|
||||
planKey, productIdUsers, priceIdUsers, productIdInstances, priceIdInstances,
|
||||
)
|
||||
for future in as_completed(futures):
|
||||
planKey = futures[future]
|
||||
try:
|
||||
future.result()
|
||||
except Exception as e:
|
||||
logger.error("Stripe bootstrap failed for plan %s: %s", planKey, e)
|
||||
|
||||
|
||||
def getStripePricesForPlan(planKey: str) -> Optional[StripePlanPrice]:
|
||||
|
|
|
|||
|
|
@ -291,15 +291,11 @@ def getModelAttributeDefinitions(modelClass: Type[BaseModel] = None, userLanguag
|
|||
}
|
||||
|
||||
mergedExtra = _mergedFieldJsonExtra(field)
|
||||
fkModelName = mergedExtra.get("fk_model")
|
||||
fkTarget = mergedExtra.get("fk_target")
|
||||
if not fkModelName and isinstance(fkTarget, dict) and fkTarget.get("table"):
|
||||
fkModelName = fkTarget.get("table")
|
||||
hasFk = bool(fkModelName) or (isinstance(fkTarget, dict) and bool(fkTarget.get("table")))
|
||||
if hasFk:
|
||||
attr_def["displayField"] = f"{name}Label"
|
||||
if fkModelName:
|
||||
attr_def["fkModel"] = fkModelName
|
||||
if isinstance(fkTarget, dict) and fkTarget.get("table"):
|
||||
attr_def["fkModel"] = fkTarget["table"]
|
||||
if fkTarget.get("labelField"):
|
||||
attr_def["displayField"] = f"{name}Label"
|
||||
|
||||
# Render hints (Excel-like format string + i18n-resolved label tokens).
|
||||
# Labels are resolved server-side via resolveText() so the FE renders them
|
||||
|
|
@ -318,6 +314,37 @@ def getModelAttributeDefinitions(modelClass: Type[BaseModel] = None, userLanguag
|
|||
return {"model": model_label, "attributes": attributes}
|
||||
|
||||
|
||||
def _loadFeatureDatamodelClasses(modelClasses: Dict[str, Type[BaseModel]]) -> None:
|
||||
"""Register Pydantic models from ``modules.features.*`` ``datamodel*.py`` files."""
|
||||
features_dir = os.path.join(
|
||||
os.path.dirname(os.path.dirname(__file__)), "features"
|
||||
)
|
||||
if not os.path.isdir(features_dir):
|
||||
return
|
||||
for root, _dirs, files in os.walk(features_dir):
|
||||
for fileName in files:
|
||||
if not fileName.startswith("datamodel") or not fileName.endswith(".py"):
|
||||
continue
|
||||
fullPath = os.path.join(root, fileName)
|
||||
relPath = os.path.relpath(fullPath, features_dir)
|
||||
moduleRel = os.path.splitext(relPath)[0].replace("\\", ".").replace("/", ".")
|
||||
module_name = f"modules.features.{moduleRel}"
|
||||
try:
|
||||
module = importlib.import_module(module_name)
|
||||
for name, obj in inspect.getmembers(module):
|
||||
if (
|
||||
inspect.isclass(obj)
|
||||
and issubclass(obj, BaseModel)
|
||||
and obj != BaseModel
|
||||
):
|
||||
modelClasses[name] = obj
|
||||
except Exception as e:
|
||||
logger.warning(
|
||||
f"Error importing feature datamodel module {module_name}: {str(e)}",
|
||||
exc_info=True,
|
||||
)
|
||||
|
||||
|
||||
def getModelClasses() -> Dict[str, Type[BaseModel]]:
|
||||
"""
|
||||
Dynamically get all model classes from all model modules.
|
||||
|
|
@ -375,6 +402,8 @@ def getModelClasses() -> Dict[str, Type[BaseModel]]:
|
|||
logger.warning(f"Error importing module {module_name}: {str(e)}", exc_info=True)
|
||||
# Continue with other modules even if one fails
|
||||
|
||||
_loadFeatureDatamodelClasses(modelClasses)
|
||||
|
||||
return modelClasses
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -241,3 +241,32 @@ def _invalidateFkCache() -> None:
|
|||
with _lock:
|
||||
_cachedRelationships = None
|
||||
_cachedTableToDb = None
|
||||
|
||||
|
||||
_FK_TARGET_REQUIRED_KEYS = {"db", "table", "labelField"}


def validateFkTargets() -> List[str]:
    """Validate every ``fk_target`` dict across all registered PowerOnModel subclasses.

    Returns a list of error strings (empty = all good).
    Each ``fk_target`` must be a dict containing at least the keys ``db``,
    ``table``, and ``labelField`` (``labelField`` may be ``None``); additional
    keys are tolerated.
    """
    _ensureModelsLoaded()
    errors: List[str] = []
    for tableName, modelCls in MODEL_REGISTRY.items():
        for fieldName, fieldInfo in modelCls.model_fields.items():
            extra = fieldInfo.json_schema_extra
            # json_schema_extra may also be None or a callable; only dict metadata can carry fk_target.
            if not isinstance(extra, dict):
                continue
            fkTarget = extra.get("fk_target")
            if fkTarget is None:
                continue
            if not isinstance(fkTarget, dict):
                errors.append(f"{tableName}.{fieldName}: fk_target is not a dict ({type(fkTarget).__name__})")
                continue
            # Set difference: any required key absent from the dict is reported.
            missing = _FK_TARGET_REQUIRED_KEYS - fkTarget.keys()
            if missing:
                errors.append(f"{tableName}.{fieldName}: fk_target missing keys {sorted(missing)}")
    return errors
|
||||
|
|
|
|||
|
|
@ -15,7 +15,7 @@ syncToAccounting (via DataRef on documents[0]).
|
|||
|
||||
import json
|
||||
import logging
|
||||
from datetime import datetime
|
||||
from datetime import datetime, timezone
|
||||
from typing import Dict, Any, List, Optional
|
||||
|
||||
from modules.datamodels.datamodelChat import ActionResult, ActionDocument
|
||||
|
|
@ -79,6 +79,31 @@ def _parseIsoDate(value: Any) -> Optional[datetime]:
|
|||
return None
|
||||
|
||||
|
||||
def _toTimestamp(value: Any) -> Optional[float]:
|
||||
"""Convert ISO date string or numeric value to UTC midnight unix timestamp."""
|
||||
if value is None or value == "":
|
||||
return None
|
||||
if isinstance(value, (int, float)):
|
||||
return float(value)
|
||||
raw = _cleanStr(value)
|
||||
if not raw:
|
||||
return None
|
||||
try:
|
||||
return datetime.strptime(raw[:10], "%Y-%m-%d").replace(tzinfo=timezone.utc).timestamp()
|
||||
except ValueError:
|
||||
return None
|
||||
|
||||
|
||||
def _timestampToDatetime(value: Any) -> Optional[datetime]:
|
||||
"""Convert UTC unix timestamp (float) to datetime for proximity scoring."""
|
||||
if value is None:
|
||||
return None
|
||||
try:
|
||||
return datetime.fromtimestamp(float(value), tz=timezone.utc)
|
||||
except (ValueError, TypeError, OSError):
|
||||
return None
|
||||
|
||||
|
||||
def _normaliseAmount(value: Any) -> float:
    """Use absolute rounded amount, since bank lines are often signed."""
    magnitude = abs(_parseFloat(value))
    # Two decimal places: amounts are compared as currency values.
    return round(magnitude, 2)
|
||||
|
|
@ -103,7 +128,7 @@ def _findBestBankMatch(
|
|||
bankRef = _normaliseRef(bankPosition.get("paymentReference") or bankPosition.get("bookingReference"))
|
||||
bankAmount = _normaliseAmount(bankPosition.get("bookingAmount"))
|
||||
bankIban = _normaliseRef(bankPosition.get("payeeIban"))
|
||||
bankDate = _parseIsoDate(bankPosition.get("valuta"))
|
||||
bankDate = _timestampToDatetime(bankPosition.get("valuta"))
|
||||
bankCompany = _normaliseCompany(bankPosition.get("company"))
|
||||
|
||||
bestScore = 0
|
||||
|
|
@ -122,7 +147,7 @@ def _findBestBankMatch(
|
|||
candidateRef = _normaliseRef(candidate.get("paymentReference") or candidate.get("bookingReference"))
|
||||
candidateAmount = _normaliseAmount(candidate.get("bookingAmount"))
|
||||
candidateIban = _normaliseRef(candidate.get("payeeIban"))
|
||||
candidateDate = _parseIsoDate(candidate.get("valuta"))
|
||||
candidateDate = _timestampToDatetime(candidate.get("valuta"))
|
||||
candidateCompany = _normaliseCompany(candidate.get("company"))
|
||||
|
||||
# Strongest signal: structured payment reference / invoice reference match.
|
||||
|
|
@ -183,7 +208,7 @@ def _recordToPosition(record: Dict[str, Any], documentId: Optional[str], feature
|
|||
return {
|
||||
"documentId": documentId,
|
||||
"documentType": recDocType,
|
||||
"valuta": record.get("valuta"),
|
||||
"valuta": _toTimestamp(record.get("valuta")),
|
||||
"transactionDateTime": record.get("transactionDateTime"),
|
||||
"company": record.get("company", ""),
|
||||
"desc": record.get("desc", ""),
|
||||
|
|
@ -203,7 +228,7 @@ def _recordToPosition(record: Dict[str, Any], documentId: Optional[str], feature
|
|||
"payeeName": _cleanStr(record.get("payeeName")),
|
||||
"payeeBic": _cleanStr(record.get("payeeBic")),
|
||||
"paymentReference": _cleanStr(record.get("paymentReference")),
|
||||
"dueDate": _cleanStr(record.get("dueDate")),
|
||||
"dueDate": _toTimestamp(record.get("dueDate")),
|
||||
"featureInstanceId": featureInstanceId,
|
||||
"mandateId": mandateId,
|
||||
}
|
||||
|
|
|
|||
|
|
@ -20,6 +20,7 @@ This action does NOT trigger an external sync — use
|
|||
import json
|
||||
import logging
|
||||
import re
|
||||
from datetime import datetime as _dt, timezone as _tz
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
from modules.datamodels.datamodelChat import ActionResult
|
||||
|
|
@ -27,6 +28,26 @@ from modules.datamodels.datamodelChat import ActionResult
|
|||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _isoToTs(isoDate: Optional[str]) -> Optional[float]:
|
||||
"""``YYYY-MM-DD`` → UTC midnight unix timestamp (or None)."""
|
||||
if not isoDate:
|
||||
return None
|
||||
try:
|
||||
return _dt.strptime(isoDate.strip()[:10], "%Y-%m-%d").replace(tzinfo=_tz.utc).timestamp()
|
||||
except (ValueError, AttributeError):
|
||||
return None
|
||||
|
||||
|
||||
def _tsToIso(ts) -> Optional[str]:
|
||||
"""Unix timestamp → ``YYYY-MM-DD`` (or None)."""
|
||||
if ts is None:
|
||||
return None
|
||||
try:
|
||||
return _dt.fromtimestamp(float(ts), tz=_tz.utc).strftime("%Y-%m-%d")
|
||||
except (ValueError, TypeError, OSError):
|
||||
return None
|
||||
|
||||
|
||||
_NAME_NORMALIZE_RE = re.compile(r"[^a-z0-9]+")
|
||||
_ENTITY_TO_MODEL = {
|
||||
"contact": "TrusteeDataContact",
|
||||
|
|
@ -224,7 +245,9 @@ def _deriveRentForContact(
|
|||
if not entries or not lines:
|
||||
return [], None
|
||||
|
||||
fromDate, toDate = _parsePeriod(period)
|
||||
fromDateStr, toDateStr = _parsePeriod(period)
|
||||
fromTs = _isoToTs(fromDateStr)
|
||||
toTs = _isoToTs(toDateStr)
|
||||
accountMatcher = _accountMatcher(accountPattern)
|
||||
nameKey = _normalizeText(contact.get("name") or "")
|
||||
contactNumber = (contact.get("contactNumber") or "").strip()
|
||||
|
|
@ -236,10 +259,10 @@ def _deriveRentForContact(
|
|||
eid = e.get("id")
|
||||
if not eid:
|
||||
continue
|
||||
bDate = e.get("bookingDate") or ""
|
||||
if fromDate and bDate and bDate < fromDate:
|
||||
bDate = e.get("bookingDate")
|
||||
if fromTs is not None and bDate is not None and float(bDate) < fromTs:
|
||||
continue
|
||||
if toDate and bDate and bDate > toDate:
|
||||
if toTs is not None and bDate is not None and float(bDate) > toTs + 86399:
|
||||
continue
|
||||
descKey = _normalizeText(" ".join([e.get("description") or "", e.get("reference") or ""]))
|
||||
if (nameKey and nameKey in descKey) or (contactNumber and contactNumber in (e.get("reference") or "")):
|
||||
|
|
@ -260,7 +283,7 @@ def _deriveRentForContact(
|
|||
amount = credit - debit
|
||||
e = entryById.get(ln.get("journalEntryId"), {})
|
||||
rentLines.append({
|
||||
"date": e.get("bookingDate"),
|
||||
"date": _tsToIso(e.get("bookingDate")),
|
||||
"ref": e.get("reference"),
|
||||
"account": accountNo,
|
||||
"amount": round(amount, 2),
|
||||
|
|
|
|||
|
|
@ -8,12 +8,33 @@ Checks lastSyncAt to avoid redundant imports unless forceRefresh is set.
|
|||
import json
|
||||
import logging
|
||||
import time
|
||||
from typing import Dict, Any
|
||||
from datetime import datetime as _dt, timezone as _tz
|
||||
from typing import Dict, Any, Optional
|
||||
|
||||
from modules.datamodels.datamodelChat import ActionResult
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _isoToTs(isoDate: Optional[str]) -> Optional[float]:
|
||||
"""``YYYY-MM-DD`` → UTC midnight unix timestamp (or None)."""
|
||||
if not isoDate:
|
||||
return None
|
||||
try:
|
||||
return _dt.strptime(isoDate.strip()[:10], "%Y-%m-%d").replace(tzinfo=_tz.utc).timestamp()
|
||||
except (ValueError, AttributeError):
|
||||
return None
|
||||
|
||||
|
||||
def _tsToIso(ts) -> Optional[str]:
|
||||
"""Unix timestamp → ``YYYY-MM-DD`` (or None)."""
|
||||
if ts is None:
|
||||
return None
|
||||
try:
|
||||
return _dt.fromtimestamp(float(ts), tz=_tz.utc).strftime("%Y-%m-%d")
|
||||
except (ValueError, TypeError, OSError):
|
||||
return None
|
||||
|
||||
_SYNC_THRESHOLD_SECONDS = 3600
|
||||
|
||||
|
||||
|
|
@ -147,16 +168,18 @@ def _exportAccountingData(trusteeInterface, featureInstanceId: str, dateFrom: st
|
|||
})
|
||||
|
||||
entries = trusteeInterface.db.getRecordset(TrusteeDataJournalEntry, recordFilter=baseFilter) or []
|
||||
fromTs = _isoToTs(dateFrom)
|
||||
toTs = _isoToTs(dateTo)
|
||||
entryMap = {}
|
||||
for e in entries:
|
||||
eid = e.get("id", "")
|
||||
bDate = e.get("bookingDate", "")
|
||||
if dateFrom and bDate and bDate < dateFrom:
|
||||
bDate = e.get("bookingDate")
|
||||
if fromTs is not None and bDate is not None and float(bDate) < fromTs:
|
||||
continue
|
||||
if dateTo and bDate and bDate > dateTo:
|
||||
if toTs is not None and bDate is not None and float(bDate) > toTs + 86399:
|
||||
continue
|
||||
entryMap[eid] = {
|
||||
"date": bDate,
|
||||
"date": _tsToIso(bDate),
|
||||
"ref": e.get("reference", ""),
|
||||
"desc": e.get("description", ""),
|
||||
"amount": e.get("totalAmount", 0),
|
||||
|
|
|
|||
|
|
@ -744,8 +744,8 @@ class DynamicMode(BaseMode):
|
|||
name=name if name != 'Unknown' else 'Unknown Document',
|
||||
mimeType=mimeType if mimeType and mimeType != 'Unknown' else None,
|
||||
size=str(size) if size and size != 'Unknown' else None,
|
||||
created=str(created) if created and created != 'Unknown' else None,
|
||||
modified=str(modified) if modified and modified != 'Unknown' else None,
|
||||
created=float(created) if created is not None and created != 'Unknown' else None,
|
||||
modified=float(modified) if modified is not None and modified != 'Unknown' else None,
|
||||
typeGroup=str(typeGroup) if typeGroup and typeGroup != 'Unknown' else None,
|
||||
documentId=str(documentId) if documentId and documentId != 'Unknown' else None,
|
||||
reference=str(reference) if reference and reference != 'Unknown' else None,
|
||||
|
|
|
|||
|
|
@ -9,11 +9,17 @@ These tests exercise pure-logic paths -- no DB, no HTTP. We pass a
|
|||
would have been written to ``TrusteeDataAccountBalance``.
|
||||
"""
|
||||
|
||||
from datetime import datetime, timezone
|
||||
from typing import Any, Dict, List, Type
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
import pytest
|
||||
|
||||
|
||||
def _ts(isoDate: str) -> float:
|
||||
"""Convert ``YYYY-MM-DD`` to UTC midnight unix timestamp for test fixtures."""
|
||||
return datetime.strptime(isoDate, "%Y-%m-%d").replace(tzinfo=timezone.utc).timestamp()
|
||||
|
||||
from modules.features.trustee.accounting.accountingConnectorBase import AccountingPeriodBalance
|
||||
from modules.features.trustee.accounting.accountingDataSync import (
|
||||
AccountingDataSync,
|
||||
|
|
@ -124,6 +130,45 @@ class TestPersistBalancesConnectorPath:
|
|||
assert row["mandateId"] == "m-1"
|
||||
|
||||
|
||||
def test_connectorBalancesEnrichedWithJournalMovements(self):
    """When connector provides closingBalance but no debit/credit (e.g. RMA /gl/saldo),
    the sync should enrich from journal lines."""
    journalEntries = [
        {"id": "e1", "bookingDate": _ts("2025-06-15")},
        {"id": "e2", "bookingDate": _ts("2025-06-20")},
    ]
    journalLines = [
        {"journalEntryId": "e1", "accountNumber": "1020", "debitAmount": 500.0, "creditAmount": 0.0},
        {"journalEntryId": "e2", "accountNumber": "1020", "debitAmount": 0.0, "creditAmount": 200.0},
    ]
    fakeDb = _FakeDb(journalEntries, journalLines)
    sync = AccountingDataSync(_FakeInterface(fakeDb))

    # June (month 6) plus the year-to-date row (month 0): both carry the
    # connector's closing balance but no movement totals.
    connectorRows = [
        AccountingPeriodBalance(
            accountNumber="1020", periodYear=2025, periodMonth=month,
            openingBalance=10000.0, closingBalance=10300.0, currency="CHF",
        )
        for month in (6, 0)
    ]

    sync._persistBalances(
        "fi-1", "m-1",
        _FakeJournalEntry, _FakeJournalLine, _FakeBalance,
        connectorRows, "connector",
    )

    # Index persisted rows by (account, month) so each period can be asserted directly.
    rowsByPeriod = {(row["accountNumber"], row["periodMonth"]): row for row in fakeDb.createdRows}
    assert rowsByPeriod[("1020", 6)]["closingBalance"] == 10300.0
    assert rowsByPeriod[("1020", 6)]["debitTotal"] == 500.0
    assert rowsByPeriod[("1020", 6)]["creditTotal"] == 200.0
    assert rowsByPeriod[("1020", 0)]["debitTotal"] == 500.0
    assert rowsByPeriod[("1020", 0)]["creditTotal"] == 200.0
|
||||
|
||||
|
||||
class TestLocalFallbackCumulative:
|
||||
"""Replicates the BuHa SoHa scenario WITHOUT prior-year journal data:
|
||||
the local fallback can't recreate the prior-year carry-over (by design),
|
||||
|
|
@ -134,9 +179,9 @@ class TestLocalFallbackCumulative:
|
|||
|
||||
def test_balanceSheetAccount_cumulatesAcrossMonths(self):
|
||||
entries = [
|
||||
{"id": "e1", "bookingDate": "2025-01-15"},
|
||||
{"id": "e2", "bookingDate": "2025-02-10"},
|
||||
{"id": "e3", "bookingDate": "2025-12-20"},
|
||||
{"id": "e1", "bookingDate": _ts("2025-01-15")},
|
||||
{"id": "e2", "bookingDate": _ts("2025-02-10")},
|
||||
{"id": "e3", "bookingDate": _ts("2025-12-20")},
|
||||
]
|
||||
lines = [
|
||||
{"journalEntryId": "e1", "accountNumber": "1020", "debitAmount": 1000.0, "creditAmount": 0.0},
|
||||
|
|
@ -163,9 +208,9 @@ class TestLocalFallbackCumulative:
|
|||
|
||||
def test_incomeStatementAccount_resetsAtFiscalYearStart(self):
|
||||
entries = [
|
||||
{"id": "e1", "bookingDate": "2024-12-31"},
|
||||
{"id": "e2", "bookingDate": "2025-06-15"},
|
||||
{"id": "e3", "bookingDate": "2025-07-10"},
|
||||
{"id": "e1", "bookingDate": _ts("2024-12-31")},
|
||||
{"id": "e2", "bookingDate": _ts("2025-06-15")},
|
||||
{"id": "e3", "bookingDate": _ts("2025-07-10")},
|
||||
]
|
||||
lines = [
|
||||
{"journalEntryId": "e1", "accountNumber": "6000", "debitAmount": 99999.99, "creditAmount": 0.0},
|
||||
|
|
|
|||
Loading…
Reference in a new issue