Merge pull request #141 from valueonag/feat/demo-system-readieness
Feat/demo system readieness
commit f8853d23ca
214 changed files with 15822 additions and 3223 deletions

app.py
@@ -294,6 +294,14 @@ except Exception as e:
async def lifespan(app: FastAPI):
    logger.info("Application is starting up")

    # Validate FK metadata on all Pydantic models (fail-fast, no silent fallbacks)
    from modules.shared.fkRegistry import validateFkTargets
    fkErrors = validateFkTargets()
    if fkErrors:
        for err in fkErrors:
            logger.error("FK metadata validation: %s", err)
        raise SystemExit(f"FK metadata validation failed ({len(fkErrors)} error(s)) — fix datamodels before starting")

    # AI connectors already pre-warmed at module-load via _eager_prewarm() in aicoreModelRegistry.

    # Bootstrap database if needed (creates initial users, mandates, roles, etc.)
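The fail-fast check above only requires validateFkTargets() to return a list of human-readable problems. A minimal sketch of that idea, assuming a registry mapping table names to Pydantic model classes (the real modules.shared.fkRegistry may differ in detail):

from typing import Dict, List, Type
from pydantic import BaseModel

def validateFkTargetsSketch(registry: Dict[str, Type[BaseModel]]) -> List[str]:
    """Collect FK metadata errors instead of silently ignoring them (illustrative only)."""
    errors: List[str] = []
    for tableName, modelClass in registry.items():
        for fieldName, fieldInfo in modelClass.model_fields.items():
            extra = fieldInfo.json_schema_extra
            if not isinstance(extra, dict):
                continue
            fkTarget = extra.get("fk_target")
            if not fkTarget:
                continue
            targetTable = fkTarget.get("table")
            if targetTable not in registry:
                errors.append(f"{tableName}.{fieldName}: fk_target table '{targetTable}' is not a known model")
    return errors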
@@ -327,9 +335,9 @@ async def lifespan(app: FastAPI):

    # Sync gateway i18n registry to DB and load translation cache
    try:
        from modules.shared.i18nRegistry import _syncRegistryToDb, _loadCache
        await _syncRegistryToDb()
        await _loadCache()
        from modules.shared.i18nRegistry import syncRegistryToDb, loadCache
        await syncRegistryToDb()
        await loadCache()
        logger.info("i18n registry sync + cache load completed")
    except Exception as e:
        logger.warning(f"i18n registry sync failed (non-critical): {e}")
@@ -522,15 +530,15 @@ from modules.auth import (
# Per-request context middleware: language (Accept-Language) + user timezone (X-User-Timezone).
# Both are written into ContextVars and consumed by t() / resolveText() and getRequestNow()
# without having to thread them through every call site.
from modules.shared.i18nRegistry import _setLanguage, normalizePrimaryLanguageTag
from modules.shared.timeUtils import _setRequestTimezone
from modules.shared.i18nRegistry import setLanguage, normalizePrimaryLanguageTag
from modules.shared.timeUtils import setRequestTimezone

@app.middleware("http")
async def _requestContextMiddleware(request: Request, call_next):
    acceptLang = request.headers.get("Accept-Language", "")
    lang = normalizePrimaryLanguageTag(acceptLang, "de")
    _setLanguage(lang)
    _setRequestTimezone(request.headers.get("X-User-Timezone", ""))
    setLanguage(lang)
    setRequestTimezone(request.headers.get("X-User-Timezone", ""))
    return await call_next(request)

app.add_middleware(CSRFMiddleware)
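The middleware relies on ContextVars so that t() / resolveText() and getRequestNow() can read the per-request language and timezone without extra parameters. A minimal sketch of that mechanism, with illustrative names (the real i18nRegistry internals may differ):

import contextvars

# One ContextVar per request-scoped setting; async frameworks copy the
# context per task, so concurrent requests do not overwrite each other.
_currentLanguage: contextvars.ContextVar[str] = contextvars.ContextVar("language", default="de")

def setLanguage(lang: str) -> None:
    _currentLanguage.set(lang)

def getLanguage() -> str:
    return _currentLanguage.get()

def t(key: str, translations: dict) -> str:
    # Resolve a translation for the language set by the middleware,
    # falling back to the key itself when nothing is registered.
    return translations.get(getLanguage(), {}).get(key, key)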
BIN  assets/fonts/NotoEmoji-Regular.ttf  Normal file
Binary file not shown.
@@ -37,7 +37,8 @@
        "y": 200,
        "title": "Pro Scan-Dokument",
        "parameters": {
          "level": 1,
          "items": {"type": "ref", "nodeId": "n2", "path": ["files"]},
          "level": "auto",
          "concurrency": 1
        }
      },
@@ -77,7 +77,7 @@ Feature_SyncDelta_JIRA_DELTA_TOKEN_SECRET = DEV_ENC:Z0FBQUFBQm8xSUpEbm0yRUJ6VUJK
# Teamsbot Browser Bot Service
# For local testing: run the bot locally with `npm run dev` in service-teams-browser-bot
# The bot will connect back to localhost:8000 via WebSocket
TEAMSBOT_BROWSER_BOT_URL = https://cae-poweron-shared.redwater-53d21339.switzerlandnorth.azurecontainerapps.io
TEAMSBOT_BROWSER_BOT_URL = http://localhost:4100

# Debug Configuration
APP_DEBUG_CHAT_WORKFLOW_ENABLED = True
@@ -9,6 +9,7 @@ import logging
import importlib
import os
import time
import threading
from typing import Dict, List, Optional, Any, Tuple
from modules.datamodels.datamodelAi import AiModel
from .aicoreBase import BaseConnectorAi
@@ -31,11 +32,37 @@ class ModelRegistry:
        self._connectors: Dict[str, BaseConnectorAi] = {}
        self._lastRefresh: Optional[float] = None
        self._refreshInterval: float = 300.0 # 5 minutes
        self._refreshLock = threading.Lock()
        self._connectorsInitialized: bool = False
        self._discoveredConnectorsCache: Optional[List[BaseConnectorAi]] = None # Avoid re-instantiating on every discoverConnectors() call
        self._getAvailableModelsCache: Dict[Tuple[str, int], Tuple[List[AiModel], float]] = {} # (user_id, rbac_id) -> (models, ts)
        self._getAvailableModelsCacheTtl: float = 30.0 # seconds

    def _addModelToDict(self, model: AiModel, connectorType: str, target: Dict[str, AiModel]):
        """Add model to a dict, tolerating benign re-adds from the same connector."""
        if model.displayName in target:
            existing = target[model.displayName]
            if existing.name == model.name and existing.connectorType == model.connectorType:
                logger.debug(f"Skipping duplicate model '{model.displayName}' from same connector {connectorType}")
                return
            raise ValueError(
                f"displayName conflict '{model.displayName}': "
                f"existing name='{existing.name}' (connector: {existing.connectorType}), "
                f"new name='{model.name}' (connector: {connectorType})"
            )

        if TESTING_MAX_TOKENS_OVERRIDE is not None and model.maxTokens > TESTING_MAX_TOKENS_OVERRIDE:
            originalMaxTokens = model.maxTokens
            model.maxTokens = TESTING_MAX_TOKENS_OVERRIDE
            logger.debug(f"TESTING: Overrode maxTokens for {model.displayName}: {originalMaxTokens} -> {TESTING_MAX_TOKENS_OVERRIDE}")

        target[model.displayName] = model
        logger.debug(f"Registered model: {model.displayName} (name: {model.name}) from {connectorType}")

    def _addModel(self, model: AiModel, connectorType: str):
        """Convenience wrapper for adding to self._models."""
        self._addModelToDict(model, connectorType, self._models)

    def registerConnector(self, connector: BaseConnectorAi):
        """Register a connector and collect its models."""
        connectorType = connector.getConnectorType()
@@ -47,26 +74,10 @@ class ModelRegistry:

        self._connectors[connectorType] = connector

        # Collect models from this connector
        try:
            models = connector.getCachedModels()
            for model in models:
                # Validate displayName uniqueness
                if model.displayName in self._models:
                    existingModel = self._models[model.displayName]
                    errorMsg = f"Duplicate displayName '{model.displayName}' detected! Existing model: displayName='{existingModel.displayName}', name='{existingModel.name}' (connector: {existingModel.connectorType}), New model: displayName='{model.displayName}', name='{model.name}' (connector: {connectorType}). displayName must be unique."
                    logger.error(errorMsg)
                    raise ValueError(errorMsg)

                # TODO TESTING: Override maxTokens if testing override is enabled
                if TESTING_MAX_TOKENS_OVERRIDE is not None and model.maxTokens > TESTING_MAX_TOKENS_OVERRIDE:
                    originalMaxTokens = model.maxTokens
                    model.maxTokens = TESTING_MAX_TOKENS_OVERRIDE
                    logger.debug(f"TESTING: Overrode maxTokens for {model.displayName}: {originalMaxTokens} -> {TESTING_MAX_TOKENS_OVERRIDE}")

                # Use displayName as the key (must be unique)
                self._models[model.displayName] = model
                logger.debug(f"Registered model: {model.displayName} (name: {model.name}) from {connectorType}")
                self._addModel(model, connectorType)
        except Exception as e:
            logger.error(f"Failed to register models from {connectorType}: {e}")
            raise
@@ -116,51 +127,40 @@ class ModelRegistry:
        self._connectorsInitialized = True

    def refreshModels(self, force: bool = False):
        """Refresh models from all registered connectors."""
        import time

        """Refresh models from all registered connectors. Thread-safe via _refreshLock."""
        self.ensureConnectorsRegistered()

        currentTime = time.time()

        # Check if refresh is needed
        if (not force and
                self._lastRefresh is not None and
                currentTime - self._lastRefresh < self._refreshInterval):
            return

        logger.info("Refreshing model registry...")
        if not self._refreshLock.acquire(blocking=False):
            logger.debug("refreshModels already running in another thread, skipping")
            return

        # Clear existing models
        self._models.clear()

        # Re-register all connectors
        for connector in self._connectors.values():
            try:
                connector.clearCache() # Clear connector cache
        try:
            logger.info("Refreshing model registry...")
            newModels: Dict[str, AiModel] = {}

            for connector in self._connectors.values():
                connectorType = connector.getConnectorType()
                try:
                    connector.clearCache()
                    models = connector.getCachedModels()
                    for model in models:
                        # Validate displayName uniqueness
                        if model.displayName in self._models:
                            existingModel = self._models[model.displayName]
                            errorMsg = f"Duplicate displayName '{model.displayName}' detected! Existing model: displayName='{existingModel.displayName}', name='{existingModel.name}' (connector: {existingModel.connectorType}), New model: displayName='{model.displayName}', name='{model.name}' (connector: {connector.getConnectorType()}). displayName must be unique."
                            logger.error(errorMsg)
                            raise ValueError(errorMsg)

                        # TODO TESTING: Override maxTokens if testing override is enabled
                        if TESTING_MAX_TOKENS_OVERRIDE is not None and model.maxTokens > TESTING_MAX_TOKENS_OVERRIDE:
                            originalMaxTokens = model.maxTokens
                            model.maxTokens = TESTING_MAX_TOKENS_OVERRIDE
                            logger.debug(f"TESTING: Overrode maxTokens for {model.displayName}: {originalMaxTokens} -> {TESTING_MAX_TOKENS_OVERRIDE}")

                        # Use displayName as the key (must be unique)
                        self._models[model.displayName] = model
                        self._addModelToDict(model, connectorType, newModels)
                except Exception as e:
                    logger.error(f"Failed to refresh models from {connector.getConnectorType()}: {e}")
                    logger.error(f"Failed to refresh models from {connectorType}: {e}")
                    raise

            self._lastRefresh = currentTime
            self._models = newModels
            self._lastRefresh = time.time()
            logger.info(f"Model registry refreshed: {len(self._models)} models available")
        finally:
            self._refreshLock.release()

    def getModel(self, displayName: str) -> Optional[AiModel]:
        """Get a specific model by displayName (displayName must be unique)."""
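The rewritten refreshModels() follows a build-then-swap pattern behind a non-blocking lock: concurrent callers skip instead of queueing, and readers always see either the old dict or the fully rebuilt one. A condensed sketch of that pattern (illustrative, not the registry code itself):

import threading
import time
from typing import Callable, Dict, Optional

class RefreshableCache:
    """Illustrative: rebuild a dict off to the side, then swap it in atomically."""

    def __init__(self) -> None:
        self._items: Dict[str, object] = {}
        self._lock = threading.Lock()
        self._lastRefresh: Optional[float] = None

    def refresh(self, loader: Callable[[], Dict[str, object]]) -> None:
        # Non-blocking acquire: if another thread is already refreshing, just skip.
        if not self._lock.acquire(blocking=False):
            return
        try:
            newItems = dict(loader())  # build the full replacement first
            self._items = newItems     # single reference swap visible to readers
            self._lastRefresh = time.time()
        finally:
            self._lock.release()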
@ -49,6 +49,102 @@ class AiAnthropic(BaseConnectorAi):
|
|||
def getModels(self) -> List[AiModel]:
|
||||
# Get all available Anthropic models.
|
||||
return [
|
||||
AiModel(
|
||||
name="claude-opus-4-7",
|
||||
displayName="Anthropic Claude Opus 4.7",
|
||||
connectorType="anthropic",
|
||||
apiUrl="https://api.anthropic.com/v1/messages",
|
||||
temperature=0.2,
|
||||
maxTokens=128000,
|
||||
contextLength=1000000,
|
||||
costPer1kTokensInput=0.005, # $5/M tokens (Anthropic API, 2026-04)
|
||||
costPer1kTokensOutput=0.025, # $25/M tokens
|
||||
speedRating=5,
|
||||
qualityRating=10,
|
||||
functionCall=self.callAiBasic,
|
||||
functionCallStream=self.callAiBasicStream,
|
||||
priority=PriorityEnum.QUALITY,
|
||||
processingMode=ProcessingModeEnum.DETAILED,
|
||||
operationTypes=createOperationTypeRatings(
|
||||
(OperationTypeEnum.PLAN, 10),
|
||||
(OperationTypeEnum.DATA_ANALYSE, 9),
|
||||
(OperationTypeEnum.DATA_GENERATE, 10),
|
||||
(OperationTypeEnum.DATA_EXTRACT, 9),
|
||||
(OperationTypeEnum.AGENT, 10),
|
||||
(OperationTypeEnum.DATA_QUERY, 3),
|
||||
),
|
||||
version="claude-opus-4-7",
|
||||
calculatepriceCHF=lambda processingTime, bytesSent, bytesReceived: (bytesSent / 4 / 1000) * 0.005 + (bytesReceived / 4 / 1000) * 0.025
|
||||
),
|
||||
AiModel(
|
||||
name="claude-sonnet-4-6",
|
||||
displayName="Anthropic Claude Sonnet 4.6",
|
||||
connectorType="anthropic",
|
||||
apiUrl="https://api.anthropic.com/v1/messages",
|
||||
temperature=0.2,
|
||||
maxTokens=64000,
|
||||
contextLength=1000000,
|
||||
costPer1kTokensInput=0.003, # $3/M tokens
|
||||
costPer1kTokensOutput=0.015, # $15/M tokens
|
||||
speedRating=7,
|
||||
qualityRating=10,
|
||||
functionCall=self.callAiBasic,
|
||||
functionCallStream=self.callAiBasicStream,
|
||||
priority=PriorityEnum.BALANCED,
|
||||
processingMode=ProcessingModeEnum.ADVANCED,
|
||||
operationTypes=createOperationTypeRatings(
|
||||
(OperationTypeEnum.PLAN, 9),
|
||||
(OperationTypeEnum.DATA_ANALYSE, 9),
|
||||
(OperationTypeEnum.DATA_GENERATE, 9),
|
||||
(OperationTypeEnum.DATA_EXTRACT, 8),
|
||||
(OperationTypeEnum.AGENT, 9),
|
||||
(OperationTypeEnum.DATA_QUERY, 9),
|
||||
),
|
||||
version="claude-sonnet-4-6",
|
||||
calculatepriceCHF=lambda processingTime, bytesSent, bytesReceived: (bytesSent / 4 / 1000) * 0.003 + (bytesReceived / 4 / 1000) * 0.015
|
||||
),
|
||||
AiModel(
|
||||
name="claude-opus-4-7",
|
||||
displayName="Anthropic Claude Opus 4.7 Vision",
|
||||
connectorType="anthropic",
|
||||
apiUrl="https://api.anthropic.com/v1/messages",
|
||||
temperature=0.2,
|
||||
maxTokens=128000,
|
||||
contextLength=1000000,
|
||||
costPer1kTokensInput=0.005,
|
||||
costPer1kTokensOutput=0.025,
|
||||
speedRating=5,
|
||||
qualityRating=10,
|
||||
functionCall=self.callAiImage,
|
||||
priority=PriorityEnum.QUALITY,
|
||||
processingMode=ProcessingModeEnum.DETAILED,
|
||||
operationTypes=createOperationTypeRatings(
|
||||
(OperationTypeEnum.IMAGE_ANALYSE, 10)
|
||||
),
|
||||
version="claude-opus-4-7",
|
||||
calculatepriceCHF=lambda processingTime, bytesSent, bytesReceived: (bytesSent / 4 / 1000) * 0.005 + (bytesReceived / 4 / 1000) * 0.025
|
||||
),
|
||||
AiModel(
|
||||
name="claude-sonnet-4-6",
|
||||
displayName="Anthropic Claude Sonnet 4.6 Vision",
|
||||
connectorType="anthropic",
|
||||
apiUrl="https://api.anthropic.com/v1/messages",
|
||||
temperature=0.2,
|
||||
maxTokens=64000,
|
||||
contextLength=1000000,
|
||||
costPer1kTokensInput=0.003,
|
||||
costPer1kTokensOutput=0.015,
|
||||
speedRating=6,
|
||||
qualityRating=10,
|
||||
functionCall=self.callAiImage,
|
||||
priority=PriorityEnum.QUALITY,
|
||||
processingMode=ProcessingModeEnum.DETAILED,
|
||||
operationTypes=createOperationTypeRatings(
|
||||
(OperationTypeEnum.IMAGE_ANALYSE, 10)
|
||||
),
|
||||
version="claude-sonnet-4-6",
|
||||
calculatepriceCHF=lambda processingTime, bytesSent, bytesReceived: (bytesSent / 4 / 1000) * 0.003 + (bytesReceived / 4 / 1000) * 0.015
|
||||
),
|
||||
AiModel(
|
||||
name="claude-sonnet-4-5-20250929",
|
||||
displayName="Anthropic Claude Sonnet 4.5",
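All calculatepriceCHF lambdas in these connector files use the same approximation: bytes / 4 as a rough token count, divided by 1000 to match the per-1k-token rates. Restated as a small helper with a worked example (the helper name is illustrative):

def estimateCallCostCHF(bytesSent: int, bytesReceived: int,
                        costPer1kIn: float, costPer1kOut: float) -> float:
    """Approximate cost: 4 bytes per token, rates quoted per 1k tokens."""
    return (bytesSent / 4 / 1000) * costPer1kIn + (bytesReceived / 4 / 1000) * costPer1kOut

# Example with the Opus rates from the diff (0.005 in / 0.025 out):
# 200_000 bytes sent -> ~50k tokens -> 50 * 0.005 = 0.25
#  40_000 bytes recv -> ~10k tokens -> 10 * 0.025 = 0.25
print(estimateCallCostCHF(200_000, 40_000, 0.005, 0.025))  # 0.5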
@ -123,6 +123,135 @@ class AiOpenai(BaseConnectorAi):
|
|||
version="gpt-4o",
|
||||
calculatepriceCHF=lambda processingTime, bytesSent, bytesReceived: (bytesSent / 4 / 1000) * 0.0025 + (bytesReceived / 4 / 1000) * 0.01
|
||||
),
|
||||
AiModel(
|
||||
name="gpt-5.5",
|
||||
displayName="OpenAI GPT-5.5",
|
||||
connectorType="openai",
|
||||
apiUrl="https://api.openai.com/v1/chat/completions",
|
||||
temperature=0.2,
|
||||
maxTokens=128000,
|
||||
contextLength=1050000,
|
||||
costPer1kTokensInput=0.005, # $5/M tokens (OpenAI API, 2026-04)
|
||||
costPer1kTokensOutput=0.03, # $30/M tokens
|
||||
speedRating=8,
|
||||
qualityRating=10,
|
||||
functionCall=self.callAiBasic,
|
||||
functionCallStream=self.callAiBasicStream,
|
||||
priority=PriorityEnum.QUALITY,
|
||||
processingMode=ProcessingModeEnum.DETAILED,
|
||||
operationTypes=createOperationTypeRatings(
|
||||
(OperationTypeEnum.PLAN, 10),
|
||||
(OperationTypeEnum.DATA_ANALYSE, 10),
|
||||
(OperationTypeEnum.DATA_GENERATE, 10),
|
||||
(OperationTypeEnum.DATA_EXTRACT, 8),
|
||||
(OperationTypeEnum.AGENT, 10),
|
||||
(OperationTypeEnum.DATA_QUERY, 8),
|
||||
),
|
||||
version="gpt-5.5",
|
||||
calculatepriceCHF=lambda processingTime, bytesSent, bytesReceived: (bytesSent / 4 / 1000) * 0.005 + (bytesReceived / 4 / 1000) * 0.03
|
||||
),
|
||||
AiModel(
|
||||
name="gpt-5.4",
|
||||
displayName="OpenAI GPT-5.4",
|
||||
connectorType="openai",
|
||||
apiUrl="https://api.openai.com/v1/chat/completions",
|
||||
temperature=0.2,
|
||||
maxTokens=128000,
|
||||
contextLength=1050000,
|
||||
costPer1kTokensInput=0.0025, # $2.50/M tokens
|
||||
costPer1kTokensOutput=0.015, # $15/M tokens
|
||||
speedRating=8,
|
||||
qualityRating=10,
|
||||
functionCall=self.callAiBasic,
|
||||
functionCallStream=self.callAiBasicStream,
|
||||
priority=PriorityEnum.BALANCED,
|
||||
processingMode=ProcessingModeEnum.ADVANCED,
|
||||
operationTypes=createOperationTypeRatings(
|
||||
(OperationTypeEnum.PLAN, 9),
|
||||
(OperationTypeEnum.DATA_ANALYSE, 10),
|
||||
(OperationTypeEnum.DATA_GENERATE, 10),
|
||||
(OperationTypeEnum.DATA_EXTRACT, 8),
|
||||
(OperationTypeEnum.AGENT, 9),
|
||||
(OperationTypeEnum.DATA_QUERY, 8),
|
||||
),
|
||||
version="gpt-5.4",
|
||||
calculatepriceCHF=lambda processingTime, bytesSent, bytesReceived: (bytesSent / 4 / 1000) * 0.0025 + (bytesReceived / 4 / 1000) * 0.015
|
||||
),
|
||||
AiModel(
|
||||
name="gpt-5.4-mini",
|
||||
displayName="OpenAI GPT-5.4 Mini",
|
||||
connectorType="openai",
|
||||
apiUrl="https://api.openai.com/v1/chat/completions",
|
||||
temperature=0.2,
|
||||
maxTokens=128000,
|
||||
contextLength=400000,
|
||||
costPer1kTokensInput=0.00075, # $0.75/M tokens
|
||||
costPer1kTokensOutput=0.0045, # $4.50/M tokens
|
||||
speedRating=9,
|
||||
qualityRating=9,
|
||||
functionCall=self.callAiBasic,
|
||||
functionCallStream=self.callAiBasicStream,
|
||||
priority=PriorityEnum.SPEED,
|
||||
processingMode=ProcessingModeEnum.BASIC,
|
||||
operationTypes=createOperationTypeRatings(
|
||||
(OperationTypeEnum.PLAN, 8),
|
||||
(OperationTypeEnum.DATA_ANALYSE, 9),
|
||||
(OperationTypeEnum.DATA_GENERATE, 9),
|
||||
(OperationTypeEnum.DATA_EXTRACT, 8),
|
||||
(OperationTypeEnum.AGENT, 8),
|
||||
(OperationTypeEnum.DATA_QUERY, 10),
|
||||
),
|
||||
version="gpt-5.4-mini",
|
||||
calculatepriceCHF=lambda processingTime, bytesSent, bytesReceived: (bytesSent / 4 / 1000) * 0.00075 + (bytesReceived / 4 / 1000) * 0.0045
|
||||
),
|
||||
AiModel(
|
||||
name="gpt-5.4-nano",
|
||||
displayName="OpenAI GPT-5.4 Nano",
|
||||
connectorType="openai",
|
||||
apiUrl="https://api.openai.com/v1/chat/completions",
|
||||
temperature=0.2,
|
||||
maxTokens=128000,
|
||||
contextLength=400000,
|
||||
costPer1kTokensInput=0.0002, # $0.20/M tokens
|
||||
costPer1kTokensOutput=0.00125, # $1.25/M tokens
|
||||
speedRating=10,
|
||||
qualityRating=7,
|
||||
functionCall=self.callAiBasic,
|
||||
functionCallStream=self.callAiBasicStream,
|
||||
priority=PriorityEnum.COST,
|
||||
processingMode=ProcessingModeEnum.BASIC,
|
||||
operationTypes=createOperationTypeRatings(
|
||||
(OperationTypeEnum.PLAN, 7),
|
||||
(OperationTypeEnum.DATA_ANALYSE, 7),
|
||||
(OperationTypeEnum.DATA_GENERATE, 8),
|
||||
(OperationTypeEnum.DATA_EXTRACT, 9),
|
||||
(OperationTypeEnum.AGENT, 7),
|
||||
(OperationTypeEnum.DATA_QUERY, 10),
|
||||
),
|
||||
version="gpt-5.4-nano",
|
||||
calculatepriceCHF=lambda processingTime, bytesSent, bytesReceived: (bytesSent / 4 / 1000) * 0.0002 + (bytesReceived / 4 / 1000) * 0.00125
|
||||
),
|
||||
AiModel(
|
||||
name="gpt-5.5",
|
||||
displayName="OpenAI GPT-5.5 Vision",
|
||||
connectorType="openai",
|
||||
apiUrl="https://api.openai.com/v1/chat/completions",
|
||||
temperature=0.2,
|
||||
maxTokens=128000,
|
||||
contextLength=1050000,
|
||||
costPer1kTokensInput=0.005,
|
||||
costPer1kTokensOutput=0.03,
|
||||
speedRating=6,
|
||||
qualityRating=10,
|
||||
functionCall=self.callAiImage,
|
||||
priority=PriorityEnum.QUALITY,
|
||||
processingMode=ProcessingModeEnum.DETAILED,
|
||||
operationTypes=createOperationTypeRatings(
|
||||
(OperationTypeEnum.IMAGE_ANALYSE, 10)
|
||||
),
|
||||
version="gpt-5.5",
|
||||
calculatepriceCHF=lambda processingTime, bytesSent, bytesReceived: (bytesSent / 4 / 1000) * 0.005 + (bytesReceived / 4 / 1000) * 0.03
|
||||
),
|
||||
AiModel(
|
||||
name="text-embedding-3-small",
|
||||
displayName="OpenAI Embedding Small",
|
||||
|
|
@@ -216,7 +345,11 @@ class AiOpenai(BaseConnectorAi):
            "model": model.name,
            "messages": messages,
            "temperature": temperature,
            "max_tokens": maxTokens
            # Universal output-length cap. `max_tokens` is deprecated and
            # rejected outright by gpt-5.x / o-series; `max_completion_tokens`
            # is accepted by every current chat-completions model (legacy
            # gpt-4o, gpt-4.1, gpt-5.x, o1/o3/o4) per OpenAI API reference.
            "max_completion_tokens": maxTokens
        }

        if modelCall.tools:

@@ -296,7 +429,10 @@ class AiOpenai(BaseConnectorAi):
            "model": model.name,
            "messages": messages,
            "temperature": temperature,
            "max_tokens": model.maxTokens,
            # See callAiBasic for the rationale: `max_completion_tokens`
            # is the universal output-length parameter; `max_tokens` is
            # deprecated and rejected by gpt-5.x / o-series.
            "max_completion_tokens": model.maxTokens,
            "stream": True,
        }
        if modelCall.tools:
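Both call sites now send max_completion_tokens instead of max_tokens. A minimal sketch of the resulting request payload, simplified from the code above (only the shown keys are taken from the diff):

def buildChatPayload(modelName: str, messages: list, temperature: float,
                     maxTokens: int, stream: bool = False) -> dict:
    # max_completion_tokens is the output-length cap accepted by current
    # chat-completions models; max_tokens is the deprecated spelling (see comments above).
    payload = {
        "model": modelName,
        "messages": messages,
        "temperature": temperature,
        "max_completion_tokens": maxTokens,
    }
    if stream:
        payload["stream"] = True
    return payload

# Example: buildChatPayload("gpt-5.4-mini", [{"role": "user", "content": "Hi"}], 0.2, 1024)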
@ -76,7 +76,7 @@ def _isJsonbType(fieldType) -> bool:
|
|||
return False
|
||||
|
||||
|
||||
def _get_model_fields(model_class) -> Dict[str, str]:
|
||||
def getModelFields(model_class) -> Dict[str, str]:
|
||||
"""Get all fields from Pydantic model and map to SQL types.
|
||||
|
||||
Supports explicit db_type override via json_schema_extra={"db_type": "vector(1536)"}.
|
||||
|
|
@ -121,22 +121,7 @@ def _get_model_fields(model_class) -> Dict[str, str]:
|
|||
return fields
|
||||
|
||||
|
||||
def _get_fk_sort_meta(model_class) -> Dict[str, Dict[str, str]]:
|
||||
"""Map FK field name -> {model, labelField} from json_schema_extra (fk_model + frontend_fk_display_field)."""
|
||||
result: Dict[str, Dict[str, str]] = {}
|
||||
for name, field_info in model_class.model_fields.items():
|
||||
extra = field_info.json_schema_extra
|
||||
if not extra or not isinstance(extra, dict):
|
||||
continue
|
||||
fk_model = extra.get("fk_model")
|
||||
label_field = extra.get("frontend_fk_display_field")
|
||||
if fk_model and label_field:
|
||||
result[name] = {"model": str(fk_model), "labelField": str(label_field)}
|
||||
return result
|
||||
|
||||
|
||||
|
||||
def _parseRecordFields(record: Dict[str, Any], fields: Dict[str, str], context: str = "") -> None:
|
||||
def parseRecordFields(record: Dict[str, Any], fields: Dict[str, str], context: str = "") -> None:
|
||||
"""Parse record fields in-place: numeric typing, vector parsing, JSONB deserialization."""
|
||||
import json as _json
|
||||
|
||||
|
|
@ -189,7 +174,7 @@ _current_user_id: contextvars.ContextVar[Optional[str]] = contextvars.ContextVar
|
|||
)
|
||||
|
||||
|
||||
def _get_cached_connector(
|
||||
def getCachedConnector(
|
||||
dbHost: str,
|
||||
dbDatabase: str,
|
||||
dbUser: str = None,
|
||||
|
|
@ -553,7 +538,7 @@ class DatabaseConnector:
|
|||
}
|
||||
|
||||
# Desired columns based on model
|
||||
model_fields = _get_model_fields(model_class)
|
||||
model_fields = getModelFields(model_class)
|
||||
desired_columns = set(["id"]) | set(model_fields.keys())
|
||||
|
||||
# Add missing columns
|
||||
|
|
@@ -576,29 +561,48 @@ class DatabaseConnector:
                            f"Could not add column '{col}' to '{table}': {add_err}"
                        )

            # Targeted type-downgrade: if a model field has been
            # changed from a structured type (JSONB) to a plain
            # TEXT field, alter the column so writes don't fail.
            # JSONB -> TEXT is a safe, lossless cast (JSONB is
            # rendered as its JSON-text representation; the
            # corresponding Pydantic ``@field_validator`` is
            # responsible for re-decoding legacy data on read).
            # Column type migrations for existing tables.
            # TEXT→DOUBLE PRECISION handles three value shapes:
            # 1. NULL / empty string → NULL
            # 2. ISO date(time) like "2025-01-22" or "2025-01-22T10:00:00+00" → epoch via EXTRACT
            # 3. Plain numeric string like "3.14" → direct cast
            _TEXT_TO_DOUBLE = (
                'DOUBLE PRECISION USING CASE'
                ' WHEN "{col}" IS NULL OR "{col}" = \'\' THEN NULL'
                ' WHEN "{col}" ~ \'^\\d{4}-\\d{2}-\\d{2}\''
                ' THEN EXTRACT(EPOCH FROM "{col}"::timestamptz)'
                ' ELSE NULLIF("{col}", \'\')::double precision'
                ' END'
            )
            _SAFE_TYPE_CHANGES = {
                ("jsonb", "TEXT"): "TEXT USING \"{col}\"::text",
                ("text", "DOUBLE PRECISION"): _TEXT_TO_DOUBLE,
                ("text", "INTEGER"): "INTEGER USING NULLIF(\"{col}\", '')::integer",
                ("timestamp without time zone", "DOUBLE PRECISION"): 'DOUBLE PRECISION USING EXTRACT(EPOCH FROM "{col}" AT TIME ZONE \'UTC\')',
                ("timestamp with time zone", "DOUBLE PRECISION"): 'DOUBLE PRECISION USING EXTRACT(EPOCH FROM "{col}")',
                ("date", "DOUBLE PRECISION"): 'DOUBLE PRECISION USING EXTRACT(EPOCH FROM "{col}"::timestamp AT TIME ZONE \'UTC\')',
            }
            for col in sorted(desired_columns & existing_columns):
                if col == "id":
                    continue
                desired_sql = (model_fields.get(col) or "").upper()
                currentType = existing_column_types.get(col, "")
                if desired_sql == "TEXT" and currentType == "jsonb":
                    migration = _SAFE_TYPE_CHANGES.get((currentType, desired_sql))
                    if migration:
                        castExpr = migration.replace("{col}", col)
                        try:
                            cursor.execute('SAVEPOINT col_migrate')
                            cursor.execute(
                                f'ALTER TABLE "{table}" ALTER COLUMN "{col}" TYPE TEXT USING "{col}"::text'
                                f'ALTER TABLE "{table}" ALTER COLUMN "{col}" TYPE {castExpr}'
                            )
                            cursor.execute('RELEASE SAVEPOINT col_migrate')
                            logger.info(
                                f"Downgraded column '{col}' from JSONB to TEXT on '{table}'"
                                f"Migrated column '{col}' from {currentType} to {desired_sql} on '{table}'"
                            )
                        except Exception as alter_err:
                            cursor.execute('ROLLBACK TO SAVEPOINT col_migrate')
                            logger.warning(
                                f"Could not downgrade column '{col}' on '{table}': {alter_err}"
                                f"Could not migrate column '{col}' on '{table}': {alter_err}"
                            )
        except Exception as ensure_err:
            logger.warning(
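Each migration runs inside a savepoint so one failed ALTER cannot abort the whole schema-sync transaction. A condensed sketch of that pattern, assuming a psycopg-style cursor (the table/column pairing in the example is hypothetical):

def migrateColumn(cursor, table: str, col: str, castExpr: str) -> bool:
    """Attempt one ALTER TABLE ... TYPE ... USING, rolled back on failure (sketch)."""
    try:
        cursor.execute('SAVEPOINT col_migrate')
        cursor.execute(f'ALTER TABLE "{table}" ALTER COLUMN "{col}" TYPE {castExpr}')
        cursor.execute('RELEASE SAVEPOINT col_migrate')
        return True
    except Exception:
        # Undo only this ALTER; the rest of the schema-sync transaction survives.
        cursor.execute('ROLLBACK TO SAVEPOINT col_migrate')
        return False

# Example cast expression for a TEXT -> INTEGER migration (hypothetical column):
# migrateColumn(cursor, "ActionItem", "retryCount", 'INTEGER USING NULLIF("retryCount", \'\')::integer')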
@ -633,7 +637,7 @@ class DatabaseConnector:
|
|||
|
||||
def _create_table_from_model(self, cursor, table: str, model_class: type) -> None:
|
||||
"""Create table with columns matching Pydantic model fields."""
|
||||
fields = _get_model_fields(model_class)
|
||||
fields = getModelFields(model_class)
|
||||
|
||||
# Enable pgvector if any field uses vector type
|
||||
if any(_isVectorType(sqlType) for sqlType in fields.values()):
|
||||
|
|
@ -666,7 +670,7 @@ class DatabaseConnector:
|
|||
) -> None:
|
||||
"""Save record to normalized table with explicit columns."""
|
||||
# Get columns from Pydantic model instead of database schema
|
||||
fields = _get_model_fields(model_class)
|
||||
fields = getModelFields(model_class)
|
||||
columns = ["id"] + [field for field in fields.keys() if field != "id"]
|
||||
|
||||
if not columns:
|
||||
|
|
@ -751,9 +755,9 @@ class DatabaseConnector:
|
|||
|
||||
# Convert row to dict and handle JSONB fields
|
||||
record = dict(row)
|
||||
fields = _get_model_fields(model_class)
|
||||
fields = getModelFields(model_class)
|
||||
|
||||
_parseRecordFields(record, fields, f"record {recordId}")
|
||||
parseRecordFields(record, fields, f"record {recordId}")
|
||||
|
||||
return record
|
||||
except Exception as e:
|
||||
|
|
@ -822,10 +826,10 @@ class DatabaseConnector:
|
|||
cursor.execute(f'SELECT * FROM "{table}" ORDER BY "id"')
|
||||
records = [dict(row) for row in cursor.fetchall()]
|
||||
|
||||
fields = _get_model_fields(model_class)
|
||||
fields = getModelFields(model_class)
|
||||
modelFields = model_class.model_fields
|
||||
for record in records:
|
||||
_parseRecordFields(record, fields, f"table {table}")
|
||||
parseRecordFields(record, fields, f"table {table}")
|
||||
# Set type-aware defaults for NULL JSONB fields
|
||||
for fieldName, fieldType in fields.items():
|
||||
if fieldType == "JSONB" and fieldName in record and record[fieldName] is None:
|
||||
|
|
@ -1011,10 +1015,10 @@ class DatabaseConnector:
|
|||
cursor.execute(query, where_values)
|
||||
records = [dict(row) for row in cursor.fetchall()]
|
||||
|
||||
fields = _get_model_fields(model_class)
|
||||
fields = getModelFields(model_class)
|
||||
modelFields = model_class.model_fields
|
||||
for record in records:
|
||||
_parseRecordFields(record, fields, f"table {table}")
|
||||
parseRecordFields(record, fields, f"table {table}")
|
||||
for fieldName, fieldType in fields.items():
|
||||
if fieldType == "JSONB" and fieldName in record and record[fieldName] is None:
|
||||
fieldInfo = modelFields.get(fieldName)
|
||||
|
|
@ -1055,7 +1059,7 @@ class DatabaseConnector:
|
|||
Translate PaginationParams + recordFilter into SQL clauses.
|
||||
Returns (where_clause, order_clause, limit_clause, values, count_values).
|
||||
"""
|
||||
fields = _get_model_fields(model_class)
|
||||
fields = getModelFields(model_class)
|
||||
validColumns = set(fields.keys())
|
||||
|
||||
where_parts: List[str] = []
|
||||
|
|
@ -1111,6 +1115,13 @@ class DatabaseConnector:
|
|||
values.append(f"%{v}")
|
||||
elif op in ("gt", "gte", "lt", "lte"):
|
||||
sqlOp = {"gt": ">", "gte": ">=", "lt": "<", "lte": "<="}[op]
|
||||
if colType in ("INTEGER", "DOUBLE PRECISION"):
|
||||
try:
|
||||
where_parts.append(f'"{key}"::double precision {sqlOp} %s')
|
||||
values.append(float(v))
|
||||
except (ValueError, TypeError):
|
||||
continue
|
||||
else:
|
||||
where_parts.append(f'"{key}"::TEXT {sqlOp} %s')
|
||||
values.append(str(v))
|
||||
elif op == "between":
|
||||
|
|
@ -1137,6 +1148,21 @@ class DatabaseConnector:
|
|||
toTs = _dt.strptime(str(toVal), '%Y-%m-%d').replace(hour=23, minute=59, second=59, tzinfo=_tz.utc).timestamp()
|
||||
where_parts.append(f'"{key}" <= %s')
|
||||
values.append(toTs)
|
||||
elif isNumericCol:
|
||||
try:
|
||||
if fromVal and toVal:
|
||||
where_parts.append(
|
||||
f'"{key}"::double precision >= %s AND "{key}"::double precision <= %s'
|
||||
)
|
||||
values.extend([float(fromVal), float(toVal)])
|
||||
elif fromVal:
|
||||
where_parts.append(f'"{key}"::double precision >= %s')
|
||||
values.append(float(fromVal))
|
||||
elif toVal:
|
||||
where_parts.append(f'"{key}"::double precision <= %s')
|
||||
values.append(float(toVal))
|
||||
except (ValueError, TypeError):
|
||||
continue
|
||||
else:
|
||||
if fromVal and toVal:
|
||||
where_parts.append(f'"{key}"::TEXT >= %s AND "{key}"::TEXT <= %s')
|
||||
|
|
@ -1214,10 +1240,10 @@ class DatabaseConnector:
|
|||
cursor.execute(dataSql, values)
|
||||
records = [dict(row) for row in cursor.fetchall()]
|
||||
|
||||
fields = _get_model_fields(model_class)
|
||||
fields = getModelFields(model_class)
|
||||
modelFields = model_class.model_fields
|
||||
for record in records:
|
||||
_parseRecordFields(record, fields, f"table {table}")
|
||||
parseRecordFields(record, fields, f"table {table}")
|
||||
for fieldName, fieldType in fields.items():
|
||||
if fieldType == "JSONB" and fieldName in record and record[fieldName] is None:
|
||||
fieldInfo = modelFields.get(fieldName)
|
||||
|
|
@@ -1235,6 +1261,9 @@ class DatabaseConnector:
            if fieldFilter and isinstance(fieldFilter, list):
                records = [{f: r[f] for f in fieldFilter if f in r} for r in records]

            from modules.routes.routeHelpers import enrichRowsWithFkLabels
            enrichRowsWithFkLabels(records, model_class)

            pageSize = pagination.pageSize if pagination else max(totalItems, 1)
            totalPages = math.ceil(totalItems / pageSize) if totalItems > 0 else 0
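enrichRowsWithFkLabels() is expected to consume the fk_target metadata (table plus labelField) that the datamodel changes below add, attaching a display label next to each raw FK id. A minimal sketch of that idea with an injected label resolver (names and signature are illustrative, not the routeHelpers implementation):

from typing import Any, Callable, Dict, List, Type
from pydantic import BaseModel

def enrichRowsWithFkLabelsSketch(rows: List[Dict[str, Any]], modelClass: Type[BaseModel],
                                 fetchLabel: Callable[[str, str, str], str]) -> None:
    """Add '<field>Label' entries for every FK field that declares a labelField (illustrative)."""
    for fieldName, fieldInfo in modelClass.model_fields.items():
        extra = fieldInfo.json_schema_extra if isinstance(fieldInfo.json_schema_extra, dict) else {}
        fkTarget = extra.get("fk_target") or {}
        labelField = fkTarget.get("labelField")
        if not labelField:
            continue  # labelField=None means: show the raw id only
        for row in rows:
            fkId = row.get(fieldName)
            if fkId:
                row[f"{fieldName}Label"] = fetchLabel(fkTarget["table"], labelField, fkId)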
@ -1249,13 +1278,18 @@ class DatabaseConnector:
|
|||
column: str,
|
||||
pagination=None,
|
||||
recordFilter: Dict[str, Any] = None,
|
||||
) -> List[str]:
|
||||
"""
|
||||
Returns sorted distinct non-null values for a column using SQL DISTINCT.
|
||||
includeEmpty: bool = True,
|
||||
) -> List[Optional[str]]:
|
||||
"""Return sorted distinct values for a column using SQL DISTINCT.
|
||||
|
||||
When ``includeEmpty`` is True (default), NULL and empty-string rows are
|
||||
represented as a single ``None`` entry at the end of the list — this
|
||||
allows the frontend to offer a "(Leer)" filter option.
|
||||
|
||||
Applies cross-filtering (all filters except the requested column).
|
||||
"""
|
||||
table = model_class.__name__
|
||||
fields = _get_model_fields(model_class)
|
||||
fields = getModelFields(model_class)
|
||||
|
||||
if column not in fields:
|
||||
return []
|
||||
|
|
@@ -1274,18 +1308,28 @@ class DatabaseConnector:
            where_clause, _, _, values, _ = \
                self._buildPaginationClauses(model_class, pagination, recordFilter)

            sql = (
                f'SELECT DISTINCT "{column}"::TEXT AS val FROM "{table}"{where_clause} '
                f'WHERE "{column}" IS NOT NULL AND "{column}"::TEXT != \'\' '
                if not where_clause else
                f'SELECT DISTINCT "{column}"::TEXT AS val FROM "{table}"{where_clause} '
                f'AND "{column}" IS NOT NULL AND "{column}"::TEXT != \'\' '
            )
            sql += 'ORDER BY val'
            nonNullCond = f'"{column}" IS NOT NULL AND "{column}"::TEXT != \'\''
            if where_clause:
                sql = f'SELECT DISTINCT "{column}"::TEXT AS val FROM "{table}"{where_clause} AND {nonNullCond} ORDER BY val'
            else:
                sql = f'SELECT DISTINCT "{column}"::TEXT AS val FROM "{table}" WHERE {nonNullCond} ORDER BY val'

            with self.connection.cursor() as cursor:
                cursor.execute(sql, values)
                return [row["val"] for row in cursor.fetchall()]
                result: List[Optional[str]] = [row["val"] for row in cursor.fetchall()]

            if includeEmpty:
                emptyCond = f'"{column}" IS NULL OR "{column}"::TEXT = \'\''
                if where_clause:
                    emptySql = f'SELECT 1 FROM "{table}"{where_clause} AND ({emptyCond}) LIMIT 1'
                else:
                    emptySql = f'SELECT 1 FROM "{table}" WHERE ({emptyCond}) LIMIT 1'
                with self.connection.cursor() as cursor:
                    cursor.execute(emptySql, values)
                    if cursor.fetchone():
                        result.append(None)

            return result
        except Exception as e:
            logger.error(f"Error in getDistinctColumnValues for {table}.{column}: {e}")
            return []
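With includeEmpty=True the method appends a single None after the sorted values whenever NULL or empty-string rows exist, so the frontend can offer one "(Leer)" filter entry. A small sketch of how a consumer might map that sentinel (the UI mapping is an assumption, not code from this PR):

from typing import List, Optional

def toFilterOptions(distinctValues: List[Optional[str]]) -> List[dict]:
    """Map the backend's distinct-value list to dropdown options; None becomes '(Leer)'."""
    options = []
    for value in distinctValues:
        if value is None:
            options.append({"value": "", "label": "(Leer)", "matchesEmpty": True})
        else:
            options.append({"value": value, "label": value, "matchesEmpty": False})
    return options

# Example: ["Bern", "Zürich", None] -> the last entry renders as "(Leer)".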
@ -1419,7 +1463,7 @@ class DatabaseConnector:
|
|||
if not self._ensureTableExists(model_class):
|
||||
raise ValueError(f"Table {table} does not exist")
|
||||
|
||||
fields = _get_model_fields(model_class)
|
||||
fields = getModelFields(model_class)
|
||||
columns = ["id"] + [f for f in fields.keys() if f != "id"]
|
||||
modelFields = model_class.model_fields
|
||||
|
||||
|
|
@ -1541,7 +1585,7 @@ class DatabaseConnector:
|
|||
if not self._ensureTableExists(model_class):
|
||||
return 0
|
||||
|
||||
fields = _get_model_fields(model_class)
|
||||
fields = getModelFields(model_class)
|
||||
clauses: List[str] = []
|
||||
params: List[Any] = []
|
||||
for key, val in recordFilter.items():
|
||||
|
|
@ -1659,9 +1703,9 @@ class DatabaseConnector:
|
|||
cursor.execute(query, params)
|
||||
records = [dict(row) for row in cursor.fetchall()]
|
||||
|
||||
fields = _get_model_fields(modelClass)
|
||||
fields = getModelFields(modelClass)
|
||||
for record in records:
|
||||
_parseRecordFields(record, fields, f"semanticSearch {table}")
|
||||
parseRecordFields(record, fields, f"semanticSearch {table}")
|
||||
|
||||
return records
|
||||
except Exception as e:
|
||||
|
|
|
|||
|
|
@@ -125,7 +125,7 @@ class AiModel(BaseModel):

    # Metadata
    version: Optional[str] = Field(default=None, description="Model version")
    lastUpdated: Optional[str] = Field(default=None, description="Last update timestamp")
    lastUpdated: Optional[float] = Field(default=None, description="Last update timestamp (UTC unix)", json_schema_extra={"frontend_type": "timestamp"})

    model_config = ConfigDict(arbitrary_types_allowed=True) # Allow Callable type
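lastUpdated is one of several fields in this PR that move from datetime or string values to a plain UTC unix float tagged with frontend_type="timestamp". The conversion in both directions is a one-liner; a small example of the convention (helper names are illustrative):

from datetime import datetime, timezone

def nowUtcTimestamp() -> float:
    """Store timestamps as UTC unix seconds (float), as the migrated fields do."""
    return datetime.now(timezone.utc).timestamp()

def timestampToIso(ts: float) -> str:
    """Render a stored float back into an ISO-8601 string for display/debugging."""
    return datetime.fromtimestamp(ts, tz=timezone.utc).isoformat()

# Example: timestampToIso(nowUtcTimestamp()) -> '2026-...T...+00:00'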
@ -34,7 +34,7 @@ class AiAuditLogEntry(BaseModel):
|
|||
|
||||
userId: str = Field(
|
||||
description="ID of the user who triggered the AI call",
|
||||
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "User"}},
|
||||
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"}},
|
||||
)
|
||||
username: Optional[str] = Field(
|
||||
default=None,
|
||||
|
|
@ -43,17 +43,17 @@ class AiAuditLogEntry(BaseModel):
|
|||
)
|
||||
mandateId: str = Field(
|
||||
description="Mandate context of the call",
|
||||
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate"}},
|
||||
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}},
|
||||
)
|
||||
featureInstanceId: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Feature instance context",
|
||||
json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
|
||||
json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}},
|
||||
)
|
||||
featureCode: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Feature code (e.g. workspace, trustee)",
|
||||
json_schema_extra={"label": "Feature", "fk_target": {"db": "poweron_app", "table": "Feature", "column": "code"}},
|
||||
json_schema_extra={"label": "Feature", "fk_target": {"db": "poweron_app", "table": "Feature", "column": "code", "labelField": "code"}},
|
||||
)
|
||||
instanceLabel: Optional[str] = Field(
|
||||
default=None,
|
||||
|
|
|
|||
|
|
@ -100,7 +100,7 @@ class AuditLogEntry(BaseModel):
|
|||
timestamp: float = Field(
|
||||
default_factory=getUtcTimestamp,
|
||||
description="UTC timestamp when the event occurred",
|
||||
json_schema_extra={"label": "Zeitstempel", "frontend_type": "datetime", "frontend_readonly": True, "frontend_required": True}
|
||||
json_schema_extra={"label": "Zeitstempel", "frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": True}
|
||||
)
|
||||
|
||||
# Actor identification
|
||||
|
|
@ -111,7 +111,7 @@ class AuditLogEntry(BaseModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": True,
|
||||
"fk_target": {"db": "poweron_app", "table": "User"},
|
||||
"fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
|
||||
},
|
||||
)
|
||||
|
||||
|
|
@ -130,7 +130,7 @@ class AuditLogEntry(BaseModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
|
||||
|
|
@ -142,7 +142,7 @@ class AuditLogEntry(BaseModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
|
||||
|
|
@ -193,7 +193,13 @@ class AuditLogEntry(BaseModel):
|
|||
success: bool = Field(
|
||||
default=True,
|
||||
description="Whether the action was successful",
|
||||
json_schema_extra={"label": "Erfolgreich", "frontend_type": "checkbox", "frontend_readonly": True, "frontend_required": True}
|
||||
json_schema_extra={
|
||||
"label": "Erfolgreich",
|
||||
"frontend_type": "checkbox",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": True,
|
||||
"frontend_format_labels": ["OK", "-", "Fehler"],
|
||||
},
|
||||
)
|
||||
|
||||
errorMessage: Optional[str] = Field(
|
||||
|
|
|
|||
|
|
@ -64,7 +64,7 @@ class BackgroundJob(PowerOnModel):
|
|||
description="Mandate scope (used for access checks). None for system-wide jobs.",
|
||||
json_schema_extra={
|
||||
"label": "Mandanten-ID",
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
featureInstanceId: Optional[str] = Field(
|
||||
|
|
@ -72,7 +72,7 @@ class BackgroundJob(PowerOnModel):
|
|||
description="Feature instance scope (optional)",
|
||||
json_schema_extra={
|
||||
"label": "Feature-Instanz",
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
triggeredBy: Optional[str] = Field(
|
||||
|
|
@ -113,18 +113,18 @@ class BackgroundJob(PowerOnModel):
|
|||
json_schema_extra={"label": "Fehler"},
|
||||
)
|
||||
|
||||
createdAt: datetime = Field(
|
||||
default_factory=lambda: datetime.now(timezone.utc),
|
||||
description="When the job was submitted",
|
||||
json_schema_extra={"label": "Eingereicht"},
|
||||
createdAt: float = Field(
|
||||
default_factory=lambda: datetime.now(timezone.utc).timestamp(),
|
||||
description="When the job was submitted (UTC unix)",
|
||||
json_schema_extra={"label": "Eingereicht", "frontend_type": "timestamp"},
|
||||
)
|
||||
startedAt: Optional[datetime] = Field(
|
||||
startedAt: Optional[float] = Field(
|
||||
None,
|
||||
description="When the handler began running",
|
||||
json_schema_extra={"label": "Gestartet"},
|
||||
description="When the handler began running (UTC unix)",
|
||||
json_schema_extra={"label": "Gestartet", "frontend_type": "timestamp"},
|
||||
)
|
||||
finishedAt: Optional[datetime] = Field(
|
||||
finishedAt: Optional[float] = Field(
|
||||
None,
|
||||
description="When the handler reached a terminal status",
|
||||
json_schema_extra={"label": "Beendet"},
|
||||
description="When the handler reached a terminal status (UTC unix)",
|
||||
json_schema_extra={"label": "Beendet", "frontend_type": "timestamp"},
|
||||
)
|
||||
|
|
|
|||
|
|
@ -8,12 +8,12 @@ from pydantic import BaseModel, Field
|
|||
|
||||
from modules.shared.i18nRegistry import i18nModel
|
||||
|
||||
_MODEL_REGISTRY: Dict[str, Type["PowerOnModel"]] = {}
|
||||
MODEL_REGISTRY: Dict[str, Type["PowerOnModel"]] = {}
|
||||
|
||||
|
||||
def _getModelByTableName(tableName: str) -> Optional[Type["PowerOnModel"]]:
|
||||
"""Look up a PowerOnModel subclass by its table name (= class name)."""
|
||||
return _MODEL_REGISTRY.get(tableName)
|
||||
return MODEL_REGISTRY.get(tableName)
|
||||
|
||||
|
||||
@i18nModel("Basisdatensatz")
|
||||
|
|
@ -22,7 +22,7 @@ class PowerOnModel(BaseModel):
|
|||
|
||||
def __init_subclass__(cls, **kwargs):
|
||||
super().__init_subclass__(**kwargs)
|
||||
_MODEL_REGISTRY[cls.__name__] = cls
|
||||
MODEL_REGISTRY[cls.__name__] = cls
|
||||
|
||||
sysCreatedAt: Optional[float] = Field(
|
||||
default=None,
|
||||
|
|
@ -46,6 +46,7 @@ class PowerOnModel(BaseModel):
|
|||
"frontend_required": False,
|
||||
"frontend_visible": False,
|
||||
"system": True,
|
||||
"fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
|
||||
},
|
||||
)
|
||||
sysModifiedAt: Optional[float] = Field(
|
||||
|
|
@ -70,5 +71,6 @@ class PowerOnModel(BaseModel):
|
|||
"frontend_required": False,
|
||||
"frontend_visible": False,
|
||||
"system": True,
|
||||
"fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
|
||||
},
|
||||
)
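The rename from _MODEL_REGISTRY to MODEL_REGISTRY above makes the auto-registration that PowerOnModel performs via __init_subclass__ part of the public surface: every subclass registers itself under its class name, which doubles as its table name. A self-contained sketch of that pattern:

from typing import Dict, Optional, Type
from pydantic import BaseModel

MODEL_REGISTRY: Dict[str, Type["RegisteredModel"]] = {}

class RegisteredModel(BaseModel):
    def __init_subclass__(cls, **kwargs):
        super().__init_subclass__(**kwargs)
        # Class name doubles as the table name, so lookups by table work directly.
        MODEL_REGISTRY[cls.__name__] = cls

class Mandate(RegisteredModel):
    label: str

def getModelByTableName(tableName: str) -> Optional[Type[RegisteredModel]]:
    return MODEL_REGISTRY.get(tableName)

# Example: getModelByTableName("Mandate") is Mandate  ->  True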
@ -49,12 +49,12 @@ class BillingAccount(PowerOnModel):
|
|||
mandateId: str = Field(
|
||||
...,
|
||||
description="Foreign key to Mandate",
|
||||
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate"}},
|
||||
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}},
|
||||
)
|
||||
userId: Optional[str] = Field(
|
||||
None,
|
||||
description="Foreign key to User (None = mandate pool account, set = user audit account)",
|
||||
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "User"}},
|
||||
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"}},
|
||||
)
|
||||
balance: float = Field(default=0.0, description="Current balance in CHF", json_schema_extra={"label": "Guthaben (CHF)"})
|
||||
warningThreshold: float = Field(
|
||||
|
|
@ -62,10 +62,10 @@ class BillingAccount(PowerOnModel):
|
|||
description="Warning threshold in CHF",
|
||||
json_schema_extra={"label": "Warnschwelle (CHF)"},
|
||||
)
|
||||
lastWarningAt: Optional[datetime] = Field(
|
||||
lastWarningAt: Optional[float] = Field(
|
||||
None,
|
||||
description="Last warning sent timestamp",
|
||||
json_schema_extra={"label": "Letzte Warnung"},
|
||||
description="Last warning sent timestamp (UTC unix)",
|
||||
json_schema_extra={"label": "Letzte Warnung", "frontend_type": "timestamp"},
|
||||
)
|
||||
enabled: bool = Field(default=True, description="Account is active", json_schema_extra={"label": "Aktiv"})
|
||||
|
||||
|
|
@ -81,7 +81,7 @@ class BillingTransaction(PowerOnModel):
|
|||
accountId: str = Field(
|
||||
...,
|
||||
description="Foreign key to BillingAccount",
|
||||
json_schema_extra={"label": "Konto-ID", "fk_target": {"db": "poweron_billing", "table": "BillingAccount"}},
|
||||
json_schema_extra={"label": "Konto-ID", "fk_target": {"db": "poweron_billing", "table": "BillingAccount", "labelField": None}},
|
||||
)
|
||||
transactionType: TransactionTypeEnum = Field(..., description="Transaction type", json_schema_extra={"label": "Typ"})
|
||||
amount: float = Field(..., description="Amount in CHF (always positive)", json_schema_extra={"label": "Betrag (CHF)"})
|
||||
|
|
@ -100,19 +100,19 @@ class BillingTransaction(PowerOnModel):
|
|||
featureInstanceId: Optional[str] = Field(
|
||||
None,
|
||||
description="Feature instance ID",
|
||||
json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
|
||||
json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}},
|
||||
)
|
||||
featureCode: Optional[str] = Field(
|
||||
None,
|
||||
description="Feature code (e.g., automation)",
|
||||
json_schema_extra={"label": "Feature-Code", "fk_target": {"db": "poweron_app", "table": "Feature", "column": "code"}},
|
||||
json_schema_extra={"label": "Feature-Code", "fk_target": {"db": "poweron_app", "table": "Feature", "column": "code", "labelField": "code"}},
|
||||
)
|
||||
aicoreProvider: Optional[str] = Field(None, description="AICore provider (anthropic, openai, etc.)", json_schema_extra={"label": "AI-Anbieter"})
|
||||
aicoreModel: Optional[str] = Field(None, description="AICore model name (e.g., claude-4-sonnet, gpt-4o)", json_schema_extra={"label": "AI-Modell"})
|
||||
createdByUserId: Optional[str] = Field(
|
||||
None,
|
||||
description="User who created/caused this transaction",
|
||||
json_schema_extra={"label": "Erstellt von Benutzer", "fk_target": {"db": "poweron_app", "table": "User"}},
|
||||
json_schema_extra={"label": "Erstellt von Benutzer", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"}},
|
||||
)
|
||||
|
||||
# AI call metadata (for per-call analytics)
|
||||
|
|
@ -133,7 +133,7 @@ class BillingSettings(BaseModel):
|
|||
mandateId: str = Field(
|
||||
...,
|
||||
description="Foreign key to Mandate (UNIQUE)",
|
||||
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate"}},
|
||||
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}},
|
||||
)
|
||||
|
||||
warningThresholdPercent: float = Field(
|
||||
|
|
@ -158,7 +158,7 @@ class BillingSettings(BaseModel):
|
|||
)
|
||||
rechargeMaxPerMonth: int = Field(default=3, description="Max auto-recharges per month", json_schema_extra={"label": "Max. Nachladungen/Monat"})
|
||||
rechargesThisMonth: int = Field(default=0, description="Counter: auto-recharges used this month", json_schema_extra={"label": "Nachladungen diesen Monat"})
|
||||
monthResetAt: Optional[datetime] = Field(None, description="When rechargesThisMonth was last reset", json_schema_extra={"label": "Monats-Reset"})
|
||||
monthResetAt: Optional[float] = Field(None, description="When rechargesThisMonth was last reset (UTC unix)", json_schema_extra={"label": "Monats-Reset", "frontend_type": "timestamp"})
|
||||
|
||||
# Notifications
|
||||
notifyEmails: List[str] = Field(
|
||||
|
|
@ -174,10 +174,10 @@ class BillingSettings(BaseModel):
|
|||
description="Peak indexed data volume MB this billing period",
|
||||
json_schema_extra={"label": "Speicher-Peak (MB)"},
|
||||
)
|
||||
storagePeriodStartAt: Optional[datetime] = Field(
|
||||
storagePeriodStartAt: Optional[float] = Field(
|
||||
None,
|
||||
description="Subscription billing period start used for storage reset",
|
||||
json_schema_extra={"label": "Speicher-Periodenbeginn"},
|
||||
description="Subscription billing period start used for storage reset (UTC unix)",
|
||||
json_schema_extra={"label": "Speicher-Periodenbeginn", "frontend_type": "timestamp"},
|
||||
)
|
||||
storageBilledUpToMB: float = Field(
|
||||
default=0.0,
|
||||
|
|
@ -193,9 +193,10 @@ class StripeWebhookEvent(BaseModel):
|
|||
description="Primary key",
|
||||
)
|
||||
event_id: str = Field(..., description="Stripe event ID (evt_xxx)")
|
||||
processed_at: datetime = Field(
|
||||
default_factory=lambda: datetime.now(timezone.utc),
|
||||
description="When the event was processed",
|
||||
processed_at: float = Field(
|
||||
default_factory=lambda: datetime.now(timezone.utc).timestamp(),
|
||||
description="When the event was processed (UTC unix)",
|
||||
json_schema_extra={"frontend_type": "timestamp"},
|
||||
)
|
||||
|
||||
|
||||
|
|
@ -210,10 +211,14 @@ class UsageStatistics(BaseModel):
|
|||
accountId: str = Field(
|
||||
...,
|
||||
description="Foreign key to BillingAccount",
|
||||
json_schema_extra={"label": "Konto-ID", "fk_target": {"db": "poweron_billing", "table": "BillingAccount"}},
|
||||
json_schema_extra={"label": "Konto-ID", "fk_target": {"db": "poweron_billing", "table": "BillingAccount", "labelField": None}},
|
||||
)
|
||||
periodType: PeriodTypeEnum = Field(..., description="Period type", json_schema_extra={"label": "Periodentyp"})
|
||||
periodStart: date = Field(..., description="Period start date", json_schema_extra={"label": "Periodenbeginn"})
|
||||
periodStart: date = Field(
|
||||
...,
|
||||
description="Period start date",
|
||||
json_schema_extra={"label": "Periodenbeginn", "frontend_type": "date"},
|
||||
)
|
||||
|
||||
# Aggregated values
|
||||
totalCostCHF: float = Field(default=0.0, description="Total cost in CHF", json_schema_extra={"label": "Gesamtkosten (CHF)"})
|
||||
|
|
|
|||
|
|
@ -16,12 +16,12 @@ class ChatLog(PowerOnModel):
|
|||
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key", json_schema_extra={"label": "ID"})
|
||||
workflowId: str = Field(
|
||||
description="Foreign key to workflow",
|
||||
json_schema_extra={"label": "Workflow-ID", "fk_target": {"db": "poweron_chat", "table": "ChatWorkflow"}},
|
||||
json_schema_extra={"label": "Workflow-ID", "fk_target": {"db": "poweron_chat", "table": "ChatWorkflow", "labelField": "name"}},
|
||||
)
|
||||
message: str = Field(description="Log message", json_schema_extra={"label": "Nachricht"})
|
||||
type: str = Field(description="Log type (info, warning, error, etc.)", json_schema_extra={"label": "Typ"})
|
||||
timestamp: float = Field(default_factory=getUtcTimestamp,
|
||||
description="When the log entry was created (UTC timestamp in seconds)", json_schema_extra={"label": "Zeitstempel"})
|
||||
description="When the log entry was created (UTC timestamp in seconds)", json_schema_extra={"label": "Zeitstempel", "frontend_type": "timestamp"})
|
||||
status: Optional[str] = Field(None, description="Status of the log entry", json_schema_extra={"label": "Status"})
|
||||
progress: Optional[float] = Field(None, description="Progress indicator (0.0 to 1.0)", json_schema_extra={"label": "Fortschritt"})
|
||||
performance: Optional[Dict[str, Any]] = Field(None, description="Performance metrics", json_schema_extra={"label": "Leistung"})
|
||||
|
|
@ -37,11 +37,11 @@ class ChatDocument(PowerOnModel):
|
|||
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key", json_schema_extra={"label": "ID"})
|
||||
messageId: str = Field(
|
||||
description="Foreign key to message",
|
||||
json_schema_extra={"label": "Nachrichten-ID", "fk_target": {"db": "poweron_chat", "table": "ChatMessage"}},
|
||||
json_schema_extra={"label": "Nachrichten-ID", "fk_target": {"db": "poweron_chat", "table": "ChatMessage", "labelField": None}},
|
||||
)
|
||||
fileId: str = Field(
|
||||
description="Foreign key to file",
|
||||
json_schema_extra={"label": "Datei-ID", "fk_target": {"db": "poweron_management", "table": "FileItem"}},
|
||||
json_schema_extra={"label": "Datei-ID", "fk_target": {"db": "poweron_management", "table": "FileItem", "labelField": "fileName"}},
|
||||
)
|
||||
fileName: str = Field(description="Name of the file", json_schema_extra={"label": "Dateiname"})
|
||||
fileSize: int = Field(description="Size of the file", json_schema_extra={"label": "Dateigröße"})
|
||||
|
|
@ -81,12 +81,12 @@ class ChatMessage(PowerOnModel):
|
|||
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key", json_schema_extra={"label": "ID"})
|
||||
workflowId: str = Field(
|
||||
description="Foreign key to workflow",
|
||||
json_schema_extra={"label": "Workflow-ID", "fk_target": {"db": "poweron_chat", "table": "ChatWorkflow"}},
|
||||
json_schema_extra={"label": "Workflow-ID", "fk_target": {"db": "poweron_chat", "table": "ChatWorkflow", "labelField": "name"}},
|
||||
)
|
||||
parentMessageId: Optional[str] = Field(
|
||||
None,
|
||||
description="Parent message ID for threading",
|
||||
json_schema_extra={"label": "Übergeordnete Nachrichten-ID", "fk_target": {"db": "poweron_chat", "table": "ChatMessage"}},
|
||||
json_schema_extra={"label": "Übergeordnete Nachrichten-ID", "fk_target": {"db": "poweron_chat", "table": "ChatMessage", "labelField": None}},
|
||||
)
|
||||
documents: List[ChatDocument] = Field(default_factory=list, description="Associated documents", json_schema_extra={"label": "Dokumente"})
|
||||
documentsLabel: Optional[str] = Field(None, description="Label for the set of documents", json_schema_extra={"label": "Dokumenten-Label"})
|
||||
|
|
@ -97,7 +97,7 @@ class ChatMessage(PowerOnModel):
|
|||
sequenceNr: Optional[int] = Field(default=0,
|
||||
description="Sequence number of the message (set automatically)", json_schema_extra={"label": "Sequenznummer"})
|
||||
publishedAt: Optional[float] = Field(default=None,
|
||||
description="When the message was published (UTC timestamp in seconds)", json_schema_extra={"label": "Veröffentlicht am"})
|
||||
description="When the message was published (UTC timestamp in seconds)", json_schema_extra={"label": "Veröffentlicht am", "frontend_type": "timestamp"})
|
||||
success: Optional[bool] = Field(None, description="Whether the message processing was successful", json_schema_extra={"label": "Erfolg"})
|
||||
actionId: Optional[str] = Field(None, description="ID of the action that produced this message", json_schema_extra={"label": "Aktions-ID"})
|
||||
actionMethod: Optional[str] = Field(None, description="Method of the action that produced this message", json_schema_extra={"label": "Aktionsmethode"})
|
||||
|
|
@ -125,7 +125,7 @@ class ChatWorkflow(PowerOnModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
linkedWorkflowId: Optional[str] = Field(
|
||||
|
|
@ -219,7 +219,7 @@ class UserInputRequest(BaseModel):
|
|||
workflowId: Optional[str] = Field(
|
||||
None,
|
||||
description="Optional ID of the workflow to continue",
|
||||
json_schema_extra={"label": "Workflow-ID", "fk_target": {"db": "poweron_chat", "table": "ChatWorkflow"}},
|
||||
json_schema_extra={"label": "Workflow-ID", "fk_target": {"db": "poweron_chat", "table": "ChatWorkflow", "labelField": "name"}},
|
||||
)
|
||||
allowedProviders: Optional[List[str]] = Field(None, description="List of allowed AI providers (multiselect)", json_schema_extra={"label": "Erlaubte Anbieter"})
|
||||
|
||||
|
|
@ -281,8 +281,8 @@ class ObservationPreview(BaseModel):
|
|||
# Extended metadata fields
|
||||
mimeType: Optional[str] = Field(default=None, description="MIME type", json_schema_extra={"label": "MIME-Typ"})
|
||||
size: Optional[str] = Field(default=None, description="File size", json_schema_extra={"label": "Größe"})
|
||||
created: Optional[str] = Field(default=None, description="Creation timestamp", json_schema_extra={"label": "Erstellt"})
|
||||
modified: Optional[str] = Field(default=None, description="Modification timestamp", json_schema_extra={"label": "Geändert"})
|
||||
created: Optional[float] = Field(default=None, description="Creation timestamp (UTC unix)", json_schema_extra={"label": "Erstellt", "frontend_type": "timestamp"})
|
||||
modified: Optional[float] = Field(default=None, description="Modification timestamp (UTC unix)", json_schema_extra={"label": "Geändert", "frontend_type": "timestamp"})
|
||||
typeGroup: Optional[str] = Field(default=None, description="Document type group", json_schema_extra={"label": "Typgruppe"})
|
||||
documentId: Optional[str] = Field(default=None, description="Document ID", json_schema_extra={"label": "Dokument-ID"})
|
||||
reference: Optional[str] = Field(default=None, description="Document reference", json_schema_extra={"label": "Referenz"})
|
||||
|
|
@ -332,7 +332,7 @@ class ActionItem(BaseModel):
|
|||
retryCount: int = Field(default=0, description="Number of retries attempted", json_schema_extra={"label": "Wiederholungen"})
|
||||
retryMax: int = Field(default=3, description="Maximum number of retries", json_schema_extra={"label": "Max. Wiederholungen"})
|
||||
processingTime: Optional[float] = Field(None, description="Processing time in seconds", json_schema_extra={"label": "Bearbeitungszeit"})
|
||||
timestamp: float = Field(..., description="When the action was executed (UTC timestamp in seconds)", json_schema_extra={"label": "Zeitstempel"})
|
||||
timestamp: float = Field(..., description="When the action was executed (UTC timestamp in seconds)", json_schema_extra={"label": "Zeitstempel", "frontend_type": "timestamp"})
|
||||
result: Optional[str] = Field(None, description="Result of the action", json_schema_extra={"label": "Ergebnis"})
|
||||
|
||||
def setSuccess(self, result: str = None) -> None:
|
||||
|
|
@ -361,13 +361,13 @@ class TaskItem(BaseModel):
|
|||
workflowId: str = Field(
|
||||
...,
|
||||
description="Workflow ID",
|
||||
json_schema_extra={"label": "Workflow-ID", "fk_target": {"db": "poweron_chat", "table": "ChatWorkflow"}},
|
||||
json_schema_extra={"label": "Workflow-ID", "fk_target": {"db": "poweron_chat", "table": "ChatWorkflow", "labelField": "name"}},
|
||||
)
|
||||
userInput: str = Field(..., description="User input that triggered the task", json_schema_extra={"label": "Benutzereingabe"})
|
||||
status: TaskStatus = Field(default=TaskStatus.PENDING, description="Task status", json_schema_extra={"label": "Status"})
|
||||
error: Optional[str] = Field(None, description="Error message if task failed", json_schema_extra={"label": "Fehler"})
|
||||
startedAt: Optional[float] = Field(None, description="When the task started (UTC timestamp in seconds)", json_schema_extra={"label": "Gestartet am"})
|
||||
finishedAt: Optional[float] = Field(None, description="When the task finished (UTC timestamp in seconds)", json_schema_extra={"label": "Beendet am"})
|
||||
startedAt: Optional[float] = Field(None, description="When the task started (UTC timestamp in seconds)", json_schema_extra={"label": "Gestartet am", "frontend_type": "timestamp"})
|
||||
finishedAt: Optional[float] = Field(None, description="When the task finished (UTC timestamp in seconds)", json_schema_extra={"label": "Beendet am", "frontend_type": "timestamp"})
|
||||
actionList: List[ActionItem] = Field(default_factory=list, description="List of actions to execute", json_schema_extra={"label": "Aktionen"})
|
||||
retryCount: int = Field(default=0, description="Number of retries attempted", json_schema_extra={"label": "Wiederholungen"})
|
||||
retryMax: int = Field(default=3, description="Maximum number of retries", json_schema_extra={"label": "Max. Wiederholungen"})
|
||||
|
|
@ -402,7 +402,7 @@ class TaskHandover(BaseModel):
|
|||
improvements: List[str] = Field(default_factory=list, description="Improvement suggestions", json_schema_extra={"label": "Verbesserungen"})
|
||||
workflowSummary: Optional[str] = Field(None, description="Summarized workflow context", json_schema_extra={"label": "Workflow-Zusammenfassung"})
|
||||
messageHistory: List[str] = Field(default_factory=list, description="Key message summaries", json_schema_extra={"label": "Nachrichtenverlauf"})
|
||||
timestamp: float = Field(..., description="When the handover was created (UTC timestamp in seconds)", json_schema_extra={"label": "Zeitstempel"})
|
||||
timestamp: float = Field(..., description="When the handover was created (UTC timestamp in seconds)", json_schema_extra={"label": "Zeitstempel", "frontend_type": "timestamp"})
|
||||
handoverType: str = Field(default="task", description="Type of handover: task, phase, or workflow", json_schema_extra={"label": "Übergabetyp"})
|
||||
|
||||
class TaskContext(BaseModel):
|
||||
|
|
|
|||
|
|
@ -34,7 +34,7 @@ class ContentObject(BaseModel):
|
|||
id: str = Field(default_factory=lambda: str(uuid.uuid4()))
|
||||
fileId: str = Field(
|
||||
description="FK to the physical file",
|
||||
json_schema_extra={"fk_target": {"db": "poweron_management", "table": "FileItem"}},
|
||||
json_schema_extra={"fk_target": {"db": "poweron_management", "table": "FileItem", "labelField": "fileName"}},
|
||||
)
|
||||
contentType: str = Field(description="text, image, videostream, audiostream, other")
|
||||
data: str = Field(default="", description="Content data (text, base64, URL)")
|
||||
|
|
|
|||
|
|
@ -23,7 +23,7 @@ class DataSource(PowerOnModel):
|
|||
)
|
||||
connectionId: str = Field(
|
||||
description="FK to UserConnection",
|
||||
json_schema_extra={"label": "Verbindungs-ID", "fk_target": {"db": "poweron_app", "table": "UserConnection"}},
|
||||
json_schema_extra={"label": "Verbindungs-ID", "fk_target": {"db": "poweron_app", "table": "UserConnection", "labelField": "externalUsername"}},
|
||||
)
|
||||
sourceType: str = Field(
|
||||
description="sharepointFolder, googleDriveFolder, outlookFolder, ftpFolder, clickupList (path under /team/...)",
|
||||
|
|
@ -45,17 +45,17 @@ class DataSource(PowerOnModel):
|
|||
featureInstanceId: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Scoped to feature instance",
|
||||
json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
|
||||
json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}},
|
||||
)
|
||||
mandateId: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Mandate scope",
|
||||
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate"}},
|
||||
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}},
|
||||
)
|
||||
userId: str = Field(
|
||||
default="",
|
||||
description="Owner user ID",
|
||||
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "User"}},
|
||||
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"}},
|
||||
)
|
||||
autoSync: bool = Field(
|
||||
default=False,
|
||||
|
|
@ -65,7 +65,7 @@ class DataSource(PowerOnModel):
|
|||
lastSynced: Optional[float] = Field(
|
||||
default=None,
|
||||
description="Last sync timestamp",
|
||||
json_schema_extra={"label": "Letzter Sync"},
|
||||
json_schema_extra={"label": "Letzter Sync", "frontend_type": "timestamp"},
|
||||
)
|
||||
scope: str = Field(
|
||||
default="personal",
|
||||
|
|
@ -91,5 +91,9 @@ class ExternalEntry(BaseModel):
|
|||
isFolder: bool = Field(default=False, description="True if directory/folder")
|
||||
size: Optional[int] = Field(default=None, description="File size in bytes")
|
||||
mimeType: Optional[str] = Field(default=None, description="MIME type (files only)")
|
||||
lastModified: Optional[float] = Field(default=None, description="Last modification timestamp")
|
||||
lastModified: Optional[float] = Field(
|
||||
default=None,
|
||||
description="Last modification timestamp",
|
||||
json_schema_extra={"frontend_type": "timestamp"},
|
||||
)
|
||||
metadata: Dict[str, Any] = Field(default_factory=dict, description="Provider-specific metadata")
|
||||
|
|
|
|||
|
|
@ -23,11 +23,11 @@ class FeatureDataSource(PowerOnModel):
|
|||
)
|
||||
featureInstanceId: str = Field(
|
||||
description="FK to FeatureInstance",
|
||||
json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
|
||||
json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}},
|
||||
)
|
||||
featureCode: str = Field(
|
||||
description="Feature code (e.g. trustee, commcoach)",
|
||||
json_schema_extra={"label": "Feature", "fk_target": {"db": "poweron_app", "table": "Feature", "column": "code"}},
|
||||
json_schema_extra={"label": "Feature", "fk_target": {"db": "poweron_app", "table": "Feature", "column": "code", "labelField": "code"}},
|
||||
)
|
||||
tableName: str = Field(
|
||||
description="Table name from DATA_OBJECTS meta (e.g. TrusteePosition)",
|
||||
|
|
@ -44,16 +44,16 @@ class FeatureDataSource(PowerOnModel):
|
|||
mandateId: str = Field(
|
||||
default="",
|
||||
description="Mandate scope",
|
||||
json_schema_extra={"label": "Mandant", "fk_target": {"db": "poweron_app", "table": "Mandate"}},
|
||||
json_schema_extra={"label": "Mandant", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}},
|
||||
)
|
||||
userId: str = Field(
|
||||
default="",
|
||||
description="Owner user ID",
|
||||
json_schema_extra={"label": "Benutzer", "fk_target": {"db": "poweron_app", "table": "User"}},
|
||||
json_schema_extra={"label": "Benutzer", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"}},
|
||||
)
|
||||
workspaceInstanceId: str = Field(
|
||||
description="Workspace feature instance where this source is used",
|
||||
json_schema_extra={"label": "Workspace", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
|
||||
json_schema_extra={"label": "Workspace", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}},
|
||||
)
|
||||
scope: str = Field(
|
||||
default="personal",
|
||||
|
|
|
|||
|
|
@ -43,7 +43,7 @@ class FeatureInstance(PowerOnModel):
|
|||
"frontend_type": "select",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": True,
|
||||
"fk_target": {"db": "poweron_app", "table": "Feature", "column": "code"},
|
||||
"fk_target": {"db": "poweron_app", "table": "Feature", "column": "code", "labelField": "code"},
|
||||
},
|
||||
)
|
||||
mandateId: str = Field(
|
||||
|
|
@ -53,7 +53,7 @@ class FeatureInstance(PowerOnModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": True,
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
label: str = Field(
|
||||
|
|
|
|||
|
|
@ -29,7 +29,7 @@ class FileFolder(PowerOnModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": False,
|
||||
"frontend_required": False,
|
||||
"fk_target": {"db": "poweron_management", "table": "FileFolder"},
|
||||
"fk_target": {"db": "poweron_management", "table": "FileFolder", "labelField": "name"},
|
||||
},
|
||||
)
|
||||
mandateId: Optional[str] = Field(
|
||||
|
|
@ -40,7 +40,7 @@ class FileFolder(PowerOnModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
featureInstanceId: Optional[str] = Field(
|
||||
|
|
@ -51,7 +51,7 @@ class FileFolder(PowerOnModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
scope: str = Field(
|
||||
|
|
|
|||
|
|
@ -30,10 +30,7 @@ class FileItem(PowerOnModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"frontend_fk_source": "/api/mandates/",
|
||||
"frontend_fk_display_field": "label",
|
||||
"fk_model": "Mandate",
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
featureInstanceId: Optional[str] = Field(
|
||||
|
|
@ -44,10 +41,7 @@ class FileItem(PowerOnModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"frontend_fk_source": "/api/features/instances",
|
||||
"frontend_fk_display_field": "label",
|
||||
"fk_model": "FeatureInstance",
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
mimeType: str = Field(
|
||||
|
|
@ -82,7 +76,7 @@ class FileItem(PowerOnModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": False,
|
||||
"frontend_required": False,
|
||||
"fk_target": {"db": "poweron_management", "table": "FileFolder"},
|
||||
"fk_target": {"db": "poweron_management", "table": "FileFolder", "labelField": "name"},
|
||||
},
|
||||
)
|
||||
description: Optional[str] = Field(
|
||||
|
|
|
|||
|
|
@ -5,10 +5,11 @@ Invitation model for self-service onboarding.
|
|||
Token-basierte Einladungen für neue User zu Mandanten/Features.
|
||||
"""
|
||||
|
||||
import time
|
||||
import uuid
|
||||
import secrets
|
||||
from typing import Optional, List
|
||||
from pydantic import BaseModel, Field
|
||||
from pydantic import BaseModel, Field, computed_field
|
||||
from modules.datamodels.datamodelBase import PowerOnModel
|
||||
from modules.shared.i18nRegistry import i18nModel
|
||||
|
||||
|
|
@ -37,7 +38,7 @@ class Invitation(PowerOnModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": True,
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
featureInstanceId: Optional[str] = Field(
|
||||
|
|
@ -48,7 +49,7 @@ class Invitation(PowerOnModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
roleIds: List[str] = Field(
|
||||
|
|
@ -80,7 +81,7 @@ class Invitation(PowerOnModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"fk_target": {"db": "poweron_app", "table": "User"},
|
||||
"fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
|
||||
},
|
||||
)
|
||||
usedAt: Optional[float] = Field(
|
||||
|
|
@ -94,10 +95,26 @@ class Invitation(PowerOnModel):
|
|||
json_schema_extra={"label": "Widerrufen am", "frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False}
|
||||
)
|
||||
|
||||
emailSent: Optional[bool] = Field(
|
||||
emailSentFlag: Optional[bool] = Field(
|
||||
default=False,
|
||||
description="Whether the invitation email was successfully sent",
|
||||
json_schema_extra={"label": "E-Mail gesendet", "frontend_type": "checkbox", "frontend_readonly": True, "frontend_required": False}
|
||||
json_schema_extra={
|
||||
"label": "E-Mail gesendet",
|
||||
"frontend_type": "checkbox",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"frontend_format_labels": ["Ja", "-", "Nein"],
|
||||
},
|
||||
)
|
||||
emailSentAt: Optional[float] = Field(
|
||||
default=None,
|
||||
description="Timestamp when the invitation email was sent (UTC, seconds)",
|
||||
json_schema_extra={
|
||||
"label": "E-Mail gesendet am",
|
||||
"frontend_type": "timestamp",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
},
|
||||
)
|
||||
|
||||
maxUses: int = Field(
|
||||
|
|
@ -113,3 +130,33 @@ class Invitation(PowerOnModel):
|
|||
description="Current number of times this invitation has been used",
|
||||
json_schema_extra={"label": "Aktuelle Verwendungen", "frontend_type": "number", "frontend_readonly": True, "frontend_required": False}
|
||||
)
|
||||
|
||||
@computed_field( # type: ignore[prop-decorator]
|
||||
json_schema_extra={
|
||||
"label": "Abgelaufen",
|
||||
"frontend_type": "checkbox",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"frontend_format_labels": ["Ja", "-", "Nein"],
|
||||
},
|
||||
)
|
||||
@property
|
||||
def expiredFlag(self) -> bool:
|
||||
"""True iff `expiresAt` lies in the past (UTC)."""
|
||||
if self.expiresAt is None:
|
||||
return False
|
||||
return float(self.expiresAt) < time.time()
|
||||
|
||||
@computed_field( # type: ignore[prop-decorator]
|
||||
json_schema_extra={
|
||||
"label": "Verbraucht",
|
||||
"frontend_type": "checkbox",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"frontend_format_labels": ["Ja", "-", "Nein"],
|
||||
},
|
||||
)
|
||||
@property
|
||||
def usedUpFlag(self) -> bool:
|
||||
"""True iff `currentUses >= maxUses`."""
|
||||
return (self.currentUses or 0) >= (self.maxUses or 1)
|
||||
|
|
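# --- Minimal standalone sketch of the computed_field pattern above (illustration only) ----
# Not the real Invitation model: it only shows that the property is exported by
# model_dump() and carries its own json_schema_extra, which is how the admin table gets
# label / frontend_type for the derived column.
import time
from typing import Optional
from pydantic import BaseModel, Field, computed_field

class InvitationSketch(BaseModel):
    expiresAt: Optional[float] = Field(default=None)
    maxUses: int = 1
    currentUses: int = 0

    @computed_field(json_schema_extra={"label": "Abgelaufen", "frontend_type": "checkbox"})  # type: ignore[prop-decorator]
    @property
    def expiredFlag(self) -> bool:
        return self.expiresAt is not None and float(self.expiresAt) < time.time()

print(InvitationSketch(expiresAt=time.time() - 60).model_dump())
# -> {'expiresAt': ..., 'maxUses': 1, 'currentUses': 0, 'expiredFlag': True}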
|
|||
|
|
@ -30,17 +30,17 @@ class FileContentIndex(PowerOnModel):
|
|||
)
|
||||
userId: str = Field(
|
||||
description="Owner user ID",
|
||||
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "User"}},
|
||||
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"}},
|
||||
)
|
||||
featureInstanceId: str = Field(
|
||||
default="",
|
||||
description="Feature instance scope",
|
||||
json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
|
||||
json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}},
|
||||
)
|
||||
mandateId: str = Field(
|
||||
default="",
|
||||
description="Mandate scope",
|
||||
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate"}},
|
||||
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}},
|
||||
)
|
||||
fileName: str = Field(
|
||||
description="Original file name",
|
||||
|
|
@ -78,7 +78,7 @@ class FileContentIndex(PowerOnModel):
|
|||
extractedAt: float = Field(
|
||||
default_factory=getUtcTimestamp,
|
||||
description="Extraction timestamp",
|
||||
json_schema_extra={"label": "Extrahiert am"},
|
||||
json_schema_extra={"label": "Extrahiert am", "frontend_type": "timestamp"},
|
||||
)
|
||||
status: str = Field(
|
||||
default="pending",
|
||||
|
|
@ -116,16 +116,16 @@ class ContentChunk(PowerOnModel):
|
|||
)
|
||||
fileId: str = Field(
|
||||
description="FK to the source file",
|
||||
json_schema_extra={"label": "Datei-ID", "fk_target": {"db": "poweron_management", "table": "FileItem"}},
|
||||
json_schema_extra={"label": "Datei-ID", "fk_target": {"db": "poweron_management", "table": "FileItem", "labelField": "fileName"}},
|
||||
)
|
||||
userId: str = Field(
|
||||
description="Owner user ID",
|
||||
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "User"}},
|
||||
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"}},
|
||||
)
|
||||
featureInstanceId: str = Field(
|
||||
default="",
|
||||
description="Feature instance scope",
|
||||
json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
|
||||
json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}},
|
||||
)
|
||||
contentType: str = Field(
|
||||
description="Content type: text, image, videostream, audiostream, other",
|
||||
|
|
@ -214,16 +214,16 @@ class WorkflowMemory(PowerOnModel):
|
|||
)
|
||||
workflowId: str = Field(
|
||||
description="FK to the workflow",
|
||||
json_schema_extra={"label": "Workflow-ID", "fk_target": {"db": "poweron_chat", "table": "ChatWorkflow"}},
|
||||
json_schema_extra={"label": "Workflow-ID", "fk_target": {"db": "poweron_chat", "table": "ChatWorkflow", "labelField": "name"}},
|
||||
)
|
||||
userId: str = Field(
|
||||
description="Owner user ID",
|
||||
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "User"}},
|
||||
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"}},
|
||||
)
|
||||
featureInstanceId: str = Field(
|
||||
default="",
|
||||
description="Feature instance scope",
|
||||
json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
|
||||
json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}},
|
||||
)
|
||||
key: str = Field(
|
||||
description="Key identifier (e.g. 'entity:companyName')",
|
||||
|
|
|
|||
|
|
@ -31,10 +31,7 @@ class UserMandate(PowerOnModel):
|
|||
"frontend_type": "select",
|
||||
"frontend_readonly": False,
|
||||
"frontend_required": True,
|
||||
"frontend_fk_source": "/api/users/",
|
||||
"frontend_fk_display_field": "username",
|
||||
"fk_model": "User",
|
||||
"fk_target": {"db": "poweron_app", "table": "User"},
|
||||
"fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
|
||||
},
|
||||
)
|
||||
mandateId: str = Field(
|
||||
|
|
@ -44,10 +41,7 @@ class UserMandate(PowerOnModel):
|
|||
"frontend_type": "select",
|
||||
"frontend_readonly": False,
|
||||
"frontend_required": True,
|
||||
"frontend_fk_source": "/api/mandates/",
|
||||
"frontend_fk_display_field": "label",
|
||||
"fk_model": "Mandate",
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
enabled: bool = Field(
|
||||
|
|
@ -75,9 +69,7 @@ class FeatureAccess(PowerOnModel):
|
|||
"frontend_type": "select",
|
||||
"frontend_readonly": False,
|
||||
"frontend_required": True,
|
||||
"frontend_fk_source": "/api/users/",
|
||||
"frontend_fk_display_field": "username",
|
||||
"fk_target": {"db": "poweron_app", "table": "User"},
|
||||
"fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
|
||||
},
|
||||
)
|
||||
featureInstanceId: str = Field(
|
||||
|
|
@ -87,9 +79,7 @@ class FeatureAccess(PowerOnModel):
|
|||
"frontend_type": "select",
|
||||
"frontend_readonly": False,
|
||||
"frontend_required": True,
|
||||
"frontend_fk_source": "/api/features/instances",
|
||||
"frontend_fk_display_field": "label",
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
enabled: bool = Field(
|
||||
|
|
@ -117,7 +107,7 @@ class UserMandateRole(PowerOnModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": True,
|
||||
"fk_target": {"db": "poweron_app", "table": "UserMandate"},
|
||||
"fk_target": {"db": "poweron_app", "table": "UserMandate", "labelField": None},
|
||||
},
|
||||
)
|
||||
roleId: str = Field(
|
||||
|
|
@ -127,9 +117,7 @@ class UserMandateRole(PowerOnModel):
|
|||
"frontend_type": "select",
|
||||
"frontend_readonly": False,
|
||||
"frontend_required": True,
|
||||
"frontend_fk_source": "/api/rbac/roles",
|
||||
"frontend_fk_display_field": "roleLabel",
|
||||
"fk_target": {"db": "poweron_app", "table": "Role"},
|
||||
"fk_target": {"db": "poweron_app", "table": "Role", "labelField": "roleLabel"},
|
||||
},
|
||||
)
|
||||
|
||||
|
|
@ -152,7 +140,7 @@ class FeatureAccessRole(PowerOnModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": True,
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureAccess"},
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureAccess", "labelField": None},
|
||||
},
|
||||
)
|
||||
roleId: str = Field(
|
||||
|
|
@ -162,8 +150,6 @@ class FeatureAccessRole(PowerOnModel):
|
|||
"frontend_type": "select",
|
||||
"frontend_readonly": False,
|
||||
"frontend_required": True,
|
||||
"frontend_fk_source": "/api/rbac/roles",
|
||||
"frontend_fk_display_field": "roleLabel",
|
||||
"fk_target": {"db": "poweron_app", "table": "Role"},
|
||||
"fk_target": {"db": "poweron_app", "table": "Role", "labelField": "roleLabel"},
|
||||
},
|
||||
)
|
||||
|
|
|
|||
|
|
@ -64,7 +64,7 @@ class MessagingSubscription(PowerOnModel):
|
|||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"label": "Mandanten-ID",
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
featureInstanceId: str = Field(
|
||||
|
|
@ -74,7 +74,7 @@ class MessagingSubscription(PowerOnModel):
|
|||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"label": "Feature-Instanz-ID",
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
description: Optional[str] = Field(
|
||||
|
|
@ -131,7 +131,7 @@ class MessagingSubscriptionRegistration(BaseModel):
|
|||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"label": "Mandanten-ID",
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
featureInstanceId: str = Field(
|
||||
|
|
@ -141,7 +141,7 @@ class MessagingSubscriptionRegistration(BaseModel):
|
|||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"label": "Feature-Instanz-ID",
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
subscriptionId: str = Field(
|
||||
|
|
@ -160,7 +160,7 @@ class MessagingSubscriptionRegistration(BaseModel):
|
|||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"label": "Benutzer-ID",
|
||||
"fk_target": {"db": "poweron_app", "table": "User"},
|
||||
"fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
|
||||
},
|
||||
)
|
||||
channel: MessagingChannel = Field(
|
||||
|
|
@ -249,7 +249,7 @@ class MessagingDelivery(BaseModel):
|
|||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"label": "Benutzer-ID",
|
||||
"fk_target": {"db": "poweron_app", "table": "User"},
|
||||
"fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
|
||||
},
|
||||
)
|
||||
channel: MessagingChannel = Field(
|
||||
|
|
@ -296,7 +296,7 @@ class MessagingDelivery(BaseModel):
|
|||
default=None,
|
||||
description="When the delivery was sent (UTC timestamp in seconds)",
|
||||
json_schema_extra={
|
||||
"frontend_type": "datetime",
|
||||
"frontend_type": "timestamp",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"label": "Gesendet am",
|
||||
|
|
|
|||
|
|
@ -65,7 +65,7 @@ class UserNotification(PowerOnModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": True,
|
||||
"fk_target": {"db": "poweron_app", "table": "User"},
|
||||
"fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
|
||||
},
|
||||
)
|
||||
|
||||
|
|
|
|||
|
|
@ -63,9 +63,7 @@ class Role(PowerOnModel):
|
|||
"frontend_readonly": True,
|
||||
"frontend_visible": True,
|
||||
"frontend_required": False,
|
||||
"frontend_fk_source": "/api/mandates/",
|
||||
"frontend_fk_display_field": "label",
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
featureInstanceId: Optional[str] = Field(
|
||||
|
|
@ -77,9 +75,7 @@ class Role(PowerOnModel):
|
|||
"frontend_readonly": True,
|
||||
"frontend_visible": True,
|
||||
"frontend_required": False,
|
||||
"frontend_fk_source": "/api/features/instances",
|
||||
"frontend_fk_display_field": "label",
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
featureCode: Optional[str] = Field(
|
||||
|
|
@ -115,9 +111,7 @@ class AccessRule(PowerOnModel):
|
|||
"frontend_type": "select",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": True,
|
||||
"frontend_fk_source": "/api/rbac/roles",
|
||||
"frontend_fk_display_field": "roleLabel",
|
||||
"fk_target": {"db": "poweron_app", "table": "Role"},
|
||||
"fk_target": {"db": "poweron_app", "table": "Role", "labelField": "roleLabel"},
|
||||
},
|
||||
)
|
||||
context: AccessRuleContext = Field(
|
||||
|
|
|
|||
|
|
@ -47,7 +47,7 @@ class Token(PowerOnModel):
|
|||
)
|
||||
userId: str = Field(
|
||||
...,
|
||||
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "User"}},
|
||||
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"}},
|
||||
)
|
||||
authority: AuthAuthority = Field(
|
||||
...,
|
||||
|
|
@ -56,7 +56,7 @@ class Token(PowerOnModel):
|
|||
connectionId: Optional[str] = Field(
|
||||
None,
|
||||
description="ID of the connection this token belongs to",
|
||||
json_schema_extra={"label": "Verbindungs-ID", "fk_target": {"db": "poweron_app", "table": "UserConnection"}},
|
||||
json_schema_extra={"label": "Verbindungs-ID", "fk_target": {"db": "poweron_app", "table": "UserConnection", "labelField": "externalUsername"}},
|
||||
)
|
||||
tokenPurpose: Optional[TokenPurpose] = Field(
|
||||
default=None,
|
||||
|
|
@ -73,7 +73,7 @@ class Token(PowerOnModel):
|
|||
)
|
||||
expiresAt: float = Field(
|
||||
description="When the token expires (UTC timestamp in seconds)",
|
||||
json_schema_extra={"label": "Laeuft ab am"},
|
||||
json_schema_extra={"label": "Laeuft ab am", "frontend_type": "timestamp"},
|
||||
)
|
||||
tokenRefresh: Optional[str] = Field(
|
||||
default=None,
|
||||
|
|
@ -87,12 +87,12 @@ class Token(PowerOnModel):
|
|||
revokedAt: Optional[float] = Field(
|
||||
None,
|
||||
description="When the token was revoked (UTC timestamp in seconds)",
|
||||
json_schema_extra={"label": "Widerrufen am"},
|
||||
json_schema_extra={"label": "Widerrufen am", "frontend_type": "timestamp"},
|
||||
)
|
||||
revokedBy: Optional[str] = Field(
|
||||
None,
|
||||
description="User ID who revoked the token (admin/self)",
|
||||
json_schema_extra={"label": "Widerrufen von", "fk_target": {"db": "poweron_app", "table": "User"}},
|
||||
json_schema_extra={"label": "Widerrufen von", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"}},
|
||||
)
|
||||
reason: Optional[str] = Field(
|
||||
None,
|
||||
|
|
@ -139,7 +139,7 @@ class AuthEvent(PowerOnModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": True,
|
||||
"fk_target": {"db": "poweron_app", "table": "User"},
|
||||
"fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
|
||||
},
|
||||
)
|
||||
eventType: str = Field(
|
||||
|
|
@ -149,7 +149,7 @@ class AuthEvent(PowerOnModel):
|
|||
timestamp: float = Field(
|
||||
default_factory=getUtcTimestamp,
|
||||
description="Unix timestamp when the event occurred",
|
||||
json_schema_extra={"label": "Zeitstempel", "frontend_type": "datetime", "frontend_readonly": True, "frontend_required": True},
|
||||
json_schema_extra={"label": "Zeitstempel", "frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": True},
|
||||
)
|
||||
ipAddress: Optional[str] = Field(
|
||||
default=None,
|
||||
|
|
|
|||
|
|
@ -207,7 +207,7 @@ class MandateSubscription(PowerOnModel):
|
|||
mandateId: str = Field(
|
||||
...,
|
||||
description="Foreign key to Mandate",
|
||||
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate"}},
|
||||
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}},
|
||||
)
|
||||
planKey: str = Field(
|
||||
...,
|
||||
|
|
@ -226,35 +226,35 @@ class MandateSubscription(PowerOnModel):
|
|||
json_schema_extra={"label": "Wiederkehrend"},
|
||||
)
|
||||
|
||||
startedAt: datetime = Field(
|
||||
default_factory=lambda: datetime.now(timezone.utc),
|
||||
description="Record creation timestamp",
|
||||
json_schema_extra={"label": "Gestartet"},
|
||||
startedAt: float = Field(
|
||||
default_factory=lambda: datetime.now(timezone.utc).timestamp(),
|
||||
description="Record creation timestamp (UTC unix)",
|
||||
json_schema_extra={"label": "Gestartet", "frontend_type": "timestamp"},
|
||||
)
|
||||
effectiveFrom: Optional[datetime] = Field(
|
||||
effectiveFrom: Optional[float] = Field(
|
||||
None,
|
||||
description="When this subscription becomes operative. None = immediate. Set for SCHEDULED subs.",
|
||||
json_schema_extra={"label": "Wirksam ab"},
|
||||
description="When this subscription becomes operative (UTC unix). None = immediate.",
|
||||
json_schema_extra={"label": "Wirksam ab", "frontend_type": "timestamp"},
|
||||
)
|
||||
endedAt: Optional[datetime] = Field(
|
||||
endedAt: Optional[float] = Field(
|
||||
None,
|
||||
description="When subscription ended (terminal)",
|
||||
json_schema_extra={"label": "Beendet"},
|
||||
description="When subscription ended (UTC unix)",
|
||||
json_schema_extra={"label": "Beendet", "frontend_type": "timestamp"},
|
||||
)
|
||||
currentPeriodStart: Optional[datetime] = Field(
|
||||
currentPeriodStart: Optional[float] = Field(
|
||||
None,
|
||||
description="Current billing period start (synced from Stripe)",
|
||||
json_schema_extra={"label": "Periodenbeginn"},
|
||||
description="Current billing period start (UTC unix, synced from Stripe)",
|
||||
json_schema_extra={"label": "Periodenbeginn", "frontend_type": "timestamp"},
|
||||
)
|
||||
currentPeriodEnd: Optional[datetime] = Field(
|
||||
currentPeriodEnd: Optional[float] = Field(
|
||||
None,
|
||||
description="Current billing period end (synced from Stripe)",
|
||||
json_schema_extra={"label": "Periodenende"},
|
||||
description="Current billing period end (UTC unix, synced from Stripe)",
|
||||
json_schema_extra={"label": "Periodenende", "frontend_type": "timestamp"},
|
||||
)
|
||||
trialEndsAt: Optional[datetime] = Field(
|
||||
trialEndsAt: Optional[float] = Field(
|
||||
None,
|
||||
description="Trial expiry timestamp",
|
||||
json_schema_extra={"label": "Trial endet"},
|
||||
description="Trial expiry timestamp (UTC unix)",
|
||||
json_schema_extra={"label": "Trial endet", "frontend_type": "timestamp"},
|
||||
)
|
||||
|
||||
snapshotPricePerUserCHF: float = Field(
|
||||
|
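# --- Hedged sketch (assumption, not shown in this diff) ------------------------------------
# The datetime -> float switch above means persisted MandateSubscription documents may still
# carry ISO strings or datetime objects. A coercion helper of roughly this shape could
# normalize legacy values to UTC unix seconds; the project's actual migration / validator is
# not part of this hunk.
from datetime import datetime, timezone
from typing import Any, Optional

def toUtcSeconds(value: Any) -> Optional[float]:
    """Coerce datetime / ISO-8601 string / number to a UTC unix timestamp in seconds."""
    if value is None:
        return None
    if isinstance(value, (int, float)):
        return float(value)
    if isinstance(value, str):
        value = datetime.fromisoformat(value)
    if isinstance(value, datetime):
        if value.tzinfo is None:
            value = value.replace(tzinfo=timezone.utc)
        return value.timestamp()
    raise TypeError(f"Cannot coerce {type(value).__name__} to a timestamp")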
|
@ -407,7 +407,7 @@ BUILTIN_PLANS: Dict[str, SubscriptionPlan] = {
|
|||
}
|
||||
|
||||
|
||||
def _getPlan(planKey: str) -> Optional[SubscriptionPlan]:
|
||||
def getPlan(planKey: str) -> Optional[SubscriptionPlan]:
|
||||
"""Resolve a plan by key from the built-in catalog."""
|
||||
return BUILTIN_PLANS.get(planKey)
|
||||
|
||||
|
|
|
|||
|
|
@ -397,7 +397,7 @@ class UserConnection(PowerOnModel):
|
|||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"label": "Benutzer-ID",
|
||||
"fk_target": {"db": "poweron_app", "table": "User"},
|
||||
"fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
|
||||
},
|
||||
)
|
||||
authority: AuthAuthority = Field(
|
||||
|
|
@ -646,11 +646,11 @@ class UserInDB(User):
|
|||
resetTokenExpires: Optional[float] = Field(
|
||||
None,
|
||||
description="Reset token expiration (UTC timestamp in seconds)",
|
||||
json_schema_extra={"label": "Token läuft ab"},
|
||||
json_schema_extra={"label": "Token läuft ab", "frontend_type": "timestamp"},
|
||||
)
|
||||
|
||||
|
||||
def _normalizeTtsVoiceMap(value: Any) -> Optional[Dict[str, str]]:
|
||||
def normalizeTtsVoiceMap(value: Any) -> Optional[Dict[str, str]]:
|
||||
"""
|
||||
Coerce ttsVoiceMap payloads to Dict[str, str].
|
||||
|
||||
|
|
@ -687,12 +687,12 @@ class UserVoicePreferences(PowerOnModel):
|
|||
)
|
||||
userId: str = Field(
|
||||
description="User ID",
|
||||
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "User"}},
|
||||
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"}},
|
||||
)
|
||||
mandateId: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Mandate scope (None = global for user)",
|
||||
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate"}},
|
||||
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}},
|
||||
)
|
||||
sttLanguage: str = Field(
|
||||
default="de-DE",
|
||||
|
|
@ -728,6 +728,6 @@ class UserVoicePreferences(PowerOnModel):
|
|||
@field_validator("ttsVoiceMap", mode="before")
|
||||
@classmethod
|
||||
def _validateTtsVoiceMap(cls, value: Any) -> Optional[Dict[str, str]]:
|
||||
return _normalizeTtsVoiceMap(value)
|
||||
return normalizeTtsVoiceMap(value)
|
||||
|
||||
|
||||
|
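# --- Hedged sketch only: the real normalizeTtsVoiceMap body is outside this hunk -----------
# The before-validator above relies on it to coerce arbitrary ttsVoiceMap payloads to
# Dict[str, str] or None; something along these lines, exact rules assumed.
from typing import Any, Dict, Optional

def normalizeTtsVoiceMapSketch(value: Any) -> Optional[Dict[str, str]]:
    if value in (None, "", {}):
        return None
    if isinstance(value, dict):
        return {str(k): str(v) for k, v in value.items()}
    raise ValueError("ttsVoiceMap must be a mapping of language code -> voice name")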
|
|
|||
|
|
@ -14,8 +14,8 @@ from modules.datamodels.datamodelExtraction import ContentExtracted, ContentPart
|
|||
class UdmMetadata(BaseModel):
|
||||
title: Optional[str] = None
|
||||
author: Optional[str] = None
|
||||
createdAt: Optional[str] = None
|
||||
modifiedAt: Optional[str] = None
|
||||
createdAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
|
||||
modifiedAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
|
||||
sourcePath: str = ""
|
||||
tags: List[str] = Field(default_factory=list)
|
||||
custom: Dict[str, Any] = Field(default_factory=dict)
|
||||
|
|
@ -177,7 +177,7 @@ def _groupKeyForPart(part: ContentPart) -> Tuple[str, int, str]:
|
|||
_VALID_DOC_SOURCES = frozenset({"pdf", "docx", "pptx", "xlsx", "html", "binary", "unknown"})
|
||||
|
||||
|
||||
def _contentPartsToUdm(extracted: ContentExtracted, sourceType: str, sourcePath: str) -> UdmDocument:
|
||||
def contentPartsToUdm(extracted: ContentExtracted, sourceType: str, sourcePath: str) -> UdmDocument:
|
||||
"""Convert flat ContentPart list into a UdmDocument using structural heuristics."""
|
||||
parts = list(extracted.parts or [])
|
||||
st: Literal["pdf", "docx", "pptx", "xlsx", "html", "binary", "unknown"] = (
|
||||
|
|
@ -290,7 +290,7 @@ def _stripUdmForReferences(udm: UdmDocument) -> UdmDocument:
|
|||
return clone
|
||||
|
||||
|
||||
def _applyUdmOutputDetail(udm: UdmDocument, detail: str) -> UdmDocument:
|
||||
def applyUdmOutputDetail(udm: UdmDocument, detail: str) -> UdmDocument:
|
||||
if detail == "structure":
|
||||
return _stripUdmRaw(udm)
|
||||
if detail == "references":
|
||||
|
|
@ -298,7 +298,7 @@ def _applyUdmOutputDetail(udm: UdmDocument, detail: str) -> UdmDocument:
|
|||
return udm
|
||||
|
||||
|
||||
def _mimeToUdmSourceType(mimeType: str, fileName: str) -> Literal["pdf", "docx", "pptx", "xlsx", "html", "binary", "unknown"]:
|
||||
def mimeToUdmSourceType(mimeType: str, fileName: str) -> Literal["pdf", "docx", "pptx", "xlsx", "html", "binary", "unknown"]:
|
||||
m = (mimeType or "").lower()
|
||||
fn = (fileName or "").lower()
|
||||
if m == "application/pdf" or fn.endswith(".pdf"):
|
||||
|
|
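# --- Tiny standalone sketch of the mapping mimeToUdmSourceType performs --------------------
# Only the PDF branch is visible in this hunk; the docx/unknown branches below are assumed
# for illustration and are not the real implementation.
def mimeToSourceTypeSketch(mimeType: str, fileName: str) -> str:
    m, fn = (mimeType or "").lower(), (fileName or "").lower()
    if m == "application/pdf" or fn.endswith(".pdf"):
        return "pdf"
    if fn.endswith(".docx"):
        return "docx"
    return "unknown"

assert mimeToSourceTypeSketch("application/pdf", "report.pdf") == "pdf"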
|
|||
|
|
@ -27,7 +27,7 @@ class Prompt(PowerOnModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
isSystem: bool = Field(
|
||||
|
|
|
|||
311
modules/datamodels/datamodelViews.py
Normal file
|
|
@ -0,0 +1,311 @@
# Copyright (c) 2025 Patrick Motsch
# All rights reserved.
"""
View models for the /api/attributes/ endpoint.

These extend base DB models with computed / enriched fields that the gateway
adds at response time (JOINs, aggregations, synthetics). They are NEVER used
for DB operations — only for ``getModelAttributeDefinitions()`` so the frontend
can resolve column types via ``resolveColumnTypes`` without hardcoding.

Naming convention: ``{BaseModel}View``.

``getModelClasses()`` in ``attributeUtils.py`` auto-discovers every
``datamodel*.py`` under ``modules/datamodels/`` — so placing them here is
sufficient for registration.
"""
|
||||
|
||||
from typing import Optional, List
|
||||
from pydantic import Field
|
||||
|
||||
from modules.datamodels.datamodelBase import MODEL_REGISTRY, PowerOnModel
|
||||
from modules.datamodels.datamodelMembership import UserMandate, FeatureAccess
|
||||
from modules.datamodels.datamodelBilling import BillingTransaction
|
||||
from modules.datamodels.datamodelSubscription import MandateSubscription
|
||||
from modules.datamodels.datamodelUiLanguage import UiLanguageSet
|
||||
from modules.datamodels.datamodelRbac import Role
|
||||
from modules.features.neutralization.datamodelFeatureNeutralizer import DataNeutralizerAttributes
|
||||
from modules.shared.i18nRegistry import i18nModel
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Punkt 1a: UserMandate + enriched user fields
|
||||
# ============================================================================
|
||||
|
||||
@i18nModel("Benutzer-Mandant (Ansicht)")
|
||||
class UserMandateView(UserMandate):
|
||||
"""UserMandate erweitert um aufgeloeste Benutzerfelder und Rollenlabels."""
|
||||
|
||||
username: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Username (resolved from userId)",
|
||||
json_schema_extra={"label": "Benutzername", "frontend_type": "text", "frontend_readonly": True},
|
||||
)
|
||||
email: Optional[str] = Field(
|
||||
default=None,
|
||||
description="E-Mail address (resolved from userId)",
|
||||
json_schema_extra={"label": "E-Mail", "frontend_type": "text", "frontend_readonly": True},
|
||||
)
|
||||
fullName: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Full name (resolved from userId)",
|
||||
json_schema_extra={"label": "Vollstaendiger Name", "frontend_type": "text", "frontend_readonly": True},
|
||||
)
|
||||
roleLabels: Optional[List[str]] = Field(
|
||||
default=None,
|
||||
description="Role labels (resolved from junction table)",
|
||||
json_schema_extra={"label": "Rollen", "frontend_type": "text", "frontend_readonly": True},
|
||||
)
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Punkt 1b: FeatureAccess + enriched user fields
|
||||
# ============================================================================
|
||||
|
||||
@i18nModel("Feature-Zugang (Ansicht)")
|
||||
class FeatureAccessView(FeatureAccess):
|
||||
"""FeatureAccess erweitert um aufgeloeste Benutzerfelder und Rollenlabels."""
|
||||
|
||||
username: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Username (resolved from userId)",
|
||||
json_schema_extra={"label": "Benutzername", "frontend_type": "text", "frontend_readonly": True},
|
||||
)
|
||||
email: Optional[str] = Field(
|
||||
default=None,
|
||||
description="E-Mail address (resolved from userId)",
|
||||
json_schema_extra={"label": "E-Mail", "frontend_type": "text", "frontend_readonly": True},
|
||||
)
|
||||
fullName: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Full name (resolved from userId)",
|
||||
json_schema_extra={"label": "Vollstaendiger Name", "frontend_type": "text", "frontend_readonly": True},
|
||||
)
|
||||
roleLabels: Optional[List[str]] = Field(
|
||||
default=None,
|
||||
description="Role labels (resolved from junction table)",
|
||||
json_schema_extra={"label": "Rollen", "frontend_type": "text", "frontend_readonly": True},
|
||||
)
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Punkt 1d: BillingTransaction + enriched mandate/user names
|
||||
# ============================================================================
|
||||
|
||||
@i18nModel("Transaktion (Ansicht)")
|
||||
class BillingTransactionView(BillingTransaction):
|
||||
"""BillingTransaction erweitert um aufgeloeste Mandanten-/Benutzernamen."""
|
||||
|
||||
mandateName: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Mandate name (resolved from accountId/mandateId)",
|
||||
json_schema_extra={"label": "Mandant", "frontend_type": "text", "frontend_readonly": True},
|
||||
)
|
||||
userName: Optional[str] = Field(
|
||||
default=None,
|
||||
description="User name (resolved from createdByUserId)",
|
||||
json_schema_extra={"label": "Benutzer", "frontend_type": "text", "frontend_readonly": True},
|
||||
)
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Punkt 3a: MandateSubscription + aggregated fields
|
||||
# ============================================================================
|
||||
|
||||
@i18nModel("Abonnement (Ansicht)")
|
||||
class MandateSubscriptionView(MandateSubscription):
|
||||
"""MandateSubscription erweitert um aggregierte Laufzeitwerte."""
|
||||
|
||||
mandateName: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Mandate name (resolved from mandateId)",
|
||||
json_schema_extra={"label": "Mandant", "frontend_type": "text", "frontend_readonly": True},
|
||||
)
|
||||
planTitle: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Plan title (resolved from planKey)",
|
||||
json_schema_extra={"label": "Plan", "frontend_type": "text", "frontend_readonly": True},
|
||||
)
|
||||
activeUsers: Optional[int] = Field(
|
||||
default=None,
|
||||
description="Number of active users in the mandate",
|
||||
json_schema_extra={"label": "Benutzer", "frontend_type": "number", "frontend_readonly": True},
|
||||
)
|
||||
activeInstances: Optional[int] = Field(
|
||||
default=None,
|
||||
description="Number of active feature instances in the mandate",
|
||||
json_schema_extra={"label": "Module", "frontend_type": "number", "frontend_readonly": True},
|
||||
)
|
||||
monthlyRevenueCHF: Optional[float] = Field(
|
||||
default=None,
|
||||
description="Calculated monthly revenue in CHF",
|
||||
json_schema_extra={"label": "Umsatz pro Monat", "frontend_type": "number", "frontend_readonly": True},
|
||||
)
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Punkt 3b: UiLanguageSet + computed counts
|
||||
# ============================================================================
|
||||
|
||||
@i18nModel("Sprachset (Ansicht)")
|
||||
class UiLanguageSetView(UiLanguageSet):
|
||||
"""UiLanguageSet erweitert um berechnete Uebersetzungszaehler."""
|
||||
|
||||
uiCount: Optional[int] = Field(
|
||||
default=None,
|
||||
description="Number of UI translation entries",
|
||||
json_schema_extra={"label": "UI", "frontend_type": "number", "frontend_readonly": True},
|
||||
)
|
||||
gatewayCount: Optional[int] = Field(
|
||||
default=None,
|
||||
description="Number of gateway/API translation entries",
|
||||
json_schema_extra={"label": "API", "frontend_type": "number", "frontend_readonly": True},
|
||||
)
|
||||
entriesCount: Optional[int] = Field(
|
||||
default=None,
|
||||
description="Total number of translation entries",
|
||||
json_schema_extra={"label": "Gesamt", "frontend_type": "number", "frontend_readonly": True},
|
||||
)
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Punkt 1c: DataNeutralizerAttributes + enriched fields
|
||||
#
|
||||
# DataNeutralizerAttributes extends BaseModel (not PowerOnModel), so its
|
||||
# subclass does NOT auto-register in MODEL_REGISTRY. We register manually.
|
||||
# ============================================================================
|
||||
|
||||
@i18nModel("Neutralisierungs-Zuordnung (Ansicht)")
|
||||
class DataNeutralizerAttributesView(DataNeutralizerAttributes):
|
||||
"""DataNeutralizerAttributes erweitert um synthetische/aufgeloeste Felder."""
|
||||
|
||||
placeholder: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Synthetic placeholder string [patternType.id]",
|
||||
json_schema_extra={"label": "Platzhalter", "frontend_type": "text", "frontend_readonly": True},
|
||||
)
|
||||
username: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Username (resolved from userId)",
|
||||
json_schema_extra={"label": "Benutzer", "frontend_type": "text", "frontend_readonly": True},
|
||||
)
|
||||
instanceLabel: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Feature instance label (resolved from featureInstanceId)",
|
||||
json_schema_extra={"label": "Feature-Instanz", "frontend_type": "text", "frontend_readonly": True},
|
||||
)
|
||||
|
||||
|
||||
# Manual registration for non-PowerOnModel view
|
||||
MODEL_REGISTRY["DataNeutralizerAttributesView"] = DataNeutralizerAttributesView # type: ignore[assignment]
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Role view — admin RBAC list with computed `scopeType` + `userCount`
|
||||
#
|
||||
# `scopeType` is computed in the route from (mandateId, isSystemRole). Exposed
|
||||
# here as a pure `select` field so the frontend renders the user-facing label
|
||||
# from `frontend_options` (no hardcoded mapping in the page).
|
||||
# ============================================================================
|
||||
|
||||
@i18nModel("Rolle (Ansicht)")
|
||||
class RoleView(Role):
|
||||
"""Role extended with computed scope information for the admin UI."""
|
||||
|
||||
scopeType: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Computed scope: 'system' (template), 'global', or 'mandate'.",
|
||||
json_schema_extra={
|
||||
"label": "Geltungsbereich",
|
||||
"frontend_type": "select",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"frontend_options": [
|
||||
{"value": "system", "label": "System-Template"},
|
||||
{"value": "global", "label": "Template"},
|
||||
{"value": "mandate", "label": "Mandant"},
|
||||
],
|
||||
},
|
||||
)
|
||||
userCount: Optional[int] = Field(
|
||||
default=None,
|
||||
description="Number of users assigned to this role (via UserMandateRole).",
|
||||
json_schema_extra={
|
||||
"label": "Benutzer",
|
||||
"frontend_type": "number",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Automation Workflow — dashboard view with synthesized fields
|
||||
# ============================================================================
|
||||
|
||||
from modules.features.graphicalEditor.datamodelFeatureGraphicalEditor import AutoWorkflow
|
||||
|
||||
|
||||
@i18nModel("Workflow (Ansicht)")
|
||||
class Automation2WorkflowView(AutoWorkflow):
|
||||
"""AutoWorkflow extended with computed dashboard fields.
|
||||
|
||||
Used exclusively for /api/attributes/ so the frontend can resolve column
|
||||
types for the workflow dashboard table (FormGeneratorTable).
|
||||
"""
|
||||
|
||||
sysCreatedAt: Optional[float] = Field(
|
||||
default=None,
|
||||
description="Record creation timestamp (UTC)",
|
||||
json_schema_extra={
|
||||
"label": "Erstellt",
|
||||
"frontend_type": "timestamp",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
},
|
||||
)
|
||||
lastStartedAt: Optional[float] = Field(
|
||||
default=None,
|
||||
description="Timestamp of the most recent workflow run start",
|
||||
json_schema_extra={
|
||||
"label": "Zuletzt gestartet",
|
||||
"frontend_type": "timestamp",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
},
|
||||
)
|
||||
runCount: Optional[int] = Field(
|
||||
default=None,
|
||||
description="Total number of runs for this workflow",
|
||||
json_schema_extra={
|
||||
"label": "Laeufe",
|
||||
"frontend_type": "number",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
},
|
||||
)
|
||||
mandateLabel: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Mandate name (resolved from mandateId)",
|
||||
json_schema_extra={"label": "Mandant", "frontend_type": "text", "frontend_readonly": True},
|
||||
)
|
||||
instanceLabel: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Feature instance label (resolved from featureInstanceId)",
|
||||
json_schema_extra={"label": "Feature-Instanz", "frontend_type": "text", "frontend_readonly": True},
|
||||
)
|
||||
featureCode: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Feature code of the owning instance",
|
||||
json_schema_extra={"label": "Feature", "frontend_type": "text", "frontend_readonly": True},
|
||||
)
|
||||
isRunning: Optional[bool] = Field(
|
||||
default=None,
|
||||
description="Whether the workflow currently has an active run",
|
||||
json_schema_extra={
|
||||
"label": "Läuft",
|
||||
"frontend_type": "checkbox",
|
||||
"frontend_readonly": True,
|
||||
"frontend_format_labels": ["Ja", "-", "Nein"],
|
||||
},
|
||||
)
|
||||
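# --- Sketch (names assumed, not part of datamodelViews.py) ---------------------------------
# What these *View models support at the gateway: FK ids are resolved to labels, merged into
# the raw record, and the frontend reads column types for the extra fields from
# /api/attributes/ (UserMandateView, RoleView, ...). The views are never written back.
def enrichUserMandateRecord(record: dict, usersById: dict) -> dict:
    """Merge resolved user fields into a raw UserMandate row (shape of `record` assumed)."""
    user = usersById.get(record.get("userId"), {})
    return {
        **record,
        "username": user.get("username"),
        "email": user.get("email"),
        "fullName": user.get("fullName"),
    }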
|
|
@ -22,9 +22,24 @@ class WorkflowActionParameter(BaseModel):
json_schema_extra={"label": "Name"},
)
type: str = Field(
description="Python type as string: 'str', 'int', 'bool', 'List[str]', etc.",
description=(
"Type reference. Either a primitive ('str', 'int', 'bool', 'float', 'Any', "
"'List[str]', 'Dict[str,Any]', …) or a PORT_TYPE_CATALOG schema name "
"(e.g. 'ConnectionRef', 'FeatureInstanceRef', 'DocumentList', "
"'TrusteeProcessResult'). Catalog types are validated by "
"_actionSignatureValidator at startup."
),
json_schema_extra={"label": "Typ"},
)
uiHint: Optional[str] = Field(
None,
description=(
"Optional UI rendering hint for adapters. "
"Free-form (e.g. 'textarea', 'cron', 'fieldBuilder'). "
"Adapters can override; defaults derive from frontendType when absent."
),
json_schema_extra={"label": "UI-Hinweis"},
)
frontendType: FrontendType = Field(
description="UI rendering type (from global FrontendType enum)",
json_schema_extra={"label": "Frontend-Typ"},
|
||||
|
|
@@ -80,6 +95,16 @@ class WorkflowActionDefinition(BaseModel):
        description="Parameter schema definitions",
        json_schema_extra={"label": "Parameter"},
    )
    outputType: str = Field(
        "ActionResult",
        description=(
            "PORT_TYPE_CATALOG schema name produced by this action "
            "(e.g. 'TrusteeProcessResult', 'EmailDraft', 'DocumentList'). "
            "Defaults to 'ActionResult' for fire-and-forget actions. "
            "Validated by _actionSignatureValidator at startup."
        ),
        json_schema_extra={"label": "Ausgabe-Typ"},
    )
    execute: Optional[Callable] = Field(
        None,
        description="Execution function - async function that takes parameters dict and returns ActionResult. Set dynamically.",
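For illustration only: a minimal sketch of how an action could declare catalog-typed parameters and an output type under the schema above. Only the field names shown in this diff come from the source; the action and parameter names, the FrontendType member, and any additional required fields are assumptions.

    # Hypothetical declaration - names are placeholders, not part of this PR.
    documentsParam = WorkflowActionParameter(
        type="DocumentList",              # PORT_TYPE_CATALOG schema name (validated at startup)
        uiHint="fieldBuilder",            # optional adapter rendering hint
        frontendType=FrontendType.TEXT,   # assumed enum member; other required fields omitted
    )
    extractReceipts = WorkflowActionDefinition(
        parameters={"documents": documentsParam},  # parameter schema definitions
        outputType="TrusteeProcessResult",         # checked against PORT_TYPE_CATALOG by _actionSignatureValidator
    )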
@@ -2,7 +2,7 @@
Demo Configs — Auto-Discovery Module

Scans this folder for Python files that contain subclasses of _BaseDemoConfig
and exposes them via _getAvailableDemoConfigs().
and exposes them via getAvailableDemoConfigs().
"""

import importlib

@@ -18,7 +18,7 @@ logger = logging.getLogger(__name__)
_configCache: Dict[str, _BaseDemoConfig] = {}


def _getAvailableDemoConfigs() -> Dict[str, _BaseDemoConfig]:
def getAvailableDemoConfigs() -> Dict[str, _BaseDemoConfig]:
    """Return a dict of code -> instance for every discovered demo config."""
    if _configCache:
        return _configCache

@@ -43,7 +43,7 @@ def _getAvailableDemoConfigs() -> Dict[str, _BaseDemoConfig]:
    return _configCache


def _getDemoConfigByCode(code: str) -> _BaseDemoConfig | None:
def getDemoConfigByCode(code: str) -> _BaseDemoConfig | None:
    """Get a specific demo config by its code."""
    configs = _getAvailableDemoConfigs()
    configs = getAvailableDemoConfigs()
    return configs.get(code)
@@ -4,11 +4,16 @@ Base class for demo configurations.
Each demo config file in this folder extends _BaseDemoConfig and provides
idempotent load() and remove() methods for setting up / tearing down
a complete demo environment (mandates, users, features, test data, etc.).

Subclasses MUST also declare ``credentials`` so the SysAdmin who triggers a
demo-load gets the initial username / password pair shown in the UI -- this
avoids the "where do I find the password?" anti-pattern of having to grep the
source code.
"""

import logging
from abc import ABC, abstractmethod
from typing import Dict, Any
from typing import Any, Dict, List

logger = logging.getLogger(__name__)

@@ -20,6 +25,13 @@ class _BaseDemoConfig(ABC):
    label: str = ""
    description: str = ""

    # Each entry describes one bootstrapped login that the demo creates.
    # Shape: {"role": "Demo-Sachbearbeiter", "username": "pwg.demo",
    #         "email": "pwg.demo@poweron.swiss", "password": "pwg.demo.2026"}
    # Surfaced via GET /api/admin/demo-config and inside the load() summary
    # so the AdminDemoConfigPage can display it (no source-code grep needed).
    credentials: List[Dict[str, str]] = []

    @abstractmethod
    def load(self, db) -> Dict[str, Any]:
        """Create all demo data (idempotent). Returns summary dict."""

@@ -35,4 +47,5 @@ class _BaseDemoConfig(ABC):
            "code": self.code,
            "label": self.label,
            "description": self.description,
            "credentials": list(self.credentials or []),
        }
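As a sketch of the contract above (all names and values here are placeholders, not part of this PR), a concrete demo config would look roughly like this:

    class ExampleDemo(_BaseDemoConfig):
        code = "example-demo"
        label = "Example Demo 2026"
        description = "One mandate, one demo login, a few feature instances."
        credentials = [
            {"role": "Demo-User", "username": "example.demo",
             "email": "example.demo@poweron.swiss", "password": "example.demo.2026"},
        ]

        def load(self, db) -> Dict[str, Any]:
            summary = {"created": [], "skipped": [], "errors": []}
            # ... create mandate, user and feature instances idempotently ...
            summary["credentials"] = list(self.credentials)   # surfaced to AdminDemoConfigPage
            return summary

        def remove(self, db) -> Dict[str, Any]:
            # tear down everything load() created; must also be idempotent
            return {"removed": []}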
@@ -64,6 +64,14 @@ class InvestorDemo2026(_BaseDemoConfig):
        "Two mandates (HappyLife AG + Alpina Treuhand AG), one SysAdmin user, "
        "trustee with RMA, workspace, graph editor, and neutralization."
    )
    credentials = [
        {
            "role": "SysAdmin Demo",
            "username": _USER["username"],
            "email": _USER["email"],
            "password": _USER["password"],
        }
    ]

    # ------------------------------------------------------------------
    # load

@@ -101,6 +109,10 @@ class InvestorDemo2026(_BaseDemoConfig):
            logger.error(f"Demo load failed: {e}", exc_info=True)
            summary["errors"].append(str(e))

        # Surface initial credentials so the SysAdmin doesn't have to grep the
        # source code -- consumed by AdminDemoConfigPage to render a copyable
        # login box in the result banner.
        summary["credentials"] = list(self.credentials)
        return summary

    # ------------------------------------------------------------------
@@ -268,10 +280,17 @@ class InvestorDemo2026(_BaseDemoConfig):
            logger.error(f"Failed to create feature '{instanceLabel}' ({code}) in {mandateLabel}: {e}")

    def _ensureFeatureAccess(self, db, userId: str, mandateId: str, mandateLabel: str, summary: Dict):
        """Grant the demo user admin access to every feature instance in the mandate."""
        """Grant the demo user admin access on EVERY feature instance of the
        mandate. Without an explicit ``FeatureAccess`` + ``{code}-admin`` role
        the user does not see any feature tile in the UI -- so this method
        ALSO heals a half-broken state by re-copying the per-feature template
        roles if they are missing (e.g. when the instance was created via an
        older code path that skipped ``copyTemplateRoles``).
        """
        from modules.datamodels.datamodelFeatures import FeatureInstance
        from modules.datamodels.datamodelMembership import FeatureAccess, FeatureAccessRole
        from modules.datamodels.datamodelRbac import Role
        from modules.interfaces.interfaceFeatures import getFeatureInterface

        instances = db.getRecordset(FeatureInstance, recordFilter={"mandateId": mandateId}) or []
@@ -297,7 +316,38 @@ class InvestorDemo2026(_BaseDemoConfig):
                "featureInstanceId": instId,
                "roleLabel": adminRoleLabel,
            })
            if adminRoles:

            # Self-heal: if the per-feature admin role does not exist on this
            # instance the template roles were never copied -- copy them now.
            if not adminRoles:
                logger.warning(
                    "Feature instance %s (%s) is missing role '%s' -- "
                    "re-copying template roles", instId, featureCode, adminRoleLabel,
                )
                try:
                    fi = getFeatureInterface(db)
                    fi._copyTemplateRoles(featureCode, mandateId, instId)
                    summary["created"].append(
                        f"Repaired template roles for {featureCode} in {mandateLabel}"
                    )
                except Exception as repairErr:
                    summary["errors"].append(
                        f"Could not repair template roles for {featureCode} "
                        f"in {mandateLabel}: {repairErr}"
                    )
                adminRoles = db.getRecordset(Role, recordFilter={
                    "featureInstanceId": instId,
                    "roleLabel": adminRoleLabel,
                })

            if not adminRoles:
                summary["errors"].append(
                    f"Admin role '{adminRoleLabel}' not found for feature "
                    f"instance {featureCode} in {mandateLabel} -- demo user "
                    f"will not see this feature."
                )
                continue

            adminRoleId = adminRoles[0].get("id")
            existingRole = db.getRecordset(FeatureAccessRole, recordFilter={
                "featureAccessId": featureAccessId,

@@ -306,6 +356,9 @@ class InvestorDemo2026(_BaseDemoConfig):
            if not existingRole:
                far = FeatureAccessRole(featureAccessId=featureAccessId, roleId=adminRoleId)
                db.recordCreate(FeatureAccessRole, far)
                summary["created"].append(
                    f"Role '{adminRoleLabel}' assigned to demo user in {mandateLabel}"
                )
                logger.info(f"Assigned {adminRoleLabel} role in {mandateLabel}")

    def _ensureTrusteeRmaConfig(self, db, mandateId: Optional[str], mandateLabel: str, summary: Dict):
@@ -394,10 +447,10 @@ class InvestorDemo2026(_BaseDemoConfig):
        if not mandateId:
            return
        try:
            from modules.interfaces.interfaceDbBilling import _getRootInterface
            from modules.interfaces.interfaceDbBilling import getRootInterface
            from modules.datamodels.datamodelBilling import BillingSettings

            billingInterface = _getRootInterface()
            billingInterface = getRootInterface()
            existingSettings = billingInterface.getSettings(mandateId)
            if existingSettings:
                summary["skipped"].append(f"Billing for {mandateLabel} exists")

@@ -479,8 +532,8 @@ class InvestorDemo2026(_BaseDemoConfig):
            summary["removed"].append(f"{len(roles)} roles in {mandateLabel}")

        try:
            from modules.interfaces.interfaceDbBilling import _getRootInterface
            billingDb = _getRootInterface().db
            from modules.interfaces.interfaceDbBilling import getRootInterface
            billingDb = getRootInterface().db
            billingSettings = billingDb.getRecordset(BillingSettings, recordFilter={"mandateId": mandateId}) or []
            for bs in billingSettings:
                billingDb.recordDelete(BillingSettings, bs.get("id"))
@@ -67,6 +67,14 @@ class PwgDemo2026(_BaseDemoConfig):
        "Graph-Editor mit dem Pilot-Workflow für Jahresmietzinsbestätigungen "
        "(als File importiert, active=false). Idempotent."
    )
    credentials = [
        {
            "role": "Demo-Sachbearbeiter",
            "username": _USER["username"],
            "email": _USER["email"],
            "password": _USER["password"],
        }
    ]

    # ------------------------------------------------------------------
    # load

@@ -98,6 +106,10 @@ class PwgDemo2026(_BaseDemoConfig):
            logger.error(f"PWG demo load failed: {e}", exc_info=True)
            summary["errors"].append(str(e))

        # Surface initial credentials so the SysAdmin doesn't have to grep the
        # source code -- consumed by AdminDemoConfigPage to render a copyable
        # login box in the result banner.
        summary["credentials"] = list(self.credentials)
        return summary

    # ------------------------------------------------------------------
@@ -253,9 +265,17 @@ class PwgDemo2026(_BaseDemoConfig):
            summary["errors"].append(f"Feature '{instanceLabel}' in {mandateLabel}: {e}")

    def _ensureFeatureAccess(self, db, userId: str, mandateId: str, mandateLabel: str, summary: Dict):
        """Grant the demo user admin access on EVERY feature instance of the
        mandate. Without an explicit ``FeatureAccess`` + ``{code}-admin`` role
        the user does not see any feature tile in the UI -- so this method
        ALSO heals a half-broken state by re-copying the per-feature template
        roles if they are missing (e.g. when the instance was created via an
        older code path that skipped ``copyTemplateRoles``).
        """
        from modules.datamodels.datamodelFeatures import FeatureInstance
        from modules.datamodels.datamodelMembership import FeatureAccess, FeatureAccessRole
        from modules.datamodels.datamodelRbac import Role
        from modules.interfaces.interfaceFeatures import getFeatureInterface

        instances = db.getRecordset(FeatureInstance, recordFilter={"mandateId": mandateId}) or []
@@ -280,7 +300,40 @@ class PwgDemo2026(_BaseDemoConfig):
                "featureInstanceId": instId,
                "roleLabel": adminRoleLabel,
            })
            if adminRoles:

            # Self-heal: if the per-feature admin role does not exist on this
            # instance the template roles were never copied -- copy them now.
            if not adminRoles:
                logger.warning(
                    "Feature instance %s (%s) is missing role '%s' -- "
                    "re-copying template roles", instId, featureCode, adminRoleLabel,
                )
                try:
                    fi = getFeatureInterface(db)
                    fi._copyTemplateRoles(featureCode, mandateId, instId)
                    summary["created"].append(
                        f"Repaired template roles for {featureCode} in {mandateLabel}"
                    )
                except Exception as repairErr:
                    summary["errors"].append(
                        f"Could not repair template roles for {featureCode} "
                        f"in {mandateLabel}: {repairErr}"
                    )
                adminRoles = db.getRecordset(Role, recordFilter={
                    "featureInstanceId": instId,
                    "roleLabel": adminRoleLabel,
                })

            if not adminRoles:
                # Hard fail surfaced to UI -- without the admin role the user
                # would silently not see the instance.
                summary["errors"].append(
                    f"Admin role '{adminRoleLabel}' not found for feature "
                    f"instance {featureCode} in {mandateLabel} -- demo user "
                    f"will not see this feature."
                )
                continue

            adminRoleId = adminRoles[0].get("id")
            existingRole = db.getRecordset(FeatureAccessRole, recordFilter={
                "featureAccessId": featureAccessId,

@@ -289,6 +342,9 @@ class PwgDemo2026(_BaseDemoConfig):
            if not existingRole:
                far = FeatureAccessRole(featureAccessId=featureAccessId, roleId=adminRoleId)
                db.recordCreate(FeatureAccessRole, far)
                summary["created"].append(
                    f"Role '{adminRoleLabel}' assigned to demo user in {mandateLabel}"
                )

    def _ensureNeutralizationConfig(self, db, mandateId: Optional[str], userId: Optional[str], summary: Dict):
        if not mandateId or not userId:
@@ -321,9 +377,9 @@ class PwgDemo2026(_BaseDemoConfig):
            return
        try:
            from modules.datamodels.datamodelBilling import BillingSettings
            from modules.interfaces.interfaceDbBilling import _getRootInterface
            from modules.interfaces.interfaceDbBilling import getRootInterface

            billingInterface = _getRootInterface()
            billingInterface = getRootInterface()
            existingSettings = billingInterface.getSettings(mandateId)
            if existingSettings:
                summary["skipped"].append(f"Billing for {mandateLabel} exists")
@@ -447,11 +503,12 @@ class PwgDemo2026(_BaseDemoConfig):
                if monthlyRent <= 0:
                    continue
                for month in range(1, 13):
                    bookingDate = f"{year}-{month:02d}-01"
                    from datetime import datetime as _dtCls, timezone as _tzCls
                    bookingTs = _dtCls(year, month, 1, tzinfo=_tzCls.utc).timestamp()
                    entryRef = f"PWG-{tenant.get('contactNumber')}-{year}{month:02d}"
                    entry = TrusteeDataJournalEntry(
                        externalId=entryRef,
                        bookingDate=bookingDate,
                        bookingDate=bookingTs,
                        reference=entryRef,
                        description=f"Mietzins {month:02d}/{year} {name}",
                        currency="CHF",

@@ -652,8 +709,8 @@ class PwgDemo2026(_BaseDemoConfig):
            db.recordDelete(Role, role.get("id"))

        try:
            from modules.interfaces.interfaceDbBilling import _getRootInterface
            billingDb = _getRootInterface().db
            from modules.interfaces.interfaceDbBilling import getRootInterface
            billingDb = getRootInterface().db
            billingSettings = billingDb.getRecordset(BillingSettings, recordFilter={"mandateId": mandateId}) or []
            for bs in billingSettings:
                billingDb.recordDelete(BillingSettings, bs.get("id"))
@@ -139,13 +139,13 @@ def storeDebugMessageAndDocuments(message, currentUser, mandateId=None, featureI
    try:
        import os
        from datetime import datetime, UTC
        from modules.shared.debugLogger import _getBaseDebugDir, _ensureDir
        from modules.shared.debugLogger import getBaseDebugDir, ensureDir
        from modules.interfaces.interfaceDbManagement import getInterface

        # Create base debug directory (use base debug dir, not prompts subdirectory)
        baseDebugDir = _getBaseDebugDir()
        baseDebugDir = getBaseDebugDir()
        debug_root = os.path.join(baseDebugDir, 'messages')
        _ensureDir(debug_root)
        ensureDir(debug_root)

        # Generate timestamp
        timestamp = datetime.now(UTC).strftime('%Y%m%d-%H%M%S-%f')[:-3]

@@ -210,7 +210,7 @@ def storeDebugMessageAndDocuments(message, currentUser, mandateId=None, featureI
            safe_label = "default"

        label_folder = os.path.join(message_path, safe_label)
        _ensureDir(label_folder)
        ensureDir(label_folder)

        # Store each document
        for i, doc in enumerate(docs):

@@ -401,8 +401,8 @@ class ChatObjects:
        dbPassword = APP_CONFIG.get("DB_PASSWORD_SECRET")
        dbPort = int(APP_CONFIG.get("DB_PORT", 5432))

        from modules.connectors.connectorDbPostgre import _get_cached_connector
        self.db = _get_cached_connector(
        from modules.connectors.connectorDbPostgre import getCachedConnector
        self.db = getCachedConnector(
            dbHost=dbHost,
            dbDatabase=dbDatabase,
            dbUser=dbUser,
@@ -35,17 +35,6 @@ from modules.features.chatbot.mainChatbot import getEventManager
from modules.shared.i18nRegistry import apiRouteContext
routeApiMsg = apiRouteContext("routeFeatureChatbot")

# Pre-warm AI connectors when this router loads (before first request).
# Ensures connectors are ready; avoids 4–8 s delay on first chatbot message.
try:
    import modules.aicore.aicoreModelRegistry  # noqa: F401
    from modules.aicore.aicoreModelRegistry import modelRegistry
    modelRegistry.ensureConnectorsRegistered()
    modelRegistry.refreshModels(force=True)
    logging.getLogger(__name__).info("Chatbot router: AI connectors pre-warmed")
except Exception as e:
    logging.getLogger(__name__).warning(f"Chatbot AI pre-warm failed: {e}")

# Configure logger
logger = logging.getLogger(__name__)

@@ -204,19 +193,20 @@ def get_chatbot_threads(
            normalized_wf["maxSteps"] = 10
            normalized_workflows.append(normalized_wf)

        metadata = PaginationMetadata(
        from modules.routes.routeHelpers import enrichRowsWithFkLabels
        enriched = enrichRowsWithFkLabels(normalized_workflows, ChatbotConversation)

        return {
            "items": enriched,
            "pagination": PaginationMetadata(
                currentPage=paginationParams.page if paginationParams else 1,
                pageSize=paginationParams.pageSize if paginationParams else len(workflows),
                totalItems=totalItems,
                totalPages=totalPages,
                sort=paginationParams.sort if paginationParams else [],
                filters=paginationParams.filters if paginationParams else None
        )

        return PaginatedResponse(
            items=normalized_workflows,
            pagination=metadata
        )
            ).model_dump(),
        }

    except HTTPException:
        raise
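The handler above now returns a plain dict instead of a PaginatedResponse; roughly (values illustrative), the payload has this shape:

    # {
    #     "items": [...],                      # rows enriched with FK labels by enrichRowsWithFkLabels
    #     "pagination": {
    #         "currentPage": 1, "pageSize": 25,
    #         "totalItems": 120, "totalPages": 5,
    #         "sort": [], "filters": None,     # PaginationMetadata.model_dump()
    #     },
    # }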
@ -90,7 +90,7 @@ class CoachingContext(PowerOnModel):
|
|||
metadata: Optional[str] = Field(default=None, description="JSON object with flexible metadata")
|
||||
sessionCount: int = Field(default=0)
|
||||
taskCount: int = Field(default=0)
|
||||
lastSessionAt: Optional[str] = Field(default=None)
|
||||
lastSessionAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
|
||||
rollingOverview: Optional[str] = Field(default=None, description="AI summary of older sessions for long context history")
|
||||
rollingOverviewUpToSessionCount: Optional[int] = Field(default=None, description="Session count covered by rollingOverview")
|
||||
|
||||
|
|
@ -113,8 +113,8 @@ class CoachingSession(PowerOnModel):
|
|||
messageCount: int = Field(default=0)
|
||||
competenceScore: Optional[float] = Field(default=None, ge=0.0, le=100.0)
|
||||
emailSent: bool = Field(default=False)
|
||||
startedAt: Optional[str] = Field(default=None)
|
||||
endedAt: Optional[str] = Field(default=None)
|
||||
startedAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
|
||||
endedAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
|
||||
|
||||
|
||||
class CoachingMessage(PowerOnModel):
|
||||
|
|
@ -141,8 +141,8 @@ class CoachingTask(PowerOnModel):
|
|||
description: Optional[str] = Field(default=None)
|
||||
status: CoachingTaskStatus = Field(default=CoachingTaskStatus.OPEN)
|
||||
priority: CoachingTaskPriority = Field(default=CoachingTaskPriority.MEDIUM)
|
||||
dueDate: Optional[str] = Field(default=None)
|
||||
completedAt: Optional[str] = Field(default=None)
|
||||
dueDate: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "date"})
|
||||
completedAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
|
||||
|
||||
|
||||
class CoachingScore(PowerOnModel):
|
||||
|
|
@ -171,7 +171,7 @@ class CoachingUserProfile(PowerOnModel):
|
|||
longestStreak: int = Field(default=0)
|
||||
totalSessions: int = Field(default=0)
|
||||
totalMinutes: int = Field(default=0)
|
||||
lastSessionAt: Optional[str] = Field(default=None)
|
||||
lastSessionAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
|
||||
|
||||
|
||||
# ============================================================================
|
||||
|
|
@ -204,7 +204,7 @@ class CoachingBadge(PowerOnModel):
|
|||
mandateId: str = Field(description="Mandate ID")
|
||||
instanceId: str = Field(description="Feature instance ID")
|
||||
badgeKey: str = Field(description="Badge identifier, e.g. 'streak_7'")
|
||||
awardedAt: Optional[str] = Field(default=None)
|
||||
awardedAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
|
||||
|
||||
|
||||
# ============================================================================
|
||||
|
|
@ -238,14 +238,14 @@ class CreateTaskRequest(BaseModel):
|
|||
title: str
|
||||
description: Optional[str] = None
|
||||
priority: Optional[CoachingTaskPriority] = CoachingTaskPriority.MEDIUM
|
||||
dueDate: Optional[str] = None
|
||||
dueDate: Optional[float] = None
|
||||
|
||||
|
||||
class UpdateTaskRequest(BaseModel):
|
||||
title: Optional[str] = None
|
||||
description: Optional[str] = None
|
||||
priority: Optional[CoachingTaskPriority] = None
|
||||
dueDate: Optional[str] = None
|
||||
dueDate: Optional[float] = None
|
||||
|
||||
|
||||
class UpdateTaskStatusRequest(BaseModel):
|
||||
|
|
|
|||
|
|
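The Commcoach models above (and the interface, route and service changes that follow) switch startedAt/endedAt/awardedAt/lastSessionAt/dueDate/completedAt from ISO-8601 strings to UTC epoch seconds. A minimal sketch of that convention, assuming getUtcTimestamp() returns time.time()-style float seconds:

    from datetime import datetime, timezone

    startedAt = 1767222000.0   # epoch seconds as stored by getUtcTimestamp() (assumed float)
    dateLabel = datetime.fromtimestamp(startedAt, tz=timezone.utc).strftime("%d.%m.%Y")

    # Sorting now falls back to 0 instead of "" for missing values:
    sessions = [{"startedAt": startedAt}, {"startedAt": None}]
    sessions.sort(key=lambda s: s.get("startedAt") or 0, reverse=True)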
@ -12,7 +12,7 @@ from typing import Dict, Any, List, Optional
|
|||
from modules.datamodels.datamodelUam import User
|
||||
from modules.connectors.connectorDbPostgre import DatabaseConnector
|
||||
from modules.shared.dbRegistry import registerDatabase
|
||||
from modules.shared.timeUtils import getIsoTimestamp
|
||||
from modules.shared.timeUtils import getIsoTimestamp, getUtcTimestamp
|
||||
from modules.shared.configuration import APP_CONFIG
|
||||
from modules.shared.i18nRegistry import resolveText, t
|
||||
|
||||
|
|
@ -112,7 +112,7 @@ class CommcoachObjects:
|
|||
CoachingSession,
|
||||
recordFilter={"contextId": contextId, "userId": userId},
|
||||
)
|
||||
records.sort(key=lambda r: r.get("startedAt") or r.get("createdAt") or "", reverse=True)
|
||||
records.sort(key=lambda r: r.get("startedAt") or 0, reverse=True)
|
||||
return records
|
||||
|
||||
def getSession(self, sessionId: str) -> Optional[Dict[str, Any]]:
|
||||
|
|
@ -129,7 +129,7 @@ class CommcoachObjects:
|
|||
def createSession(self, data: Dict[str, Any]) -> Dict[str, Any]:
|
||||
data["createdAt"] = getIsoTimestamp()
|
||||
data["updatedAt"] = getIsoTimestamp()
|
||||
data["startedAt"] = getIsoTimestamp()
|
||||
data["startedAt"] = getUtcTimestamp()
|
||||
return self.db.recordCreate(CoachingSession, data)
|
||||
|
||||
def updateSession(self, sessionId: str, updates: Dict[str, Any]) -> Optional[Dict[str, Any]]:
|
||||
|
|
@ -281,7 +281,7 @@ class CommcoachObjects:
|
|||
def getBadges(self, userId: str, instanceId: str) -> List[Dict[str, Any]]:
|
||||
from .datamodelCommcoach import CoachingBadge
|
||||
records = self.db.getRecordset(CoachingBadge, recordFilter={"userId": userId, "instanceId": instanceId})
|
||||
records.sort(key=lambda r: r.get("awardedAt") or "", reverse=True)
|
||||
records.sort(key=lambda r: r.get("awardedAt") or 0, reverse=True)
|
||||
return records
|
||||
|
||||
def hasBadge(self, userId: str, instanceId: str, badgeKey: str) -> bool:
|
||||
|
|
@ -291,7 +291,7 @@ class CommcoachObjects:
|
|||
|
||||
def awardBadge(self, data: Dict[str, Any]) -> Dict[str, Any]:
|
||||
from .datamodelCommcoach import CoachingBadge
|
||||
data["awardedAt"] = getIsoTimestamp()
|
||||
data["awardedAt"] = getUtcTimestamp()
|
||||
data["createdAt"] = getIsoTimestamp()
|
||||
return self.db.recordCreate(CoachingBadge, data)
|
||||
|
||||
|
|
|
|||
|
|
@ -336,10 +336,10 @@ async def startSession(
|
|||
try:
|
||||
from modules.interfaces.interfaceVoiceObjects import getVoiceInterface
|
||||
voiceInterface = getVoiceInterface(context.user, mandateId)
|
||||
from .serviceCommcoach import _getUserVoicePrefs, _stripMarkdownForTts, _buildTtsConfigErrorMessage
|
||||
language, voiceName = _getUserVoicePrefs(userId, mandateId)
|
||||
from .serviceCommcoach import getUserVoicePrefs, stripMarkdownForTts, buildTtsConfigErrorMessage
|
||||
language, voiceName = getUserVoicePrefs(userId, mandateId)
|
||||
ttsResult = await voiceInterface.textToSpeech(
|
||||
text=_stripMarkdownForTts(greetingText),
|
||||
text=stripMarkdownForTts(greetingText),
|
||||
languageCode=language,
|
||||
voiceName=voiceName,
|
||||
)
|
||||
|
|
@ -471,10 +471,10 @@ async def cancelSession(
|
|||
raise HTTPException(status_code=404, detail=routeApiMsg("Session not found"))
|
||||
_validateOwnership(session, context)
|
||||
|
||||
from modules.shared.timeUtils import getIsoTimestamp
|
||||
from modules.shared.timeUtils import getUtcTimestamp
|
||||
interface.updateSession(sessionId, {
|
||||
"status": CoachingSessionStatus.CANCELLED.value,
|
||||
"endedAt": getIsoTimestamp(),
|
||||
"endedAt": getUtcTimestamp(),
|
||||
})
|
||||
return {"cancelled": True}
|
||||
|
||||
|
|
@ -584,8 +584,8 @@ async def sendAudioStream(
|
|||
if not audioBody:
|
||||
raise HTTPException(status_code=400, detail=routeApiMsg("No audio data received"))
|
||||
|
||||
from .serviceCommcoach import _getUserVoicePrefs
|
||||
language, _ = _getUserVoicePrefs(str(context.user.id), mandateId)
|
||||
from .serviceCommcoach import getUserVoicePrefs
|
||||
language, _ = getUserVoicePrefs(str(context.user.id), mandateId)
|
||||
|
||||
contextId = session.get("contextId")
|
||||
service = CommcoachService(context.user, mandateId, instanceId)
|
||||
|
|
@ -768,8 +768,8 @@ async def updateTaskStatus(
|
|||
|
||||
updates = {"status": body.status.value}
|
||||
if body.status == CoachingTaskStatus.DONE:
|
||||
from modules.shared.timeUtils import getIsoTimestamp
|
||||
updates["completedAt"] = getIsoTimestamp()
|
||||
from modules.shared.timeUtils import getUtcTimestamp
|
||||
updates["completedAt"] = getUtcTimestamp()
|
||||
|
||||
updated = interface.updateTask(taskId, updates)
|
||||
return {"task": updated}
|
||||
|
|
|
|||
|
|
@ -14,7 +14,7 @@ from typing import Optional, Dict, Any, List
|
|||
|
||||
from modules.datamodels.datamodelUam import User
|
||||
from modules.datamodels.datamodelAi import AiCallRequest, AiCallOptions, OperationTypeEnum, PriorityEnum
|
||||
from modules.shared.timeUtils import getIsoTimestamp
|
||||
from modules.shared.timeUtils import getIsoTimestamp, getUtcTimestamp
|
||||
|
||||
from .datamodelCommcoach import (
|
||||
CoachingMessage, CoachingMessageRole, CoachingMessageContentType,
|
||||
|
|
@ -79,7 +79,7 @@ def _selectConfiguredVoice(
|
|||
return None
|
||||
|
||||
|
||||
def _buildTtsConfigErrorMessage(language: str, voiceName: Optional[str], rawError: str = "") -> str:
|
||||
def buildTtsConfigErrorMessage(language: str, voiceName: Optional[str], rawError: str = "") -> str:
|
||||
if voiceName:
|
||||
return (
|
||||
f'Die konfigurierte Stimme "{voiceName}" für {language} ist ungültig oder nicht verfügbar. '
|
||||
|
|
@ -91,7 +91,7 @@ def _buildTtsConfigErrorMessage(language: str, voiceName: Optional[str], rawErro
|
|||
)
|
||||
|
||||
|
||||
def _getUserVoicePrefs(userId: str, mandateId: Optional[str] = None) -> tuple:
|
||||
def getUserVoicePrefs(userId: str, mandateId: Optional[str] = None) -> tuple:
|
||||
"""Load voice language and voiceName from central UserVoicePreferences.
|
||||
Returns (language, voiceName) tuple."""
|
||||
try:
|
||||
|
|
@ -160,7 +160,7 @@ def _getUserVoicePrefs(userId: str, mandateId: Optional[str] = None) -> tuple:
|
|||
return ("de-DE", None)
|
||||
|
||||
|
||||
def _stripMarkdownForTts(text: str) -> str:
|
||||
def stripMarkdownForTts(text: str) -> str:
|
||||
"""Strip markdown formatting so TTS reads clean speech text."""
|
||||
t = text
|
||||
t = re.sub(r'\*\*(.+?)\*\*', r'\1', t)
|
||||
|
|
@ -346,9 +346,9 @@ async def _generateAndEmitTts(sessionId: str, speechText: str, currentUser, mand
|
|||
from modules.interfaces.interfaceVoiceObjects import getVoiceInterface
|
||||
import base64
|
||||
voiceInterface = getVoiceInterface(currentUser, mandateId)
|
||||
language, voiceName = _getUserVoicePrefs(str(currentUser.id), mandateId)
|
||||
language, voiceName = getUserVoicePrefs(str(currentUser.id), mandateId)
|
||||
ttsResult = await voiceInterface.textToSpeech(
|
||||
text=_stripMarkdownForTts(speechText),
|
||||
text=stripMarkdownForTts(speechText),
|
||||
languageCode=language,
|
||||
voiceName=voiceName,
|
||||
)
|
||||
|
|
@ -362,7 +362,7 @@ async def _generateAndEmitTts(sessionId: str, speechText: str, currentUser, mand
|
|||
return
|
||||
errorDetail = ttsResult.get("error", "Text-to-Speech failed")
|
||||
await emitSessionEvent(sessionId, "error", {
|
||||
"message": _buildTtsConfigErrorMessage(language, voiceName, errorDetail),
|
||||
"message": buildTtsConfigErrorMessage(language, voiceName, errorDetail),
|
||||
"detail": errorDetail,
|
||||
"ttsLanguage": language,
|
||||
"ttsVoice": voiceName,
|
||||
|
|
@ -370,7 +370,7 @@ async def _generateAndEmitTts(sessionId: str, speechText: str, currentUser, mand
|
|||
except Exception as e:
|
||||
logger.warning(f"TTS failed for session {sessionId}: {e}")
|
||||
await emitSessionEvent(sessionId, "error", {
|
||||
"message": _buildTtsConfigErrorMessage("de-DE", None, str(e)),
|
||||
"message": buildTtsConfigErrorMessage("de-DE", None, str(e)),
|
||||
"detail": str(e),
|
||||
})
|
||||
|
||||
|
|
@ -695,7 +695,7 @@ _TTS_WORD_LIMIT = 200
|
|||
|
||||
async def _prepareSpeechText(fullText: str, callAiFn) -> str:
|
||||
"""Prepare text for TTS. Short responses used directly; long ones get summarized."""
|
||||
cleaned = _stripMarkdownForTts(fullText)
|
||||
cleaned = stripMarkdownForTts(fullText)
|
||||
wordCount = len(cleaned.split())
|
||||
if wordCount <= _TTS_WORD_LIMIT:
|
||||
return cleaned
|
||||
|
|
@ -1107,7 +1107,7 @@ class CommcoachService:
|
|||
if len(messages) < 2:
|
||||
interface.updateSession(sessionId, {
|
||||
"status": CoachingSessionStatus.COMPLETED.value,
|
||||
"endedAt": getIsoTimestamp(),
|
||||
"endedAt": getUtcTimestamp(),
|
||||
"compressedHistorySummary": None,
|
||||
"compressedHistoryUpToMessageCount": None,
|
||||
})
|
||||
|
|
@ -1252,21 +1252,18 @@ class CommcoachService:
|
|||
logger.warning(f"Coaching session indexing failed (non-blocking): {e}")
|
||||
|
||||
# Calculate duration
|
||||
startedAt = session.get("startedAt", "")
|
||||
startedAt = session.get("startedAt")
|
||||
durationSeconds = 0
|
||||
if startedAt:
|
||||
try:
|
||||
from datetime import datetime
|
||||
start = datetime.fromisoformat(startedAt.replace("Z", "+00:00"))
|
||||
end = datetime.now(start.tzinfo) if start.tzinfo else datetime.now()
|
||||
from datetime import datetime, timezone
|
||||
start = datetime.fromtimestamp(startedAt, tz=timezone.utc)
|
||||
end = datetime.now(timezone.utc)
|
||||
durationSeconds = int((end - start).total_seconds())
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# Update session - clear compressed history so it never leaks into new sessions
|
||||
sessionUpdates = {
|
||||
"status": CoachingSessionStatus.COMPLETED.value,
|
||||
"endedAt": getIsoTimestamp(),
|
||||
"endedAt": getUtcTimestamp(),
|
||||
"summary": summary,
|
||||
"durationSeconds": durationSeconds,
|
||||
"messageCount": len(messages),
|
||||
|
|
@ -1285,7 +1282,7 @@ class CommcoachService:
|
|||
completedCount = len([s for s in allSessions if s.get("status") == CoachingSessionStatus.COMPLETED.value])
|
||||
interface.updateContext(contextId, {
|
||||
"sessionCount": completedCount,
|
||||
"lastSessionAt": getIsoTimestamp(),
|
||||
"lastSessionAt": getUtcTimestamp(),
|
||||
})
|
||||
|
||||
# Update user profile streak
|
||||
|
|
@ -1324,26 +1321,23 @@ class CommcoachService:
|
|||
if not profile:
|
||||
profile = interface.getOrCreateProfile(self.userId, self.mandateId, self.instanceId)
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
from datetime import datetime, timezone
|
||||
|
||||
lastSessionAt = profile.get("lastSessionAt")
|
||||
currentStreak = profile.get("streakDays", 0)
|
||||
longestStreak = profile.get("longestStreak", 0)
|
||||
totalSessions = profile.get("totalSessions", 0)
|
||||
|
||||
today = datetime.now().date()
|
||||
today = datetime.now(timezone.utc).date()
|
||||
isConsecutive = False
|
||||
|
||||
if lastSessionAt:
|
||||
try:
|
||||
lastDate = datetime.fromisoformat(lastSessionAt.replace("Z", "+00:00")).date()
|
||||
lastDate = datetime.fromtimestamp(lastSessionAt, tz=timezone.utc).date()
|
||||
diff = (today - lastDate).days
|
||||
if diff == 1:
|
||||
isConsecutive = True
|
||||
elif diff == 0:
|
||||
isConsecutive = True # Same day, maintain streak
|
||||
except Exception:
|
||||
pass
|
||||
isConsecutive = True
|
||||
|
||||
newStreak = (currentStreak + 1) if isConsecutive else 1
|
||||
newLongest = max(longestStreak, newStreak)
|
||||
|
|
@ -1352,7 +1346,7 @@ class CommcoachService:
|
|||
"streakDays": newStreak,
|
||||
"longestStreak": newLongest,
|
||||
"totalSessions": totalSessions + 1,
|
||||
"lastSessionAt": getIsoTimestamp(),
|
||||
"lastSessionAt": getUtcTimestamp(),
|
||||
})
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to update streak: {e}")
|
||||
|
|
@ -1373,7 +1367,7 @@ class CommcoachService:
|
|||
|
||||
from modules.interfaces.interfaceMessaging import getInterface as getMessagingInterface
|
||||
from modules.interfaces.interfaceDbApp import getRootInterface
|
||||
from modules.shared.notifyMandateAdmins import _renderHtmlEmail, _resolveMandateName
|
||||
from modules.shared.notifyMandateAdmins import renderHtmlEmail, resolveMandateName
|
||||
|
||||
rootInterface = getRootInterface()
|
||||
user = rootInterface.getUser(self.userId)
|
||||
|
|
@ -1382,9 +1376,9 @@ class CommcoachService:
|
|||
|
||||
messaging = getMessagingInterface()
|
||||
subject = f"Coaching-Session Zusammenfassung: {contextTitle}"
|
||||
mandateName = _resolveMandateName(self.mandateId)
|
||||
mandateName = resolveMandateName(self.mandateId)
|
||||
contentHtml = _buildSummaryEmailBlock(emailData, summary, contextTitle)
|
||||
htmlMessage = _renderHtmlEmail(
|
||||
htmlMessage = renderHtmlEmail(
|
||||
"Coaching-Session Zusammenfassung",
|
||||
[
|
||||
f'Thema: {contextTitle}',
|
||||
|
|
@ -1418,13 +1412,12 @@ class CommcoachService:
|
|||
completedSessions = [s for s in allSessions if s.get("status") == CoachingSessionStatus.COMPLETED.value]
|
||||
|
||||
for s in completedSessions:
|
||||
startedAt = s.get("startedAt") or s.get("createdAt") or ""
|
||||
startedAt = s.get("startedAt")
|
||||
if startedAt:
|
||||
try:
|
||||
from datetime import datetime
|
||||
dt = datetime.fromisoformat(str(startedAt).replace("Z", "+00:00"))
|
||||
from datetime import datetime, timezone
|
||||
dt = datetime.fromtimestamp(startedAt, tz=timezone.utc)
|
||||
s["date"] = dt.strftime("%d.%m.%Y")
|
||||
except Exception:
|
||||
else:
|
||||
s["date"] = ""
|
||||
|
||||
result = {
|
||||
|
|
|
|||
|
|
@ -206,14 +206,11 @@ Tool-Nutzung:
|
|||
|
||||
if retrievedSession:
|
||||
dateStr = ""
|
||||
startedAt = retrievedSession.get("startedAt") or retrievedSession.get("createdAt")
|
||||
startedAt = retrievedSession.get("startedAt")
|
||||
if startedAt:
|
||||
try:
|
||||
from datetime import datetime
|
||||
dt = datetime.fromisoformat(str(startedAt).replace("Z", "+00:00"))
|
||||
from datetime import datetime, timezone
|
||||
dt = datetime.fromtimestamp(startedAt, tz=timezone.utc)
|
||||
dateStr = dt.strftime("%d.%m.%Y")
|
||||
except Exception:
|
||||
pass
|
||||
prompt += f"\n\nVom Benutzer angefragte Session ({dateStr}):"
|
||||
prompt += f"\n{retrievedSession.get('summary', '')[:500]}"
|
||||
|
||||
|
|
|
|||
|
|
@ -7,7 +7,7 @@ Intent detection, retrieval strategies, and context assembly for intelligent ses
|
|||
|
||||
import re
|
||||
import logging
|
||||
from datetime import datetime
|
||||
from datetime import datetime, timezone
|
||||
from typing import Optional, Dict, Any, List, Tuple
|
||||
from enum import Enum
|
||||
|
||||
|
|
@ -106,18 +106,15 @@ def findSessionByDate(
|
|||
for s in sessions:
|
||||
if s.get("status") != "completed":
|
||||
continue
|
||||
startedAt = s.get("startedAt") or s.get("endedAt") or s.get("createdAt")
|
||||
startedAt = s.get("startedAt") or s.get("endedAt")
|
||||
if not startedAt:
|
||||
continue
|
||||
try:
|
||||
dt = datetime.fromisoformat(startedAt.replace("Z", "+00:00"))
|
||||
dt = datetime.fromtimestamp(startedAt, tz=timezone.utc)
|
||||
sessionDate = dt.date()
|
||||
diff = abs((sessionDate - targetDateOnly).days)
|
||||
if bestDiff is None or diff < bestDiff:
|
||||
bestDiff = diff
|
||||
bestMatch = s
|
||||
except Exception:
|
||||
continue
|
||||
|
||||
return bestMatch
|
||||
|
||||
|
|
@ -231,17 +228,14 @@ def buildSessionSummariesForPrompt(
|
|||
and s.get("summary")
|
||||
and s.get("id") != excludeSessionId
|
||||
]
|
||||
completed.sort(key=lambda x: x.get("startedAt") or x.get("createdAt") or "", reverse=True)
|
||||
completed.sort(key=lambda x: x.get("startedAt") or 0, reverse=True)
|
||||
result = []
|
||||
for s in completed[:limit]:
|
||||
startedAt = s.get("startedAt") or s.get("createdAt") or ""
|
||||
startedAt = s.get("startedAt")
|
||||
dateStr = ""
|
||||
if startedAt:
|
||||
try:
|
||||
dt = datetime.fromisoformat(startedAt.replace("Z", "+00:00"))
|
||||
dt = datetime.fromtimestamp(startedAt, tz=timezone.utc)
|
||||
dateStr = dt.strftime("%d.%m.%Y")
|
||||
except Exception:
|
||||
pass
|
||||
result.append({
|
||||
"summary": s.get("summary", ""),
|
||||
"date": dateStr,
|
||||
|
|
|
|||
|
|
@ -8,7 +8,7 @@ Generates Markdown and PDF exports for dossiers and sessions.
|
|||
import logging
|
||||
import json
|
||||
from typing import Dict, Any, List, Optional
|
||||
from datetime import datetime
|
||||
from datetime import datetime, timezone
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
|
@ -49,7 +49,7 @@ def buildDossierMarkdown(context: Dict[str, Any], sessions: List[Dict[str, Any]]
|
|||
lines.append(f"- {text}")
|
||||
|
||||
completedSessions = [s for s in sessions if s.get("status") == "completed"]
|
||||
completedSessions.sort(key=lambda s: s.get("startedAt") or s.get("createdAt") or "")
|
||||
completedSessions.sort(key=lambda s: s.get("startedAt") or 0)
|
||||
if completedSessions:
|
||||
lines += ["", "## Sessions", ""]
|
||||
for i, s in enumerate(completedSessions, 1):
|
||||
|
|
@ -227,14 +227,14 @@ def _mdToXml(text: str) -> str:
|
|||
|
||||
|
||||
|
||||
def _formatDate(isoStr: Optional[str]) -> str:
|
||||
if not isoStr:
|
||||
return datetime.now().strftime("%d.%m.%Y")
|
||||
try:
|
||||
dt = datetime.fromisoformat(str(isoStr).replace("Z", "+00:00"))
|
||||
def _formatDate(val) -> str:
|
||||
if not val:
|
||||
return datetime.now(timezone.utc).strftime("%d.%m.%Y")
|
||||
if isinstance(val, (int, float)):
|
||||
dt = datetime.fromtimestamp(float(val), tz=timezone.utc)
|
||||
return dt.strftime("%d.%m.%Y")
|
||||
dt = datetime.fromisoformat(str(val).replace("Z", "+00:00"))
|
||||
return dt.strftime("%d.%m.%Y")
|
||||
except Exception:
|
||||
return isoStr
|
||||
|
||||
|
||||
def _parseJson(value, fallback):
|
||||
|
|
|
|||
|
|
@ -64,7 +64,7 @@ async def _runDailyReminders():
|
|||
from modules.connectors.connectorDbPostgre import DatabaseConnector
|
||||
from .datamodelCommcoach import CoachingUserProfile, CoachingContextStatus
|
||||
from modules.interfaces.interfaceMessaging import getInterface as getMessagingInterface
|
||||
from modules.shared.notifyMandateAdmins import _renderHtmlEmail, _resolveMandateName
|
||||
from modules.shared.notifyMandateAdmins import renderHtmlEmail, resolveMandateName
|
||||
|
||||
dbHost = APP_CONFIG.get("DB_HOST", "_no_config_default_data")
|
||||
db = DatabaseConnector(
|
||||
|
|
@ -106,8 +106,8 @@ async def _runDailyReminders():
|
|||
contextList = ", ".join(contextTitles)
|
||||
|
||||
subject = "Dein tägliches Coaching wartet"
|
||||
mandateName = _resolveMandateName(profile.get("mandateId"))
|
||||
htmlMessage = _renderHtmlEmail(
|
||||
mandateName = resolveMandateName(profile.get("mandateId"))
|
||||
htmlMessage = renderHtmlEmail(
|
||||
"Zeit für dein tägliches Coaching",
|
||||
[
|
||||
f"Du hast aktuell {len(contexts)} aktive Coaching-Themen.",
|
||||
|
|
|
|||
205
modules/features/graphicalEditor/adapterValidator.py
Normal file
205
modules/features/graphicalEditor/adapterValidator.py
Normal file

@@ -0,0 +1,205 @@
# Copyright (c) 2025 Patrick Motsch
# All rights reserved.
"""
Adapter Validator — enforces 5 drift rules between Schicht-3 NodeAdapters
and the Schicht-2 Actions they bind to.

This is the CI-safety net described in the typed-action-architecture plan:
any drift between an Editor-Node Adapter and the underlying Action signature
must be caught at build time, never silently in production.

Rules
-----
1. Every `userParams[].actionArg` exists as a parameter in the bound Action.
2. Every required Action parameter is covered by either `userParams` or
   `contextParams` (i.e. no required arg is silently unset).
3. Every Action parameter type exists in PORT_TYPE_CATALOG (or is a primitive).
4. The Action `outputType` exists in PORT_TYPE_CATALOG (or is a primitive).
5. Every method-bound STATIC node has an Adapter (no orphan node ids).

Rules 3+4 are already enforced by `_actionSignatureValidator` in Phase 2 —
this module composes with it so the report covers both layers.
"""

from __future__ import annotations

from dataclasses import dataclass, field
from typing import Any, Dict, List, Mapping

from modules.features.graphicalEditor.nodeAdapter import (
    NodeAdapter,
    _adapterFromLegacyNode,
    _isMethodBoundNode,
)
from modules.workflows.methods._actionSignatureValidator import _validateTypeRef


@dataclass
class AdapterValidationReport:
    """Aggregated drift report across all adapters."""

    errors: List[str] = field(default_factory=list)
    warnings: List[str] = field(default_factory=list)

    @property
    def isHealthy(self) -> bool:
        return not self.errors

    def merge(self, other: "AdapterValidationReport") -> None:
        self.errors.extend(other.errors)
        self.warnings.extend(other.warnings)


def _validateAdapterAgainstAction(
    adapter: NodeAdapter,
    actionDef: Any,
) -> AdapterValidationReport:
    """Apply rules 1-4 to a single Adapter / Action pair.

    `actionDef` is duck-typed so tests can pass dataclasses; production passes
    a `WorkflowActionDefinition` Pydantic model.
    """
    report = AdapterValidationReport()
    actionParams: Mapping[str, Any] = getattr(actionDef, "parameters", {}) or {}
    outputType: str = getattr(actionDef, "outputType", "ActionResult") or "ActionResult"

    # Rule 1: every userParam.actionArg exists in the Action
    declaredArgs = {up.actionArg for up in adapter.userParams}
    for arg in declaredArgs:
        if arg not in actionParams:
            report.errors.append(
                f"adapter '{adapter.nodeId}' bindsAction '{adapter.bindsAction}': "
                f"userParams.actionArg '{arg}' does not exist in action parameters "
                f"(known: {sorted(actionParams.keys())})"
            )

    # Rule 2: every required Action arg is covered (userParams OR contextParams)
    coveredArgs = declaredArgs | set(adapter.contextParams.keys())
    for paramName, paramDef in actionParams.items():
        isRequired = bool(getattr(paramDef, "required", False))
        if isRequired and paramName not in coveredArgs:
            report.errors.append(
                f"adapter '{adapter.nodeId}' bindsAction '{adapter.bindsAction}': "
                f"required action arg '{paramName}' is neither in userParams nor contextParams"
            )

    # Rule 3: every Action parameter type exists in catalog (re-runs Phase-2 rule)
    for paramName, paramDef in actionParams.items():
        typeRef = getattr(paramDef, "type", None)
        if not typeRef:
            report.errors.append(
                f"action '{adapter.bindsAction}.{paramName}': missing 'type' on parameter"
            )
            continue
        for err in _validateTypeRef(typeRef):
            report.errors.append(
                f"action '{adapter.bindsAction}.{paramName}': {err}"
            )

    # Rule 4: Action outputType exists in catalog (or is a generic fire-and-forget type)
    if outputType not in {"ActionResult", "Transit"}:
        for err in _validateTypeRef(outputType):
            report.errors.append(
                f"action '{adapter.bindsAction}'.outputType: {err}"
            )

    return report


def _validateAllAdapters(
    staticNodes: List[Mapping[str, Any]],
    actionsRegistry: Mapping[str, Mapping[str, Any]],
) -> AdapterValidationReport:
    """Run rules 1-5 across all method-bound static node definitions.

    Args:
        staticNodes: list of legacy node-dicts (`STATIC_NODE_TYPES`).
        actionsRegistry: mapping of method-shortname -> {actionName: WorkflowActionDefinition}.
            Built from live `methods` registry or test-stubbed methods.

    Returns:
        Aggregated drift report. `isHealthy` is True only if every method-bound
        node has a matching Action and all 5 rules pass.
    """
    report = AdapterValidationReport()
    seenAdapterIds: set[str] = set()

    for node in staticNodes:
        if not _isMethodBoundNode(node):
            continue

        adapter = _adapterFromLegacyNode(node)
        if adapter is None:
            report.errors.append(
                f"node '{node.get('id')}' is method-bound but adapter projection failed"
            )
            continue
        seenAdapterIds.add(adapter.nodeId)

        methodName = str(node.get("_method") or "")
        actionName = str(node.get("_action") or "")
        methodActions = actionsRegistry.get(methodName) or {}
        actionDef = methodActions.get(actionName)
        if actionDef is None:
            report.errors.append(
                f"adapter '{adapter.nodeId}' bindsAction '{adapter.bindsAction}': "
                f"action not found in registry (method '{methodName}' has actions: "
                f"{sorted(methodActions.keys())})"
            )
            continue

        report.merge(_validateAdapterAgainstAction(adapter, actionDef))

    # Rule 5: every Action with dynamicMode=False MUST have an Editor Adapter.
    # dynamicMode=True actions are agent-only and may legitimately lack one.
    boundActions: set[str] = set()
    for node in staticNodes:
        if not _isMethodBoundNode(node):
            continue
        boundActions.add(f"{node.get('_method')}.{node.get('_action')}")

    for methodName, actions in actionsRegistry.items():
        for actionName, actionDef in actions.items():
            if bool(getattr(actionDef, "dynamicMode", False)):
                continue
            fqn = f"{methodName}.{actionName}"
            if fqn not in boundActions:
                report.warnings.append(
                    f"action '{fqn}' has no Editor adapter "
                    f"(set dynamicMode=True if intended as agent-only)"
                )

    return report


def _formatAdapterReport(report: AdapterValidationReport) -> str:
    """Format a report for human-readable logging."""
    lines: List[str] = []
    if report.isHealthy and not report.warnings:
        lines.append("Adapter validator: all healthy.")
        return "\n".join(lines)

    if report.errors:
        lines.append(f"Adapter validator: {len(report.errors)} ERROR(s)")
        for e in report.errors:
            lines.append(f" ERROR: {e}")
    if report.warnings:
        lines.append(f"Adapter validator: {len(report.warnings)} WARNING(s)")
        for w in report.warnings:
            lines.append(f" WARN: {w}")
    return "\n".join(lines)


def _buildActionsRegistryFromMethods(
    methodInstances: Mapping[str, Any],
) -> Dict[str, Dict[str, Any]]:
    """Convenience: turn `{shortName: methodInstance}` into the registry shape.

    `methodInstance._actions` is a dict of action-name -> WorkflowActionDefinition.
    """
    registry: Dict[str, Dict[str, Any]] = {}
    for shortName, instance in methodInstances.items():
        actions = getattr(instance, "_actions", None)
        if isinstance(actions, dict):
            registry[shortName] = dict(actions)
    return registry
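A sketch of how this validator could be wired into a startup check or CI test. The functions are the ones defined in the new file above; where the live STATIC_NODE_TYPES list and the method registry come from is an assumption:

    from modules.features.graphicalEditor.adapterValidator import (
        _buildActionsRegistryFromMethods,
        _formatAdapterReport,
        _validateAllAdapters,
    )

    def checkAdapterDrift(staticNodes, methodInstances, logger) -> bool:
        registry = _buildActionsRegistryFromMethods(methodInstances)
        report = _validateAllAdapters(staticNodes, registry)
        logger.info(_formatAdapterReport(report))
        return report.isHealthy   # False => fail the build / refuse startup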
@@ -68,10 +68,7 @@ class AutoWorkflow(PowerOnModel):
            "frontend_readonly": True,
            "frontend_required": False,
            "label": "Mandanten-ID",
            "frontend_fk_source": "/api/mandates/",
            "frontend_fk_display_field": "label",
            "fk_model": "Mandate",
            "fk_target": {"db": "poweron_app", "table": "Mandate"},
            "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
        },
    )
    featureInstanceId: str = Field(

@@ -81,10 +78,7 @@ class AutoWorkflow(PowerOnModel):
            "frontend_readonly": True,
            "frontend_required": False,
            "label": "Feature-Instanz-ID",
            "frontend_fk_source": "/api/features/instances",
            "frontend_fk_display_field": "label",
            "fk_model": "FeatureInstance",
            "fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
            "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
        },
    )
    label: str = Field(
@@ -104,7 +98,12 @@ class AutoWorkflow(PowerOnModel):
    isTemplate: bool = Field(
        default=False,
        description="Whether this workflow is a template",
        json_schema_extra={"frontend_type": "checkbox", "frontend_required": False, "label": "Ist Vorlage"},
        json_schema_extra={
            "frontend_type": "checkbox",
            "frontend_required": False,
            "label": "Ist Vorlage",
            "frontend_format_labels": ["Ja", "-", "Nein"],
        },
    )
    templateSourceId: Optional[str] = Field(
        default=None,
@@ -114,18 +113,43 @@ class AutoWorkflow(PowerOnModel):
            "frontend_readonly": True,
            "frontend_required": False,
            "label": "Vorlagen-Quelle",
            "fk_target": {"db": "poweron_graphicaleditor", "table": "AutoWorkflow"},
            # Soft FK: holds either a real AutoWorkflow.id (UUID, when copied
            # from a stored template) OR an in-code sentinel like
            # "trustee-receipt-import" (when bootstrapped from
            # featureModule.getTemplateWorkflows()). Sentinel values do not
            # exist as DB rows by design — orphan cleanup MUST skip this column.
            "fk_target": {
                "db": "poweron_graphicaleditor",
                "table": "AutoWorkflow",
                "labelField": "label",
                "softFk": True,
            },
        },
    )
    templateScope: Optional[str] = Field(
        default=None,
        description="Template scope: user, instance, mandate, system (AutoTemplateScope)",
        json_schema_extra={"frontend_type": "select", "frontend_required": False, "label": "Vorlagen-Bereich"},
        json_schema_extra={
            "frontend_type": "select",
            "frontend_required": False,
            "label": "Vorlagen-Bereich",
            "frontend_options": [
                {"value": "user", "label": "Meine"},
                {"value": "instance", "label": "Instanz"},
                {"value": "mandate", "label": "Mandant"},
                {"value": "system", "label": "System"},
            ],
        },
    )
    sharedReadOnly: bool = Field(
        default=False,
        description="If true, shared template is read-only for non-owners",
        json_schema_extra={"frontend_type": "checkbox", "frontend_required": False, "label": "Freigabe nur-lesen"},
        json_schema_extra={
            "frontend_type": "checkbox",
            "frontend_required": False,
            "label": "Freigabe nur-lesen",
            "frontend_format_labels": ["Ja", "-", "Nein"],
        },
    )
    currentVersionId: Optional[str] = Field(
        default=None,
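Because templateSourceId is now declared as a soft FK, any orphan-cleanup job has to read the fk_target metadata and skip softFk columns instead of treating sentinel values as dangling references. A rough sketch (the iteration helper is illustrative; dict-style json_schema_extra as used in these models is assumed):

    def hardFkColumns(model):
        columns = []
        for fieldName, fieldInfo in model.model_fields.items():
            extra = fieldInfo.json_schema_extra or {}
            target = extra.get("fk_target") if isinstance(extra, dict) else None
            if not target:
                continue
            if target.get("softFk"):
                continue   # sentinels like "trustee-receipt-import" are not DB rows by design
            columns.append((fieldName, target["db"], target["table"]))
        return columns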
@ -135,13 +159,18 @@ class AutoWorkflow(PowerOnModel):
|
|||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"label": "Aktuelle Version",
|
||||
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoVersion"},
|
||||
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoVersion", "labelField": "versionNumber"},
|
||||
},
|
||||
)
|
||||
active: bool = Field(
|
||||
default=True,
|
||||
description="Whether workflow is active",
|
||||
json_schema_extra={"frontend_type": "checkbox", "frontend_required": False, "label": "Aktiv"},
|
||||
json_schema_extra={
|
||||
"frontend_type": "checkbox",
|
||||
"frontend_required": False,
|
||||
"label": "Aktiv",
|
||||
"frontend_format_labels": ["Ja", "-", "Nein"],
|
||||
},
|
||||
)
|
||||
eventId: Optional[str] = Field(
|
||||
default=None,
|
||||
|
|
@ -151,7 +180,12 @@ class AutoWorkflow(PowerOnModel):
|
|||
notifyOnFailure: bool = Field(
|
||||
default=True,
|
||||
description="Send notification (in-app + email) when a run fails",
|
||||
json_schema_extra={"frontend_type": "checkbox", "frontend_required": False, "label": "Bei Fehler benachrichtigen"},
|
||||
json_schema_extra={
|
||||
"frontend_type": "checkbox",
|
||||
"frontend_required": False,
|
||||
"label": "Bei Fehler benachrichtigen",
|
||||
"frontend_format_labels": ["Ja", "-", "Nein"],
|
||||
},
|
||||
)
|
||||
# Legacy fields kept for backward compatibility during transition
|
||||
graph: Dict[str, Any] = Field(
|
||||
|
|
@ -184,7 +218,7 @@ class AutoVersion(PowerOnModel):
|
|||
"frontend_readonly": True,
|
||||
"frontend_required": True,
|
||||
"label": "Workflow-ID",
|
||||
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoWorkflow"},
|
||||
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoWorkflow", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
versionNumber: int = Field(
|
||||
|
|
@ -195,7 +229,16 @@ class AutoVersion(PowerOnModel):
|
|||
status: str = Field(
|
||||
default=AutoWorkflowStatus.DRAFT.value,
|
||||
description="Version status: draft, published, archived",
|
||||
json_schema_extra={"frontend_type": "select", "frontend_required": False, "label": "Status"},
|
||||
json_schema_extra={
|
||||
"frontend_type": "select",
|
||||
"frontend_required": False,
|
||||
"label": "Status",
|
||||
"frontend_options": [
|
||||
{"value": "draft", "label": "Entwurf"},
|
||||
{"value": "published", "label": "Veröffentlicht"},
|
||||
{"value": "archived", "label": "Archiviert"},
|
||||
],
|
||||
},
|
||||
)
|
||||
graph: Dict[str, Any] = Field(
|
||||
default_factory=dict,
|
||||
|
|
@ -210,7 +253,7 @@ class AutoVersion(PowerOnModel):
|
|||
publishedAt: Optional[float] = Field(
|
||||
default=None,
|
||||
description="Timestamp when version was published",
|
||||
json_schema_extra={"frontend_type": "datetime", "frontend_readonly": True, "frontend_required": False, "label": "Veröffentlicht am"},
|
||||
json_schema_extra={"frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False, "label": "Veröffentlicht am"},
|
||||
)
|
||||
publishedBy: Optional[str] = Field(
|
||||
default=None,
|
||||
|
|
@ -220,7 +263,7 @@ class AutoVersion(PowerOnModel):
|
|||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"label": "Veröffentlicht von",
|
||||
"fk_target": {"db": "poweron_app", "table": "User"},
|
||||
"fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
|
||||
},
|
||||
)
|
||||
|
||||
|
|
@ -243,7 +286,7 @@ class AutoRun(PowerOnModel):
|
|||
"frontend_readonly": True,
|
||||
"frontend_required": True,
|
||||
"label": "Workflow-ID",
|
||||
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoWorkflow"},
|
||||
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoWorkflow", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
label: Optional[str] = Field(
|
||||
|
|
@ -259,10 +302,7 @@ class AutoRun(PowerOnModel):
|
|||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"label": "Mandanten-ID",
|
||||
"frontend_fk_source": "/api/mandates/",
|
||||
"frontend_fk_display_field": "label",
|
||||
"fk_model": "Mandate",
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
ownerId: Optional[str] = Field(
|
||||
|
|
@ -273,7 +313,7 @@ class AutoRun(PowerOnModel):
|
|||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"label": "Auslöser",
|
||||
"fk_target": {"db": "poweron_app", "table": "User"},
|
||||
"fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
|
||||
},
|
||||
)
|
||||
versionId: Optional[str] = Field(
|
||||
|
|
@ -284,13 +324,24 @@ class AutoRun(PowerOnModel):
|
|||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"label": "Versions-ID",
|
||||
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoVersion"},
|
||||
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoVersion", "labelField": "versionNumber"},
|
||||
},
|
||||
)
|
||||
status: str = Field(
|
||||
default=AutoRunStatus.RUNNING.value,
|
||||
description="Status: running, paused, completed, failed, cancelled",
|
||||
json_schema_extra={"frontend_type": "text", "frontend_required": False, "label": "Status"},
|
||||
json_schema_extra={
|
||||
"frontend_type": "select",
|
||||
"frontend_required": False,
|
||||
"label": "Status",
|
||||
"frontend_options": [
|
||||
{"value": "running", "label": "Läuft"},
|
||||
{"value": "paused", "label": "Pausiert"},
|
||||
{"value": "completed", "label": "Abgeschlossen"},
|
||||
{"value": "failed", "label": "Fehlgeschlagen"},
|
||||
{"value": "cancelled", "label": "Abgebrochen"},
|
||||
],
|
||||
},
|
||||
)
|
||||
trigger: Dict[str, Any] = Field(
|
||||
default_factory=dict,
|
||||
|
|
@ -300,12 +351,12 @@ class AutoRun(PowerOnModel):
|
|||
startedAt: Optional[float] = Field(
|
||||
default=None,
|
||||
description="Run start timestamp",
|
||||
json_schema_extra={"frontend_type": "datetime", "frontend_readonly": True, "frontend_required": False, "label": "Gestartet am"},
|
||||
json_schema_extra={"frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False, "label": "Gestartet am"},
|
||||
)
|
||||
completedAt: Optional[float] = Field(
|
||||
default=None,
|
||||
description="Run completion timestamp",
|
||||
json_schema_extra={"frontend_type": "datetime", "frontend_readonly": True, "frontend_required": False, "label": "Abgeschlossen am"},
|
||||
json_schema_extra={"frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False, "label": "Abgeschlossen am"},
|
||||
)
|
||||
nodeOutputs: Dict[str, Any] = Field(
|
||||
default_factory=dict,
|
||||
|
|
@ -357,7 +408,7 @@ class AutoStepLog(PowerOnModel):
|
|||
"frontend_readonly": True,
|
||||
"frontend_required": True,
|
||||
"label": "Lauf-ID",
|
||||
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoRun"},
|
||||
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoRun", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
nodeId: str = Field(
|
||||
|
|
@ -371,7 +422,18 @@ class AutoStepLog(PowerOnModel):
|
|||
status: str = Field(
|
||||
default=AutoStepStatus.PENDING.value,
|
||||
description="Step status: pending, running, completed, failed, skipped",
|
||||
json_schema_extra={"frontend_type": "text", "frontend_required": False, "label": "Status"},
|
||||
json_schema_extra={
|
||||
"frontend_type": "select",
|
||||
"frontend_required": False,
|
||||
"label": "Status",
|
||||
"frontend_options": [
|
||||
{"value": "pending", "label": "Wartend"},
|
||||
{"value": "running", "label": "Läuft"},
|
||||
{"value": "completed", "label": "Abgeschlossen"},
|
||||
{"value": "failed", "label": "Fehlgeschlagen"},
|
||||
{"value": "skipped", "label": "Übersprungen"},
|
||||
],
|
||||
},
|
||||
)
|
||||
inputSnapshot: Dict[str, Any] = Field(
|
||||
default_factory=dict,
|
||||
|
|
@ -391,12 +453,12 @@ class AutoStepLog(PowerOnModel):
|
|||
startedAt: Optional[float] = Field(
|
||||
default=None,
|
||||
description="Step start timestamp",
|
||||
json_schema_extra={"frontend_type": "datetime", "frontend_readonly": True, "frontend_required": False, "label": "Gestartet am"},
|
||||
json_schema_extra={"frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False, "label": "Gestartet am"},
|
||||
)
|
||||
completedAt: Optional[float] = Field(
|
||||
default=None,
|
||||
description="Step completion timestamp",
|
||||
json_schema_extra={"frontend_type": "datetime", "frontend_readonly": True, "frontend_required": False, "label": "Abgeschlossen am"},
|
||||
json_schema_extra={"frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False, "label": "Abgeschlossen am"},
|
||||
)
|
||||
durationMs: Optional[int] = Field(
|
||||
default=None,
|
||||
|
|
@ -433,7 +495,7 @@ class AutoTask(PowerOnModel):
|
|||
"frontend_readonly": True,
|
||||
"frontend_required": True,
|
||||
"label": "Lauf-ID",
|
||||
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoRun"},
|
||||
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoRun", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
workflowId: str = Field(
|
||||
|
|
@ -443,7 +505,7 @@ class AutoTask(PowerOnModel):
|
|||
"frontend_readonly": True,
|
||||
"frontend_required": True,
|
||||
"label": "Workflow-ID",
|
||||
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoWorkflow"},
|
||||
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoWorkflow", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
nodeId: str = Field(
|
||||
|
|
@ -467,13 +529,23 @@ class AutoTask(PowerOnModel):
|
|||
"frontend_readonly": False,
|
||||
"frontend_required": False,
|
||||
"label": "Zugewiesen an",
|
||||
"fk_target": {"db": "poweron_app", "table": "User"},
|
||||
"fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
|
||||
},
|
||||
)
|
||||
status: str = Field(
|
||||
default=AutoTaskStatus.PENDING.value,
|
||||
description="Status: pending, completed, cancelled, expired",
|
||||
json_schema_extra={"frontend_type": "text", "frontend_required": False, "label": "Status"},
|
||||
json_schema_extra={
|
||||
"frontend_type": "select",
|
||||
"frontend_required": False,
|
||||
"label": "Status",
|
||||
"frontend_options": [
|
||||
{"value": "pending", "label": "Wartend"},
|
||||
{"value": "completed", "label": "Abgeschlossen"},
|
||||
{"value": "cancelled", "label": "Abgebrochen"},
|
||||
{"value": "expired", "label": "Abgelaufen"},
|
||||
],
|
||||
},
|
||||
)
|
||||
result: Optional[Dict[str, Any]] = Field(
|
||||
default=None,
|
||||
|
|
@ -483,7 +555,7 @@ class AutoTask(PowerOnModel):
|
|||
expiresAt: Optional[float] = Field(
|
||||
default=None,
|
||||
description="Expiration timestamp for the task",
|
||||
json_schema_extra={"frontend_type": "datetime", "frontend_required": False, "label": "Läuft ab am"},
|
||||
json_schema_extra={"frontend_type": "timestamp", "frontend_required": False, "label": "Läuft ab am"},
|
||||
)
@@ -7,6 +7,7 @@ Uses PostgreSQL poweron_graphicaleditor database (Greenfield).
|
|||
|
||||
import base64
|
||||
import logging
|
||||
import time
|
||||
import uuid
|
||||
from typing import Dict, Any, List, Optional
|
||||
|
||||
|
|
@ -278,6 +279,7 @@ class GraphicalEditorObjects:
|
|||
"workflowId": workflowId,
|
||||
"label": label,
|
||||
"status": "running",
|
||||
"startedAt": time.time(),
|
||||
"nodeOutputs": _make_json_serializable(nodeOutputs or {}),
|
||||
"currentNodeId": None,
|
||||
"context": ctx,
|
||||
|
|
@@ -314,6 +316,8 @@ class GraphicalEditorObjects:
updates = {}
if status is not None:
updates["status"] = status
if status in ("completed", "failed", "stopped", "cancelled") and not run.get("completedAt"):
updates["completedAt"] = time.time()
if nodeOutputs is not None:
updates["nodeOutputs"] = _make_json_serializable(nodeOutputs)
if currentNodeId is not None:
172
modules/features/graphicalEditor/nodeAdapter.py
Normal file
172
modules/features/graphicalEditor/nodeAdapter.py
Normal file
@@ -0,0 +1,172 @@
# Copyright (c) 2025 Patrick Motsch
|
||||
# All rights reserved.
|
||||
"""
|
||||
Schicht-3 Adapter Layer — projects Schicht-2 Actions into Editor-Node form.
|
||||
|
||||
Architecture (see wiki/c-work/1-plan/2026-04-typed-action-architecture.md):
|
||||
- Schicht 1: Types Catalog (portTypes.PORT_TYPE_CATALOG)
|
||||
- Schicht 2: Methods/Actions (modules/workflows/methods/method*) - source of truth
|
||||
for Backend capabilities (parameter types, output types).
|
||||
- Schicht 3: Adapters (this module) - Editor-Node + AI-Agent-Tool wrappers around
|
||||
Actions. References Action signature, never duplicates types.
|
||||
- Schicht 4: Workflow-Bindings + Agent-Tool-Calls (instance-level wiring).
|
||||
|
||||
This module defines the in-code Adapter representation (NodeAdapter,
|
||||
UserParamMapping) and the projection helpers that convert between the
|
||||
legacy node-dict wire format and the typed Adapter view.
|
||||
|
||||
Wire-format compatibility: the legacy dicts in nodeDefinitions/*.py remain
|
||||
the wire format consumed by the frontend until Phase 4. This module exposes
|
||||
an Adapter VIEW over those dicts so the validator and AI-tool generator can
|
||||
operate on a clean, typed structure without breaking consumers.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass, field
|
||||
from typing import Any, Dict, List, Mapping, Optional
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class UserParamMapping:
|
||||
"""Maps an Action argument into a Node's user-facing parameter.
|
||||
|
||||
The Action signature is the source of truth for type/required/description.
|
||||
This mapping carries Editor-specific overrides (label, UI hints, conditional
|
||||
visibility) but never re-declares the type.
|
||||
"""
|
||||
|
||||
actionArg: str
|
||||
label: Optional[Any] = None
|
||||
description: Optional[Any] = None
|
||||
uiHint: Optional[str] = None
|
||||
frontendOptions: Optional[Any] = None
|
||||
visibleWhen: Optional[Dict[str, Any]] = None
|
||||
defaultValue: Any = None
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class NodeAdapter:
|
||||
"""Schicht-3 Editor-Node adapter — binds to a Schicht-2 Action.
|
||||
|
||||
All type information for `userParams` is inherited from the bound Action.
|
||||
The adapter only carries Editor-specific concerns (UI labels, port topology,
|
||||
icon/color metadata).
|
||||
"""
|
||||
|
||||
nodeId: str
|
||||
bindsAction: str
|
||||
category: str
|
||||
label: Any
|
||||
description: Any
|
||||
userParams: List[UserParamMapping] = field(default_factory=list)
|
||||
contextParams: Dict[str, str] = field(default_factory=dict)
|
||||
inputs: int = 1
|
||||
outputs: int = 1
|
||||
inputAccepts: List[List[str]] = field(default_factory=list)
|
||||
outputLabels: Optional[List[Any]] = None
|
||||
meta: Dict[str, Any] = field(default_factory=dict)
|
||||
|
||||
|
||||
def _isMethodBoundNode(node: Mapping[str, Any]) -> bool:
|
||||
"""True if a legacy node dict is bound to a Schicht-2 Action."""
|
||||
return bool(node.get("_method") and node.get("_action"))
|
||||
|
||||
|
||||
def bindsActionFromLegacy(node: Mapping[str, Any]) -> Optional[str]:
|
||||
"""Build the canonical 'method.action' identifier from a legacy node dict.
|
||||
|
||||
Returns None for framework-primitive nodes (trigger/flow/input/data).
|
||||
"""
|
||||
method = node.get("_method")
|
||||
action = node.get("_action")
|
||||
if not method or not action:
|
||||
return None
|
||||
return f"{method}.{action}"
|
||||
|
||||
|
||||
def _userParamFromLegacyParam(legacyParam: Mapping[str, Any]) -> UserParamMapping:
|
||||
"""Project a legacy parameter dict into a UserParamMapping view.
|
||||
|
||||
The view carries only Editor-overrides; type/required come from the Action.
|
||||
"""
|
||||
return UserParamMapping(
|
||||
actionArg=str(legacyParam.get("name", "")),
|
||||
label=legacyParam.get("label"),
|
||||
description=legacyParam.get("description"),
|
||||
uiHint=legacyParam.get("frontendType"),
|
||||
frontendOptions=legacyParam.get("frontendOptions"),
|
||||
visibleWhen=_extractVisibleWhen(legacyParam.get("frontendOptions")),
|
||||
defaultValue=legacyParam.get("default"),
|
||||
)
|
||||
|
||||
|
||||
def _extractVisibleWhen(frontendOptions: Any) -> Optional[Dict[str, Any]]:
"""Extract conditional-visibility hint from legacy frontendOptions.showWhen."""
if not isinstance(frontendOptions, dict):
return None
dependsOn = frontendOptions.get("dependsOn")
showWhen = frontendOptions.get("showWhen")
if not dependsOn or not showWhen:
return None
return {"actionArg": str(dependsOn), "in": list(showWhen) if isinstance(showWhen, (list, tuple)) else [showWhen]}
|
||||
|
||||
|
||||
def _adapterFromLegacyNode(node: Mapping[str, Any]) -> Optional[NodeAdapter]:
|
||||
"""Build a NodeAdapter view from a legacy node dict.
|
||||
|
||||
Returns None for framework-primitive nodes (no _method/_action binding).
|
||||
Pure projection — no validation, no Action-signature lookup.
|
||||
"""
|
||||
if not _isMethodBoundNode(node):
|
||||
return None
|
||||
|
||||
bindsAction = bindsActionFromLegacy(node)
|
||||
if not bindsAction:
|
||||
return None
|
||||
|
||||
inputAccepts = _projectInputAccepts(node)
|
||||
|
||||
return NodeAdapter(
|
||||
nodeId=str(node.get("id", "")),
|
||||
bindsAction=bindsAction,
|
||||
category=str(node.get("category", "")),
|
||||
label=node.get("label", ""),
|
||||
description=node.get("description", ""),
|
||||
userParams=[_userParamFromLegacyParam(p) for p in (node.get("parameters") or [])],
|
||||
contextParams={},
|
||||
inputs=int(node.get("inputs", 1)),
|
||||
outputs=int(node.get("outputs", 1)),
|
||||
inputAccepts=inputAccepts,
|
||||
outputLabels=node.get("outputLabels"),
|
||||
meta=dict(node.get("meta") or {}),
|
||||
)
|
||||
|
||||
|
||||
def _projectInputAccepts(node: Mapping[str, Any]) -> List[List[str]]:
|
||||
"""Convert legacy `inputPorts` dict-of-dicts into a per-port `accepts` list."""
|
||||
inputPorts = node.get("inputPorts") or {}
|
||||
if not isinstance(inputPorts, dict):
|
||||
return []
|
||||
inputs = int(node.get("inputs", 0) or 0)
|
||||
if inputs <= 0:
|
||||
return []
|
||||
out: List[List[str]] = []
|
||||
for portIdx in range(inputs):
|
||||
portCfg = inputPorts.get(portIdx) or inputPorts.get(str(portIdx)) or {}
|
||||
accepts = portCfg.get("accepts") if isinstance(portCfg, dict) else None
|
||||
out.append(list(accepts) if isinstance(accepts, (list, tuple)) else [])
|
||||
return out
|
||||
|
||||
|
||||
def _projectAllAdapters(staticNodes: List[Mapping[str, Any]]) -> Dict[str, NodeAdapter]:
|
||||
"""Project a list of legacy node dicts into a {nodeId: NodeAdapter} map.
|
||||
|
||||
Framework-primitive nodes (no Action binding) are silently skipped.
|
||||
"""
|
||||
out: Dict[str, NodeAdapter] = {}
|
||||
for node in staticNodes:
adapter = _adapterFromLegacyNode(node)
if adapter is not None:
out[adapter.nodeId] = adapter
return out
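A sketch of a call site (assumed, not shown in this diff): projecting the static catalog into typed adapters and inspecting one Action-bound node, if it carries a binding.

from modules.features.graphicalEditor.nodeDefinitions import STATIC_NODE_TYPES

adapters = _projectAllAdapters(list(STATIC_NODE_TYPES))
draft = adapters.get("email.draftEmail")  # present only when the node is Action-bound
if draft is not None:
    print(draft.bindsAction, [p.actionArg for p in draft.userParams])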
|
||||
|
|
@ -12,19 +12,19 @@ AI_NODES = [
|
|||
"parameters": [
|
||||
{"name": "aiPrompt", "type": "string", "required": True, "frontendType": "textarea",
|
||||
"description": t("KI-Prompt")},
|
||||
{"name": "outputFormat", "type": "string", "required": False, "frontendType": "select",
|
||||
"frontendOptions": {"options": ["text", "json", "emailDraft"]},
|
||||
"description": t("Ausgabeformat"), "default": "text"},
|
||||
{"name": "resultType", "type": "string", "required": False, "frontendType": "select",
|
||||
"frontendOptions": {"options": ["txt", "json", "md", "csv", "xml", "html", "pdf", "docx", "xlsx", "pptx", "png", "jpg"]},
|
||||
"description": t("Ausgabeformat"), "default": "txt"},
|
||||
{"name": "documentList", "type": "string", "required": False, "frontendType": "hidden",
|
||||
"description": t("Dokumentenliste (via Wire oder DataRef)"), "default": ""},
|
||||
{"name": "context", "type": "string", "required": False, "frontendType": "hidden",
|
||||
"description": t("Kontext-Daten (via Wire oder DataRef)"), "default": ""},
|
||||
{"name": "simpleMode", "type": "boolean", "required": False, "frontendType": "checkbox",
|
||||
"description": t("Einfacher Modus"), "default": True},
|
||||
],
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
"inputPorts": {0: {"accepts": ["DocumentList", "AiResult", "TextResult", "Transit"]}},
|
||||
"inputPorts": {0: {"accepts": [
|
||||
"DocumentList", "AiResult", "TextResult", "Transit", "LoopItem", "ActionResult",
|
||||
]}},
|
||||
"outputPorts": {0: {"schema": "AiResult"}},
|
||||
"meta": {"icon": "mdi-robot", "color": "#9C27B0", "usesAi": True},
|
||||
"_method": "ai",
|
||||
|
|
@ -53,9 +53,11 @@ AI_NODES = [
|
|||
"label": t("Dokument zusammenfassen"),
|
||||
"description": t("Dokumentinhalt zusammenfassen"),
|
||||
"parameters": [
|
||||
{"name": "documentList", "type": "string", "required": True, "frontendType": "hidden",
|
||||
"description": t("Dokumentenliste (via Wire oder DataRef)"), "default": ""},
|
||||
{"name": "summaryLength", "type": "string", "required": False, "frontendType": "select",
|
||||
"frontendOptions": {"options": ["short", "medium", "long"]},
|
||||
"description": t("Kurz, mittel oder lang"), "default": "medium"},
|
||||
"frontendOptions": {"options": ["brief", "medium", "detailed"]},
|
||||
"description": t("Kurz, mittel oder ausführlich"), "default": "medium"},
|
||||
],
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
|
|
@ -71,9 +73,10 @@ AI_NODES = [
|
|||
"label": t("Dokument übersetzen"),
|
||||
"description": t("Dokument in Zielsprache übersetzen"),
|
||||
"parameters": [
|
||||
{"name": "targetLanguage", "type": "string", "required": True, "frontendType": "select",
|
||||
"frontendOptions": {"options": ["en", "de", "fr", "it", "es", "pt", "nl"]},
|
||||
"description": t("Zielsprache")},
|
||||
{"name": "documentList", "type": "string", "required": True, "frontendType": "hidden",
|
||||
"description": t("Dokumentenliste (via Wire oder DataRef)"), "default": ""},
|
||||
{"name": "targetLanguage", "type": "string", "required": True, "frontendType": "text",
|
||||
"description": t("Zielsprache (z.B. de, en, French)")},
|
||||
],
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
|
|
@ -89,8 +92,10 @@ AI_NODES = [
|
|||
"label": t("Dokument konvertieren"),
|
||||
"description": t("Dokument in anderes Format konvertieren"),
|
||||
"parameters": [
|
||||
{"name": "documentList", "type": "string", "required": True, "frontendType": "hidden",
|
||||
"description": t("Dokumentenliste (via Wire oder DataRef)"), "default": ""},
|
||||
{"name": "targetFormat", "type": "string", "required": True, "frontendType": "select",
|
||||
"frontendOptions": {"options": ["pdf", "docx", "txt", "html", "md"]},
|
||||
"frontendOptions": {"options": ["docx", "pdf", "xlsx", "csv", "txt", "html", "json", "md"]},
|
||||
"description": t("Zielformat")},
|
||||
],
|
||||
"inputs": 1,
|
||||
|
|
@ -126,9 +131,9 @@ AI_NODES = [
|
|||
"parameters": [
|
||||
{"name": "prompt", "type": "string", "required": True, "frontendType": "textarea",
|
||||
"description": t("Code-Generierungs-Prompt")},
|
||||
{"name": "language", "type": "string", "required": False, "frontendType": "select",
|
||||
"frontendOptions": {"options": ["python", "javascript", "typescript", "java", "csharp", "go"]},
|
||||
"description": t("Programmiersprache"), "default": "python"},
|
||||
{"name": "resultType", "type": "string", "required": False, "frontendType": "select",
|
||||
"frontendOptions": {"options": ["py", "js", "ts", "html", "java", "cpp", "txt", "json", "csv", "xml"]},
|
||||
"description": t("Datei-Endung der erzeugten Code-Datei"), "default": "py"},
|
||||
],
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
|
|
|
|||
|
|
@ -94,8 +94,6 @@ CLICKUP_NODES = [
|
|||
{"name": "connectionReference", "type": "string", "required": True, "frontendType": "userConnection",
|
||||
"frontendOptions": {"authority": "clickup"},
|
||||
"description": t("ClickUp-Verbindung")},
|
||||
{"name": "teamId", "type": "string", "required": False, "frontendType": "text",
|
||||
"description": t("Workspace")},
|
||||
{"name": "pathQuery", "type": "string", "required": False, "frontendType": "clickupList",
|
||||
"frontendOptions": {"dependsOn": "connectionReference"},
|
||||
"description": t("Pfad zur Liste")},
|
||||
|
|
@ -144,10 +142,8 @@ CLICKUP_NODES = [
|
|||
"description": t("Task-ID")},
|
||||
{"name": "path", "type": "string", "required": False, "frontendType": "text",
|
||||
"description": t("Oder Pfad")},
|
||||
{"name": "taskUpdateEntries", "type": "object", "required": False, "frontendType": "keyValueRows",
|
||||
"description": t("Zu ändernde Felder")},
|
||||
{"name": "taskUpdate", "type": "string", "required": False, "frontendType": "json",
|
||||
"description": t("JSON für API")},
|
||||
"description": t("JSON-Body für PUT /task/{id}, z.B. {\"name\":\"...\",\"status\":\"...\"}")},
|
||||
],
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
|
|
@ -172,6 +168,8 @@ CLICKUP_NODES = [
|
|||
"description": t("Oder Pfad")},
|
||||
{"name": "fileName", "type": "string", "required": False, "frontendType": "text",
|
||||
"description": t("Dateiname")},
|
||||
{"name": "content", "type": "string", "required": True, "frontendType": "hidden",
|
||||
"description": t("Datei-Inhalt aus Upstream-Node (via Wire oder DataRef)"), "default": ""},
|
||||
],
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
|
|
|
|||
|
|
@ -10,14 +10,13 @@ CONTEXT_NODES = [
|
|||
"label": t("Inhalt extrahieren"),
|
||||
"description": t("Dokumentstruktur extrahieren ohne KI (Seiten, Abschnitte, Bilder, Tabellen)"),
|
||||
"parameters": [
|
||||
{"name": "outputDetail", "type": "string", "required": False, "frontendType": "select",
|
||||
"frontendOptions": {"options": ["full", "structure", "references"]},
|
||||
"description": t("Detailgrad: full = alles, structure = Skelett, references = Dateireferenzen"),
|
||||
"default": "full"},
|
||||
{"name": "includeImages", "type": "boolean", "required": False, "frontendType": "checkbox",
|
||||
"description": t("Bilder extrahieren"), "default": True},
|
||||
{"name": "includeTables", "type": "boolean", "required": False, "frontendType": "checkbox",
|
||||
"description": t("Tabellen extrahieren"), "default": True},
|
||||
{"name": "documentList", "type": "string", "required": True, "frontendType": "hidden",
|
||||
"description": t("Dokumentenliste (via Wire oder DataRef)"), "default": ""},
|
||||
{"name": "extractionOptions", "type": "object", "required": False, "frontendType": "json",
|
||||
"description": t(
|
||||
"Extraktions-Optionen (JSON), z.B. {\"includeImages\": true, \"includeTables\": true, "
|
||||
"\"outputDetail\": \"full\"}"),
"default": {}},
],
"inputs": 1,
"outputs": 1,
|
||||
|
|
|
|||
|
|
@ -16,27 +16,11 @@ DATA_NODES = [
|
|||
],
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
"inputPorts": {0: {"accepts": ["Transit"]}},
|
||||
"inputPorts": {0: {"accepts": ["Transit", "AiResult", "LoopItem"]}},
|
||||
"outputPorts": {0: {"schema": "AggregateResult"}},
|
||||
"executor": "data",
|
||||
"meta": {"icon": "mdi-playlist-plus", "color": "#607D8B", "usesAi": False},
|
||||
},
|
||||
{
|
||||
"id": "data.transform",
|
||||
"category": "data",
|
||||
"label": t("Umwandeln"),
|
||||
"description": t("Daten umstrukturieren"),
|
||||
"parameters": [
|
||||
{"name": "mappings", "type": "json", "required": True, "frontendType": "mappingTable",
|
||||
"description": t("Feld-Zuordnungen"), "default": []},
|
||||
],
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
"inputPorts": {0: {"accepts": ["Transit"]}},
|
||||
"outputPorts": {0: {"schema": "ActionResult", "dynamic": True, "deriveFrom": "mappings"}},
|
||||
"executor": "data",
|
||||
"meta": {"icon": "mdi-swap-horizontal-bold", "color": "#607D8B", "usesAi": False},
|
||||
},
|
||||
{
|
||||
"id": "data.filter",
|
||||
"category": "data",
|
||||
|
|
|
|||
|
|
@ -17,14 +17,8 @@ EMAIL_NODES = [
|
|||
"description": t("Ordner"), "default": "Inbox"},
|
||||
{"name": "limit", "type": "number", "required": False, "frontendType": "number",
|
||||
"description": t("Max E-Mails"), "default": 100},
|
||||
{"name": "fromAddress", "type": "string", "required": False, "frontendType": "text",
|
||||
"description": t("Nur von dieser Adresse"), "default": ""},
|
||||
{"name": "subjectContains", "type": "string", "required": False, "frontendType": "text",
|
||||
"description": t("Betreff muss enthalten"), "default": ""},
|
||||
{"name": "hasAttachment", "type": "boolean", "required": False, "frontendType": "checkbox",
|
||||
"description": t("Nur mit Anhängen"), "default": False},
|
||||
{"name": "filter", "type": "string", "required": False, "frontendType": "text",
|
||||
"description": t("Erweitert: Filter-Text"), "default": ""},
|
||||
"description": t("Filter-Ausdruck (z.B. 'from:max@example.com hasAttachment:true betreff')"), "default": ""},
|
||||
],
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
|
|
@ -43,24 +37,12 @@ EMAIL_NODES = [
|
|||
{"name": "connectionReference", "type": "string", "required": True, "frontendType": "userConnection",
|
||||
"frontendOptions": {"authority": "msft"},
|
||||
"description": t("E-Mail-Konto Verbindung")},
|
||||
{"name": "query", "type": "string", "required": False, "frontendType": "text",
|
||||
"description": t("Suchbegriff"), "default": ""},
|
||||
{"name": "query", "type": "string", "required": True, "frontendType": "text",
|
||||
"description": t("Suchausdruck (z.B. 'from:max@example.com hasAttachments:true Rechnung')")},
|
||||
{"name": "folder", "type": "string", "required": False, "frontendType": "text",
|
||||
"description": t("Ordner"), "default": "Inbox"},
|
||||
"description": t("Ordner"), "default": "All"},
|
||||
{"name": "limit", "type": "number", "required": False, "frontendType": "number",
|
||||
"description": t("Max E-Mails"), "default": 100},
|
||||
{"name": "fromAddress", "type": "string", "required": False, "frontendType": "text",
|
||||
"description": t("Von Adresse"), "default": ""},
|
||||
{"name": "toAddress", "type": "string", "required": False, "frontendType": "text",
|
||||
"description": t("An Adresse"), "default": ""},
|
||||
{"name": "subjectContains", "type": "string", "required": False, "frontendType": "text",
|
||||
"description": t("Betreff enthält"), "default": ""},
|
||||
{"name": "bodyContains", "type": "string", "required": False, "frontendType": "text",
|
||||
"description": t("Inhalt enthält"), "default": ""},
|
||||
{"name": "hasAttachment", "type": "boolean", "required": False, "frontendType": "checkbox",
|
||||
"description": t("Mit Anhängen"), "default": False},
|
||||
{"name": "filter", "type": "string", "required": False, "frontendType": "text",
|
||||
"description": t("Erweitert: KQL-Filter"), "default": ""},
|
||||
],
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
|
|
@ -74,22 +56,24 @@ EMAIL_NODES = [
|
|||
"id": "email.draftEmail",
|
||||
"category": "email",
|
||||
"label": t("E-Mail entwerfen"),
|
||||
"description": t("E-Mail-Entwurf erstellen"),
|
||||
"description": t(
|
||||
"AI-gestützt einen E-Mail-Entwurf aus Kontext und optionalen Dokumenten erstellen"),
|
||||
"parameters": [
|
||||
{"name": "connectionReference", "type": "string", "required": True, "frontendType": "userConnection",
|
||||
"frontendOptions": {"authority": "msft"},
|
||||
"description": t("E-Mail-Konto")},
|
||||
{"name": "subject", "type": "string", "required": True, "frontendType": "text",
|
||||
"description": t("Betreff")},
|
||||
{"name": "body", "type": "string", "required": True, "frontendType": "textarea",
|
||||
"description": t("Inhalt")},
|
||||
{"name": "context", "type": "string", "required": False, "frontendType": "textarea",
|
||||
"description": t("Kontext / Brief-Beschreibung für die KI-Komposition"), "default": ""},
|
||||
{"name": "to", "type": "string", "required": False, "frontendType": "text",
|
||||
"description": t("Empfänger"), "default": ""},
|
||||
{"name": "attachments", "type": "json", "required": False, "frontendType": "attachmentBuilder",
|
||||
"description": t(
|
||||
"Anhänge: Liste von { contentRef | csvFromVariable | base64Content, name, mimeType }. "
|
||||
"Per Wire befüllbar (z.B. CSV aus data.consolidate)."),
|
||||
"default": []},
|
||||
"description": t("Empfänger (komma-separiert, optional für Entwurf)"), "default": ""},
|
||||
{"name": "documentList", "type": "string", "required": False, "frontendType": "hidden",
|
||||
"description": t("Anhang-Dokumente (via Wire oder DataRef)"), "default": ""},
|
||||
{"name": "emailContent", "type": "string", "required": False, "frontendType": "hidden",
|
||||
"description": t("Direkt vorbereiteter Inhalt {subject, body, to} (via Wire — überspringt KI)"),
|
||||
"default": ""},
|
||||
{"name": "emailStyle", "type": "string", "required": False, "frontendType": "select",
|
||||
"frontendOptions": {"options": ["formal", "casual", "business"]},
|
||||
"description": t("Stil"), "default": "business"},
|
||||
],
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
|
|
|
|||
|
|
@ -88,7 +88,9 @@ FLOW_NODES = [
|
|||
],
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
"inputPorts": {0: {"accepts": ["Transit", "UdmDocument"]}},
|
||||
"inputPorts": {0: {"accepts": [
|
||||
"Transit", "UdmDocument", "EmailList", "DocumentList", "FileList", "TaskList", "ActionResult",
|
||||
]}},
|
||||
"outputPorts": {0: {"schema": "LoopItem"}},
|
||||
"executor": "flow",
|
||||
"meta": {"icon": "mdi-repeat", "color": "#FF9800", "usesAi": False},
|
||||
|
|
|
|||
|
|
@ -22,7 +22,7 @@ INPUT_NODES = [
|
|||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
"inputPorts": {0: {"accepts": ["Transit"]}},
|
||||
"outputPorts": {0: {"schema": "FormPayload", "dynamic": True, "deriveFrom": "fields"}},
|
||||
"outputPorts": {0: {"schema": {"kind": "fromGraph", "parameter": "fields"}}},
|
||||
"executor": "input",
|
||||
"meta": {"icon": "mdi-form-textbox", "color": "#9C27B0", "usesAi": False},
|
||||
},
|
||||
|
|
|
|||
|
|
@ -4,6 +4,19 @@
|
|||
|
||||
from modules.shared.i18nRegistry import t
|
||||
|
||||
# Typed FeatureInstance binding (replaces legacy `string, hidden`).
# - type FeatureInstanceRef[redmine] is filtered by the DataPicker.
# - frontendType "featureInstance" is rendered by FeatureInstancePicker which
# loads /options/feature.instance?featureCode=redmine for the current mandate.
_REDMINE_INSTANCE_PARAM = {
"name": "featureInstanceId",
"type": "FeatureInstanceRef[redmine]",
"required": True,
"frontendType": "featureInstance",
"frontendOptions": {"featureCode": "redmine"},
"description": t("Redmine-Mandant"),
}
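Each node below takes a fresh copy via dict(_REDMINE_INSTANCE_PARAM), so a per-node tweak never mutates the shared constant; note that dict() copies shallowly, so the nested frontendOptions dict would still be shared:

param = dict(_REDMINE_INSTANCE_PARAM)
param["required"] = False  # hypothetical per-node override, not used by the nodes in this diff
assert _REDMINE_INSTANCE_PARAM["required"] is True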
|
||||
|
||||
REDMINE_NODES = [
|
||||
{
|
||||
"id": "redmine.readTicket",
|
||||
|
|
@ -11,8 +24,7 @@ REDMINE_NODES = [
|
|||
"label": t("Ticket lesen"),
|
||||
"description": t("Einzelnes Redmine-Ticket aus dem Mirror laden."),
|
||||
"parameters": [
|
||||
{"name": "featureInstanceId", "type": "string", "required": True, "frontendType": "hidden",
|
||||
"description": t("Redmine Feature-Instanz-ID")},
|
||||
dict(_REDMINE_INSTANCE_PARAM),
|
||||
{"name": "ticketId", "type": "number", "required": True, "frontendType": "number",
|
||||
"description": t("Redmine-Ticket-ID")},
|
||||
],
|
||||
|
|
@ -30,8 +42,7 @@ REDMINE_NODES = [
|
|||
"label": t("Tickets auflisten"),
|
||||
"description": t("Tickets aus dem lokalen Mirror mit Filtern (Tracker, Status, Zeitraum, Zuweisung)."),
|
||||
"parameters": [
|
||||
{"name": "featureInstanceId", "type": "string", "required": True, "frontendType": "hidden",
|
||||
"description": t("Redmine Feature-Instanz-ID")},
|
||||
dict(_REDMINE_INSTANCE_PARAM),
|
||||
{"name": "trackerIds", "type": "string", "required": False, "frontendType": "text",
|
||||
"description": t("Tracker-IDs (Komma-separiert)"), "default": ""},
|
||||
{"name": "status", "type": "string", "required": False, "frontendType": "text",
|
||||
|
|
@ -59,8 +70,7 @@ REDMINE_NODES = [
|
|||
"label": t("Ticket erstellen"),
|
||||
"description": t("Neues Ticket in Redmine anlegen. Mirror wird sofort aktualisiert."),
|
||||
"parameters": [
|
||||
{"name": "featureInstanceId", "type": "string", "required": True, "frontendType": "hidden",
|
||||
"description": t("Redmine Feature-Instanz-ID")},
|
||||
dict(_REDMINE_INSTANCE_PARAM),
|
||||
{"name": "subject", "type": "string", "required": True, "frontendType": "text",
|
||||
"description": t("Ticket-Titel")},
|
||||
{"name": "trackerId", "type": "number", "required": True, "frontendType": "number",
|
||||
|
|
@ -92,8 +102,7 @@ REDMINE_NODES = [
|
|||
"label": t("Ticket bearbeiten"),
|
||||
"description": t("Felder eines Redmine-Tickets aktualisieren. Nur gesetzte Felder werden uebertragen."),
|
||||
"parameters": [
|
||||
{"name": "featureInstanceId", "type": "string", "required": True, "frontendType": "hidden",
|
||||
"description": t("Redmine Feature-Instanz-ID")},
|
||||
dict(_REDMINE_INSTANCE_PARAM),
|
||||
{"name": "ticketId", "type": "number", "required": True, "frontendType": "number",
|
||||
"description": t("Ticket-ID")},
|
||||
{"name": "subject", "type": "string", "required": False, "frontendType": "text",
|
||||
|
|
@ -129,8 +138,7 @@ REDMINE_NODES = [
|
|||
"label": t("Statistik laden"),
|
||||
"description": t("Aggregierte Kennzahlen (KPIs, Durchsatz, Status-Verteilung, Backlog) aus dem Mirror."),
|
||||
"parameters": [
|
||||
{"name": "featureInstanceId", "type": "string", "required": True, "frontendType": "hidden",
|
||||
"description": t("Redmine Feature-Instanz-ID")},
|
||||
dict(_REDMINE_INSTANCE_PARAM),
|
||||
{"name": "dateFrom", "type": "string", "required": False, "frontendType": "date",
|
||||
"description": t("Zeitraum ab")},
|
||||
{"name": "dateTo", "type": "string", "required": False, "frontendType": "date",
|
||||
|
|
@ -154,8 +162,7 @@ REDMINE_NODES = [
|
|||
"label": t("Mirror synchronisieren"),
|
||||
"description": t("Tickets und Beziehungen aus Redmine in den lokalen Mirror uebernehmen."),
|
||||
"parameters": [
|
||||
{"name": "featureInstanceId", "type": "string", "required": True, "frontendType": "hidden",
|
||||
"description": t("Redmine Feature-Instanz-ID")},
|
||||
dict(_REDMINE_INSTANCE_PARAM),
|
||||
{"name": "force", "type": "boolean", "required": False, "frontendType": "checkbox",
|
||||
"description": t("Vollsync erzwingen (ignoriert lastSyncAt)"), "default": False},
|
||||
],
|
||||
|
|
|
|||
|
|
@ -43,7 +43,7 @@ SHAREPOINT_NODES = [
|
|||
],
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
"inputPorts": {0: {"accepts": ["FileList", "Transit"]}},
|
||||
"inputPorts": {0: {"accepts": ["FileList", "Transit", "LoopItem"]}},
|
||||
"outputPorts": {0: {"schema": "DocumentList"}},
|
||||
"meta": {"icon": "mdi-file-document", "color": "#0078D4", "usesAi": False},
|
||||
"_method": "sharepoint",
|
||||
|
|
@ -61,6 +61,8 @@ SHAREPOINT_NODES = [
|
|||
{"name": "pathQuery", "type": "string", "required": True, "frontendType": "sharepointFolder",
|
||||
"frontendOptions": {"dependsOn": "connectionReference"},
|
||||
"description": t("Zielordner-Pfad")},
|
||||
{"name": "content", "type": "string", "required": True, "frontendType": "hidden",
|
||||
"description": t("Datei-Inhalt aus Upstream-Node (via Wire oder DataRef)"), "default": ""},
|
||||
],
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
|
|
@ -106,7 +108,7 @@ SHAREPOINT_NODES = [
|
|||
],
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
"inputPorts": {0: {"accepts": ["FileList", "Transit"]}},
|
||||
"inputPorts": {0: {"accepts": ["FileList", "Transit", "LoopItem"]}},
|
||||
"outputPorts": {0: {"schema": "DocumentList"}},
|
||||
"meta": {"icon": "mdi-download", "color": "#0078D4", "usesAi": False},
|
||||
"_method": "sharepoint",
|
||||
|
|
|
|||
|
|
@ -34,7 +34,7 @@ TRIGGER_NODES = [
|
|||
"inputs": 0,
|
||||
"outputs": 1,
|
||||
"inputPorts": {},
|
||||
"outputPorts": {0: {"schema": "FormPayload", "dynamic": True, "deriveFrom": "formFields"}},
|
||||
"outputPorts": {0: {"schema": {"kind": "fromGraph", "parameter": "formFields"}}},
|
||||
"executor": "trigger",
|
||||
"meta": {"icon": "mdi-form-select", "color": "#9C27B0", "usesAi": False},
|
||||
},
|
||||
|
|
|
|||
|
|
@ -3,6 +3,20 @@
|
|||
|
||||
from modules.shared.i18nRegistry import t
|
||||
|
||||
# Typed FeatureInstance binding (replaces legacy `string, hidden`).
|
||||
# - type uses the discriminator notation `FeatureInstanceRef[<code>]` so the
|
||||
# DataPicker / RequiredAttributePicker can filter compatible upstream paths.
|
||||
# - frontendType "featureInstance" is rendered by FeatureInstancePicker which
|
||||
# loads /options/feature.instance?featureCode=trustee for the current mandate.
|
||||
_TRUSTEE_INSTANCE_PARAM = {
|
||||
"name": "featureInstanceId",
|
||||
"type": "FeatureInstanceRef[trustee]",
|
||||
"required": True,
|
||||
"frontendType": "featureInstance",
|
||||
"frontendOptions": {"featureCode": "trustee"},
|
||||
"description": t("Trustee-Mandant"),
|
||||
}
|
||||
|
||||
TRUSTEE_NODES = [
|
||||
{
|
||||
"id": "trustee.refreshAccountingData",
|
||||
|
|
@ -10,8 +24,7 @@ TRUSTEE_NODES = [
|
|||
"label": t("Buchhaltungsdaten aktualisieren"),
|
||||
"description": t("Buchhaltungsdaten aus externem System importieren/aktualisieren."),
|
||||
"parameters": [
|
||||
{"name": "featureInstanceId", "type": "string", "required": True, "frontendType": "hidden",
|
||||
"description": t("Trustee Feature-Instanz-ID")},
|
||||
dict(_TRUSTEE_INSTANCE_PARAM),
|
||||
{"name": "forceRefresh", "type": "boolean", "required": False, "frontendType": "checkbox",
|
||||
"description": t("Import erzwingen"), "default": False},
|
||||
{"name": "dateFrom", "type": "string", "required": False, "frontendType": "date",
|
||||
|
|
@ -39,15 +52,17 @@ TRUSTEE_NODES = [
|
|||
{"name": "sharepointFolder", "type": "string", "required": False, "frontendType": "sharepointFolder",
|
||||
"frontendOptions": {"dependsOn": "connectionReference"},
|
||||
"description": t("SharePoint-Ordnerpfad"), "default": ""},
|
||||
{"name": "featureInstanceId", "type": "string", "required": True, "frontendType": "hidden",
|
||||
"description": t("Trustee Feature-Instanz-ID")},
|
||||
dict(_TRUSTEE_INSTANCE_PARAM),
|
||||
{"name": "prompt", "type": "string", "required": False, "frontendType": "textarea",
|
||||
"description": t("AI-Prompt für Extraktion"), "default": ""},
|
||||
],
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
"inputPorts": {0: {"accepts": ["DocumentList", "Transit"]}},
|
||||
"outputPorts": {0: {"schema": "DocumentList"}},
|
||||
"inputPorts": {0: {"accepts": ["DocumentList", "Transit", "AiResult", "LoopItem", "ActionResult"]}},
|
||||
# Runtime returns ActionResult.isSuccess(documents=[...]) — see
|
||||
# actions/extractFromFiles.py. Declaring DocumentList here was adapter
|
||||
# drift and broke the DataPicker for downstream nodes.
|
||||
"outputPorts": {0: {"schema": "ActionResult"}},
|
||||
"meta": {"icon": "mdi-file-document-scan", "color": "#4CAF50", "usesAi": True},
|
||||
"_method": "trustee",
|
||||
"_action": "extractFromFiles",
|
||||
|
|
@ -58,14 +73,16 @@ TRUSTEE_NODES = [
|
|||
"label": t("Dokumente verarbeiten"),
|
||||
"description": t("TrusteeDocument + TrusteePosition aus Extraktionsergebnis erstellen."),
|
||||
"parameters": [
|
||||
{"name": "documentList", "type": "string", "required": False, "frontendType": "hidden",
|
||||
"description": t("Automatisch via Wire-Verbindung befüllt")},
|
||||
{"name": "featureInstanceId", "type": "string", "required": True, "frontendType": "hidden",
|
||||
"description": t("Trustee Feature-Instanz-ID")},
|
||||
# Type matches what producers actually emit: ActionResult.documents
|
||||
# is List[ActionDocument] (see datamodelChat.ActionResult). The
|
||||
# DataPicker uses this string to filter compatible upstream paths.
|
||||
{"name": "documentList", "type": "List[ActionDocument]", "required": True, "frontendType": "dataRef",
|
||||
"description": t("Dokumentenliste — gebunden via DataRef.")},
|
||||
dict(_TRUSTEE_INSTANCE_PARAM),
|
||||
],
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
"inputPorts": {0: {"accepts": ["DocumentList", "Transit"]}},
|
||||
"inputPorts": {0: {"accepts": ["ActionResult", "DocumentList", "Transit"]}},
|
||||
"outputPorts": {0: {"schema": "ActionResult"}},
|
||||
"meta": {"icon": "mdi-file-document-check", "color": "#4CAF50", "usesAi": False},
|
||||
"_method": "trustee",
|
||||
|
|
@ -77,14 +94,13 @@ TRUSTEE_NODES = [
|
|||
"label": t("In Buchhaltung synchronisieren"),
|
||||
"description": t("Trustee-Positionen in Buchhaltungssystem übertragen."),
|
||||
"parameters": [
|
||||
{"name": "documentList", "type": "string", "required": False, "frontendType": "hidden",
|
||||
"description": t("Automatisch via Wire-Verbindung befüllt")},
|
||||
{"name": "featureInstanceId", "type": "string", "required": True, "frontendType": "hidden",
|
||||
"description": t("Trustee Feature-Instanz-ID")},
|
||||
{"name": "documentList", "type": "List[ActionDocument]", "required": True, "frontendType": "dataRef",
|
||||
"description": t("Verarbeitete Dokumentenliste — gebunden via DataRef.")},
|
||||
dict(_TRUSTEE_INSTANCE_PARAM),
|
||||
],
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
"inputPorts": {0: {"accepts": ["Transit"]}},
|
||||
"inputPorts": {0: {"accepts": ["ActionResult", "DocumentList", "Transit"]}},
|
||||
"outputPorts": {0: {"schema": "ActionResult"}},
|
||||
"meta": {"icon": "mdi-calculator", "color": "#4CAF50", "usesAi": False},
|
||||
"_method": "trustee",
|
||||
|
|
@ -96,8 +112,7 @@ TRUSTEE_NODES = [
|
|||
"label": t("Treuhand-Daten abfragen"),
|
||||
"description": t("Daten aus der Trustee-DB lesen (Lookup, Aggregation, Roh-Export). Pendant zu refreshAccountingData ohne externen Sync."),
|
||||
"parameters": [
|
||||
{"name": "featureInstanceId", "type": "string", "required": True, "frontendType": "hidden",
|
||||
"description": t("Trustee Feature-Instanz-ID")},
|
||||
dict(_TRUSTEE_INSTANCE_PARAM),
|
||||
{"name": "mode", "type": "string", "required": True, "frontendType": "select",
|
||||
"frontendOptions": {"options": ["lookup", "raw", "aggregate"]},
|
||||
"description": t("Abfragemodus"), "default": "lookup"},
|
||||
|
|
@ -122,7 +137,7 @@ TRUSTEE_NODES = [
|
|||
],
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
"inputPorts": {0: {"accepts": ["Transit", "AiResult", "ConsolidateResult"]}},
|
||||
"inputPorts": {0: {"accepts": ["Transit", "AiResult", "ConsolidateResult", "UdmDocument"]}},
|
||||
"outputPorts": {0: {"schema": "ActionResult"}},
|
||||
"meta": {"icon": "mdi-database-search", "color": "#4CAF50", "usesAi": False},
|
||||
"_method": "trustee",
|
||||
|
|
|
|||
|
|
@ -6,9 +6,10 @@ Nodes are defined first; IO/method actions are used at execution time.
|
|||
"""
|
||||
|
||||
import logging
|
||||
from typing import Dict, List, Any
|
||||
from typing import Dict, List, Any, Optional
|
||||
|
||||
from modules.features.graphicalEditor.nodeDefinitions import STATIC_NODE_TYPES
|
||||
from modules.features.graphicalEditor.nodeAdapter import bindsActionFromLegacy
|
||||
from modules.features.graphicalEditor.portTypes import PORT_TYPE_CATALOG, SYSTEM_VARIABLES
|
||||
from modules.shared.i18nRegistry import normalizePrimaryLanguageTag, resolveText
|
||||
|
||||
|
|
@ -41,12 +42,21 @@ def _pickFromLangMap(d: Any, lang: str) -> Any:
|
|||
|
||||
|
||||
def _localizeNode(node: Dict[str, Any], language: str) -> Dict[str, Any]:
|
||||
"""Apply request language via resolveText (t() keys + multilingual dicts)."""
|
||||
"""Apply request language via resolveText (t() keys + multilingual dicts).
|
||||
|
||||
Also exposes Schicht-3 metadata (`bindsAction`) derived from the legacy
|
||||
`_method`/`_action` pair, so frontend consumers can resolve back to the
|
||||
Schicht-2 Action signature without parsing internal underscore-prefixed
|
||||
fields.
|
||||
"""
|
||||
lang = normalizePrimaryLanguageTag(language, "en")
|
||||
bindsAction = bindsActionFromLegacy(node)
|
||||
out = dict(node)
|
||||
for key in list(out.keys()):
|
||||
if key.startswith("_"):
|
||||
del out[key]
|
||||
if bindsAction:
|
||||
out["bindsAction"] = bindsAction
|
||||
lbl = node.get("label")
|
||||
if lbl is not None:
|
||||
out["label"] = resolveText(lbl, lang) or node.get("id", "")
|
||||
|
|
@ -124,3 +134,46 @@ def getNodeTypeToMethodAction() -> Dict[str, tuple]:
|
|||
if method and action:
|
||||
mapping[node["id"]] = (method, action)
|
||||
return mapping
|
||||
|
||||
|
||||
def validateAdaptersAgainstMethods(methodInstances: Optional[Dict[str, Any]] = None) -> Optional[str]:
|
||||
"""Run the Schicht-3 Adapter validator (5 drift rules) against the live methods.
|
||||
|
||||
Intended to be called once at startup after methodDiscovery has populated
|
||||
the methods registry. Returns a human-readable report (None when healthy)
|
||||
so the caller decides whether to log, raise, or surface to operators.
|
||||
|
||||
Pass `methodInstances` directly for testability; defaults to importing
|
||||
the live registry from `methodDiscovery.methods`.
|
||||
"""
|
||||
from modules.features.graphicalEditor.adapterValidator import (
|
||||
_buildActionsRegistryFromMethods,
|
||||
_formatAdapterReport,
|
||||
_validateAllAdapters,
|
||||
)
|
||||
|
||||
if methodInstances is None:
|
||||
try:
|
||||
from modules.workflows.processing.shared.methodDiscovery import methods
|
||||
except Exception as exc:
|
||||
logger.warning("Adapter validator skipped: cannot import methodDiscovery (%s)", exc)
|
||||
return None
|
||||
|
||||
methodInstances = {}
|
||||
for fullName, info in (methods or {}).items():
|
||||
shortName = fullName.replace("Method", "").lower() if fullName[:1].isupper() else fullName
|
||||
instance = info.get("instance") if isinstance(info, dict) else None
|
||||
if instance is not None:
|
||||
methodInstances[shortName] = instance
|
||||
|
||||
if not methodInstances:
|
||||
return None
|
||||
|
||||
actionsRegistry = _buildActionsRegistryFromMethods(methodInstances)
report = _validateAllAdapters(list(STATIC_NODE_TYPES), actionsRegistry)
formatted = _formatAdapterReport(report)
if not report.isHealthy:
logger.warning("[adapterValidator] %s", formatted)
elif report.warnings:
logger.info("[adapterValidator] %s", formatted)
return formatted
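A sketch of a startup call site (the module path is assumed here); the validator only reports, so the caller decides whether to log or abort:

import logging

logger = logging.getLogger(__name__)

from modules.features.graphicalEditor.nodeCatalog import validateAdaptersAgainstMethods  # module path assumed

report = validateAdaptersAgainstMethods()
if report:
    logger.info("Adapter drift report:\n%s", report)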
|
||||
|
|
|
|||
|
|
@ -4,13 +4,14 @@
|
|||
Typed Port System for the Graphical Editor.
|
||||
|
||||
Defines PortSchema, PORT_TYPE_CATALOG, SYSTEM_VARIABLES,
|
||||
output normalizers, input extractors, and Transit helpers.
|
||||
output normalizers, and Transit helpers.
|
||||
|
||||
"""
|
||||
|
||||
import logging
|
||||
import time
|
||||
import uuid
|
||||
from typing import Any, Callable, Dict, List, Optional
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
|
@ -25,9 +26,14 @@ logger = logging.getLogger(__name__)
|
|||
|
||||
class PortField(BaseModel):
|
||||
name: str
|
||||
type: str # str, int, bool, List[str], List[Document], Dict[str,Any]
|
||||
type: str # str, int, bool, List[str], List[Document], Dict[str,Any], ConnectionRef, …
|
||||
description: str = ""
|
||||
required: bool = True
|
||||
enumValues: Optional[List[str]] = None
# Marks this field as the discriminator for a Ref-Schema (e.g. ConnectionRef.authority,
# FeatureInstanceRef.featureCode). Pickers/validators use it to filter compatible
# producers by sub-type. Type must be "str" when discriminator is True.
discriminator: bool = False
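The discriminator flag backs the bracket notation used by node parameters such as FeatureInstanceRef[redmine]. A sketch of splitting that notation into base schema and discriminator value — where this parsing actually lives is not shown in this diff:

import re
from typing import Optional, Tuple

def splitDiscriminator(typeName: str) -> Tuple[str, Optional[str]]:
    # "FeatureInstanceRef[redmine]" -> ("FeatureInstanceRef", "redmine"); plain names pass through.
    match = re.fullmatch(r"(\w+)\[([^\]]+)\]", typeName)
    if not match:
        return typeName, None
    return match.group(1), match.group(2)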
|
||||
|
||||
|
||||
class PortSchema(BaseModel):
|
||||
|
|
@ -57,13 +63,113 @@ class OutputPortDef(BaseModel):
|
|||
# ---------------------------------------------------------------------------
|
||||
|
||||
PORT_TYPE_CATALOG: Dict[str, PortSchema] = {
|
||||
# -----------------------------------------------------------------
|
||||
# Refs (handles to external resources, pickable by user)
|
||||
# -----------------------------------------------------------------
|
||||
"ConnectionRef": PortSchema(name="ConnectionRef", fields=[
|
||||
PortField(name="id", type="str", description="UserConnection.id (UUID)"),
|
||||
PortField(name="authority", type="str", discriminator=True,
|
||||
description="Auth-Provider-Code: msft | clickup | google | …"),
|
||||
PortField(name="label", type="str", required=False, description="Anzeigename"),
|
||||
]),
|
||||
"FeatureInstanceRef": PortSchema(name="FeatureInstanceRef", fields=[
|
||||
PortField(name="id", type="str", description="FeatureInstance.id (UUID)"),
|
||||
PortField(name="featureCode", type="str", discriminator=True,
|
||||
description="Feature-Modul-Code: trustee | redmine | clickup | sharepoint | …"),
|
||||
PortField(name="label", type="str", required=False, description="Anzeigename"),
|
||||
PortField(name="mandateId", type="str", required=False, description="Zugehöriger Mandant"),
|
||||
]),
|
||||
"ClickUpListRef": PortSchema(name="ClickUpListRef", fields=[
|
||||
PortField(name="listId", type="str", description="ClickUp-Listen-ID"),
|
||||
PortField(name="name", type="str", required=False, description="Listenname"),
|
||||
PortField(name="spaceId", type="str", required=False, description="Space-ID"),
|
||||
PortField(name="folderId", type="str", required=False, description="Ordner-ID"),
|
||||
PortField(name="connection", type="ConnectionRef", required=False,
|
||||
description="ClickUp-Verbindung"),
|
||||
]),
|
||||
"PromptTemplateRef": PortSchema(name="PromptTemplateRef", fields=[
|
||||
PortField(name="id", type="str", description="Prompt-Template-ID"),
|
||||
PortField(name="name", type="str", required=False, description="Anzeigename"),
|
||||
PortField(name="version", type="str", required=False, description="Version / Tag"),
|
||||
]),
|
||||
"SharePointFolderRef": PortSchema(name="SharePointFolderRef", fields=[
|
||||
PortField(name="siteUrl", type="str", required=False, description="SharePoint Site"),
|
||||
PortField(name="driveId", type="str", required=False, description="Drive ID"),
|
||||
PortField(name="folderPath", type="str", required=False, description="Ordnerpfad"),
|
||||
PortField(name="label", type="str", required=False, description="Kurzlabel für Picker"),
|
||||
]),
|
||||
"SharePointFileRef": PortSchema(name="SharePointFileRef", fields=[
|
||||
PortField(name="siteUrl", type="str", required=False, description="SharePoint Site"),
|
||||
PortField(name="driveId", type="str", required=False, description="Drive ID"),
|
||||
PortField(name="filePath", type="str", required=False, description="Dateipfad"),
|
||||
PortField(name="fileName", type="str", required=False, description="Dateiname"),
|
||||
PortField(name="label", type="str", required=False, description="Kurzlabel"),
|
||||
]),
|
||||
"Document": PortSchema(name="Document", fields=[
|
||||
PortField(name="id", type="str", required=False, description="Dokument-/Datei-ID"),
|
||||
PortField(name="name", type="str", required=False, description="Anzeigename"),
|
||||
PortField(name="mimeType", type="str", required=False, description="MIME-Typ"),
|
||||
PortField(name="sizeBytes", type="int", required=False, description="Grösse"),
|
||||
PortField(name="downloadUrl", type="str", required=False, description="Download-URL"),
|
||||
PortField(name="filePath", type="str", required=False, description="Logischer Pfad"),
|
||||
]),
|
||||
"FileItem": PortSchema(name="FileItem", fields=[
|
||||
PortField(name="id", type="str", required=False, description="Datei-ID"),
|
||||
PortField(name="name", type="str", required=False, description="Name"),
|
||||
PortField(name="path", type="str", required=False, description="Pfad"),
|
||||
PortField(name="mimeType", type="str", required=False, description="MIME"),
|
||||
PortField(name="sizeBytes", type="int", required=False, description="Grösse"),
|
||||
]),
|
||||
"EmailItem": PortSchema(name="EmailItem", fields=[
|
||||
PortField(name="id", type="str", required=False, description="Message-ID"),
|
||||
PortField(name="subject", type="str", required=False, description="Betreff"),
|
||||
PortField(name="fromAddress", type="str", required=False, description="Absender"),
|
||||
PortField(name="toAddresses", type="List[str]", required=False, description="Empfänger"),
|
||||
PortField(name="receivedAt", type="str", required=False, description="Empfangen am"),
|
||||
PortField(name="hasAttachments", type="bool", required=False, description="Hat Anhänge"),
|
||||
PortField(name="bodyPreview", type="str", required=False, description="Vorschau"),
|
||||
]),
|
||||
"TaskItem": PortSchema(name="TaskItem", fields=[
|
||||
PortField(name="id", type="str", required=False, description="Task-ID"),
|
||||
PortField(name="title", type="str", required=False, description="Titel"),
|
||||
PortField(name="status", type="str", required=False, description="Status"),
|
||||
PortField(name="assignee", type="str", required=False, description="Assignee"),
|
||||
PortField(name="dueDate", type="str", required=False, description="Fälligkeit"),
|
||||
PortField(name="listId", type="str", required=False, description="ClickUp-Liste"),
|
||||
]),
|
||||
"QueryResult": PortSchema(name="QueryResult", fields=[
|
||||
PortField(name="rows", type="List[Any]", description="Ergebniszeilen"),
|
||||
PortField(name="columns", type="List[str]", required=False, description="Spaltennamen"),
|
||||
PortField(name="count", type="int", required=False, description="Zeilenanzahl"),
|
||||
]),
|
||||
"UdmPage": PortSchema(name="UdmPage", fields=[
|
||||
PortField(name="pageNumber", type="int", required=False, description="Seitennummer"),
|
||||
PortField(name="blocks", type="List[Any]", required=False, description="ContentBlocks"),
|
||||
]),
|
||||
"UdmBlock": PortSchema(name="UdmBlock", fields=[
|
||||
PortField(name="kind", type="str", required=False, description="Block-Typ"),
|
||||
PortField(name="text", type="str", required=False, description="Textinhalt"),
|
||||
PortField(name="children", type="List[Any]", required=False, description="Unterblöcke"),
|
||||
]),
|
||||
"DocumentList": PortSchema(name="DocumentList", fields=[
|
||||
PortField(name="documents", type="List[Document]",
|
||||
description="Dokumentenliste"),
|
||||
PortField(name="connection", type="ConnectionRef", required=False,
|
||||
description="Verbindung, mit der die Liste erzeugt wurde"),
|
||||
PortField(name="source", type="SharePointFolderRef", required=False,
|
||||
description="Herkunftsordner / Quelle"),
|
||||
PortField(name="count", type="int", required=False,
|
||||
description="Anzahl Dokumente"),
|
||||
]),
|
||||
"FileList": PortSchema(name="FileList", fields=[
|
||||
PortField(name="files", type="List[File]",
|
||||
PortField(name="files", type="List[FileItem]",
|
||||
description="Dateiliste"),
|
||||
PortField(name="connection", type="ConnectionRef", required=False,
|
||||
description="Verbindung"),
|
||||
PortField(name="source", type="SharePointFolderRef", required=False,
|
||||
description="Listen-Kontext"),
|
||||
PortField(name="count", type="int", required=False,
|
||||
description="Anzahl Dateien"),
|
||||
]),
|
||||
"EmailDraft": PortSchema(name="EmailDraft", fields=[
|
||||
PortField(name="subject", type="str",
|
||||
|
|
@ -76,14 +182,26 @@ PORT_TYPE_CATALOG: Dict[str, PortSchema] = {
|
|||
description="CC"),
|
||||
PortField(name="attachments", type="List[Document]", required=False,
|
||||
description="Anhänge"),
|
||||
PortField(name="connection", type="ConnectionRef", required=False,
|
||||
description="Outlook-/Graph-Verbindung"),
|
||||
]),
|
||||
"EmailList": PortSchema(name="EmailList", fields=[
|
||||
PortField(name="emails", type="List[Email]",
|
||||
PortField(name="emails", type="List[EmailItem]",
|
||||
description="E-Mails"),
|
||||
PortField(name="connection", type="ConnectionRef", required=False,
|
||||
description="Verbindung"),
|
||||
PortField(name="count", type="int", required=False,
|
||||
description="Anzahl"),
|
||||
]),
|
||||
"TaskList": PortSchema(name="TaskList", fields=[
|
||||
PortField(name="tasks", type="List[Task]",
|
||||
PortField(name="tasks", type="List[TaskItem]",
|
||||
description="Aufgaben"),
|
||||
PortField(name="connection", type="ConnectionRef", required=False,
|
||||
description="Verbindung"),
|
||||
PortField(name="listId", type="str", required=False,
|
||||
description="ClickUp-Listen-ID"),
|
||||
PortField(name="count", type="int", required=False,
|
||||
description="Anzahl"),
|
||||
]),
|
||||
"TaskResult": PortSchema(name="TaskResult", fields=[
|
||||
PortField(name="success", type="bool",
|
||||
|
|
@ -143,11 +261,29 @@ PORT_TYPE_CATALOG: Dict[str, PortSchema] = {
|
|||
PortField(name="merged", type="Dict",
|
||||
description="Zusammengeführte Daten"),
|
||||
]),
|
||||
"ActionDocument": PortSchema(name="ActionDocument", fields=[
|
||||
PortField(name="documentName", type="str",
|
||||
description="Dokumentname"),
|
||||
PortField(name="documentData", type="Any",
|
||||
description="Inhalt / Rohdaten (z.B. JSON-String, Bytes)"),
|
||||
PortField(name="mimeType", type="str",
|
||||
description="MIME-Typ"),
|
||||
PortField(name="fileId", type="str", required=False,
|
||||
description="Persistierte FileItem.id (vom Engine ergänzt)"),
|
||||
PortField(name="fileName", type="str", required=False,
|
||||
description="Persistierter Dateiname (vom Engine ergänzt)"),
|
||||
]),
|
||||
"ActionResult": PortSchema(name="ActionResult", fields=[
|
||||
PortField(name="success", type="bool",
|
||||
description="Erfolg"),
|
||||
PortField(name="error", type="str", required=False,
|
||||
description="Fehler"),
|
||||
# `documents` is populated for every action that returns ActionResult
|
||||
# (see datamodelChat.ActionResult.documents and actionNodeExecutor.out).
|
||||
# Without it in the catalog the DataPicker cannot offer downstream
|
||||
# bindings like `processDocuments → documents → *` for syncToAccounting.
|
||||
PortField(name="documents", type="List[ActionDocument]", required=False,
|
||||
description="Erzeugte Dokumente (immer befüllt für Trustee/AI/Email/...)"),
|
||||
PortField(name="data", type="Dict", required=False,
|
||||
description="Ergebnisdaten"),
|
||||
]),
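The comment above is the reason this entry matters for the DataPicker: once `documents` is declared on ActionResult, a downstream binding can point into it. A rough, hypothetical sketch (the node id and parameter name are placeholders; only the field path comes from the catalog entry, and the key names mirror the bindNodeParameter tool arguments referenced later in this diff):

    # Hypothetical DataRef binding: route the documents of an upstream
    # processDocuments action into a sync node's "items" parameter.
    binding = {
        "producerNodeId": "processDocuments",  # assumed upstream node id
        "path": ["documents"],                 # valid because ActionResult declares this field
        "parameterName": "items",              # assumed target parameter
    }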
|
||||
|
|
@ -156,7 +292,11 @@ PORT_TYPE_CATALOG: Dict[str, PortSchema] = {
|
|||
PortField(name="id", type="str", description="Dokument-ID"),
|
||||
PortField(name="sourceType", type="str", description="Quellformat (pdf, docx, …)"),
|
||||
PortField(name="sourcePath", type="str", description="Quellpfad"),
|
||||
PortField(name="children", type="List[Any]", description="StructuralNodes"),
|
||||
PortField(name="children", type="List[Any]", description="StructuralNodes / Seiten"),
|
||||
PortField(name="connection", type="ConnectionRef", required=False,
|
||||
description="Optionale Verbindungsreferenz"),
|
||||
PortField(name="source", type="SharePointFileRef", required=False,
|
||||
description="Optionale Datei-Herkunft"),
|
||||
]),
|
||||
"UdmNodeList": PortSchema(name="UdmNodeList", fields=[
|
||||
PortField(name="nodes", type="List[Any]", description="UDM StructuralNodes oder ContentBlocks"),
|
||||
|
|
@ -167,9 +307,287 @@ PORT_TYPE_CATALOG: Dict[str, PortSchema] = {
|
|||
PortField(name="mode", type="str", description="Konsolidierungsmodus"),
|
||||
PortField(name="count", type="int", description="Anzahl verarbeiteter Elemente"),
|
||||
]),
|
||||
|
||||
# -----------------------------------------------------------------
|
||||
# Shared sub-types (used inside Result schemas)
|
||||
# -----------------------------------------------------------------
|
||||
"ProcessError": PortSchema(name="ProcessError", fields=[
|
||||
PortField(name="documentId", type="str", required=False,
|
||||
description="Betroffenes Dokument (falls zuordbar)"),
|
||||
PortField(name="stage", type="str",
|
||||
description="Pipeline-Stufe: extract | parse | sync | validate | …"),
|
||||
PortField(name="message", type="str", description="Fehlermeldung"),
|
||||
PortField(name="code", type="str", required=False, description="Fehler-Code"),
|
||||
]),
|
||||
"JournalLine": PortSchema(name="JournalLine", fields=[
|
||||
PortField(name="id", type="str", required=False, description="Buchungszeilen-ID"),
|
||||
PortField(name="bookingDate", type="str", description="Buchungsdatum (ISO)"),
|
||||
PortField(name="account", type="str", description="Konto"),
|
||||
PortField(name="contraAccount", type="str", required=False, description="Gegenkonto"),
|
||||
PortField(name="amount", type="float", description="Betrag"),
|
||||
PortField(name="currency", type="str", required=False, description="Währung"),
|
||||
PortField(name="text", type="str", required=False, description="Buchungstext"),
|
||||
PortField(name="reference", type="str", required=False, description="Beleg-Referenz"),
|
||||
]),
|
||||
|
||||
# -----------------------------------------------------------------
|
||||
# Trustee Action Results
|
||||
# -----------------------------------------------------------------
|
||||
"TrusteeRefreshResult": PortSchema(name="TrusteeRefreshResult", fields=[
|
||||
PortField(name="syncCounts", type="Dict[str,int]",
|
||||
description="Tabellen → Anzahl synchronisierter Datensätze"),
|
||||
PortField(name="oldestBookingDate", type="str", required=False,
|
||||
description="Ältestes Buchungsdatum (ISO)"),
|
||||
PortField(name="newestBookingDate", type="str", required=False,
|
||||
description="Neuestes Buchungsdatum (ISO)"),
|
||||
PortField(name="durationMs", type="int", required=False,
|
||||
description="Dauer in Millisekunden"),
|
||||
PortField(name="featureInstance", type="FeatureInstanceRef", required=False,
|
||||
description="Trustee-Instanz"),
|
||||
PortField(name="errors", type="List[ProcessError]", required=False,
|
||||
description="Fehler-Liste"),
|
||||
]),
|
||||
"TrusteeProcessResult": PortSchema(name="TrusteeProcessResult", fields=[
|
||||
PortField(name="documents", type="List[Document]",
|
||||
description="Verarbeitete Dokumente mit angereicherten Daten"),
|
||||
PortField(name="processedCount", type="int", required=False,
|
||||
description="Anzahl erfolgreich verarbeiteter Dokumente"),
|
||||
PortField(name="failedCount", type="int", required=False,
|
||||
description="Anzahl fehlgeschlagener Dokumente"),
|
||||
PortField(name="featureInstance", type="FeatureInstanceRef", required=False,
|
||||
description="Trustee-Instanz"),
|
||||
PortField(name="errors", type="List[ProcessError]", required=False,
|
||||
description="Fehler-Liste"),
|
||||
]),
|
||||
"TrusteeSyncResult": PortSchema(name="TrusteeSyncResult", fields=[
|
||||
PortField(name="syncedCount", type="int",
|
||||
description="Erfolgreich in das Buchhaltungssystem übertragene Datensätze"),
|
||||
PortField(name="failedCount", type="int", required=False,
|
||||
description="Fehlgeschlagene Übertragungen"),
|
||||
PortField(name="journalLines", type="List[JournalLine]", required=False,
|
||||
description="Erzeugte Buchungszeilen"),
|
||||
PortField(name="featureInstance", type="FeatureInstanceRef", required=False,
|
||||
description="Ziel-Trustee-Instanz"),
|
||||
PortField(name="errors", type="List[ProcessError]", required=False,
|
||||
description="Fehler-Liste"),
|
||||
]),
|
||||
|
||||
# -----------------------------------------------------------------
|
||||
# Redmine Action Results
|
||||
# -----------------------------------------------------------------
|
||||
"RedmineTicket": PortSchema(name="RedmineTicket", fields=[
|
||||
PortField(name="id", type="str", description="Ticket-ID"),
|
||||
PortField(name="subject", type="str", description="Betreff"),
|
||||
PortField(name="description", type="str", required=False, description="Beschreibung"),
|
||||
PortField(name="status", type="str", description="Status-Name"),
|
||||
PortField(name="tracker", type="str", required=False,
|
||||
description="Tracker (Bug, Feature, Task, …)"),
|
||||
PortField(name="priority", type="str", required=False, description="Priorität"),
|
||||
PortField(name="assignee", type="str", required=False, description="Zugewiesen an"),
|
||||
PortField(name="author", type="str", required=False, description="Autor"),
|
||||
PortField(name="project", type="str", required=False, description="Projekt"),
|
||||
PortField(name="createdOn", type="str", required=False, description="Erstellt (ISO)"),
|
||||
PortField(name="updatedOn", type="str", required=False, description="Aktualisiert (ISO)"),
|
||||
PortField(name="dueDate", type="str", required=False, description="Fälligkeitsdatum"),
|
||||
PortField(name="featureInstance", type="FeatureInstanceRef", required=False,
|
||||
description="Redmine-Instanz"),
|
||||
]),
|
||||
"RedmineTicketList": PortSchema(name="RedmineTicketList", fields=[
|
||||
PortField(name="tickets", type="List[RedmineTicket]", description="Ticket-Liste"),
|
||||
PortField(name="count", type="int", required=False, description="Anzahl Tickets"),
|
||||
PortField(name="filters", type="Dict[str,Any]", required=False,
|
||||
description="Angewendete Filter"),
|
||||
PortField(name="featureInstance", type="FeatureInstanceRef", required=False,
|
||||
description="Redmine-Instanz"),
|
||||
]),
|
||||
"RedmineStats": PortSchema(name="RedmineStats", fields=[
|
||||
PortField(name="kpis", type="Dict[str,Any]",
|
||||
description="Key Performance Indicators"),
|
||||
PortField(name="throughput", type="Dict[str,Any]", required=False,
|
||||
description="Durchsatz pro Zeitraum"),
|
||||
PortField(name="statusDistribution", type="Dict[str,int]", required=False,
|
||||
description="Tickets pro Status"),
|
||||
PortField(name="backlog", type="Dict[str,Any]", required=False,
|
||||
description="Backlog-Statistik"),
|
||||
PortField(name="featureInstance", type="FeatureInstanceRef", required=False,
|
||||
description="Redmine-Instanz"),
|
||||
]),
|
||||
|
||||
# -----------------------------------------------------------------
|
||||
# ClickUp / SharePoint / Email helper results
|
||||
# -----------------------------------------------------------------
|
||||
"TaskAttachmentRef": PortSchema(name="TaskAttachmentRef", fields=[
|
||||
PortField(name="taskId", type="str", description="Aufgaben-ID"),
|
||||
PortField(name="attachmentId", type="str", required=False, description="Attachment-ID"),
|
||||
PortField(name="fileName", type="str", required=False, description="Dateiname"),
|
||||
PortField(name="url", type="str", required=False, description="Download-URL"),
|
||||
]),
|
||||
"AttachmentSpec": PortSchema(name="AttachmentSpec", fields=[
|
||||
PortField(name="source", type="str",
|
||||
description="Quellart: path | document | url",
|
||||
enumValues=["path", "document", "url"]),
|
||||
PortField(name="ref", type="str",
|
||||
description="Referenzwert (Pfad / Document.id / URL)"),
|
||||
PortField(name="fileName", type="str", required=False,
|
||||
description="Override-Dateiname"),
|
||||
PortField(name="mimeType", type="str", required=False, description="MIME-Override"),
|
||||
]),
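To make the `source` discriminator concrete, a minimal sketch of the three accepted shapes; the values are placeholders, not taken from the repository:

    attachment_specs = [
        {"source": "path", "ref": "/tmp/report.pdf"},
        {"source": "document", "ref": "doc-123", "fileName": "report.pdf"},
        {"source": "url", "ref": "https://example.com/report.pdf", "mimeType": "application/pdf"},
    ]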
|
||||
|
||||
# -----------------------------------------------------------------
|
||||
# Expressions (replace string-typed condition / cron params)
|
||||
# -----------------------------------------------------------------
|
||||
"CronExpression": PortSchema(name="CronExpression", fields=[
|
||||
PortField(name="expression", type="str",
|
||||
description="Cron-Ausdruck (5 oder 6 Felder)"),
|
||||
PortField(name="timezone", type="str", required=False,
|
||||
description="IANA Timezone (z.B. Europe/Zurich)"),
|
||||
]),
|
||||
"ConditionExpression": PortSchema(name="ConditionExpression", fields=[
|
||||
PortField(name="expression", type="str", description="Boolescher Ausdruck"),
|
||||
PortField(name="syntax", type="str", required=False,
|
||||
description="jmespath | jsonlogic | python | template",
|
||||
enumValues=["jmespath", "jsonlogic", "python", "template"]),
|
||||
]),
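A minimal sketch of how these two expression schemas are filled (the concrete values are illustrative assumptions):

    cron_param = {"expression": "0 6 * * 1-5", "timezone": "Europe/Zurich"}  # 5-field cron: weekdays at 06:00
    condition_param = {"expression": "payload.amount > 1000", "syntax": "python"}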
|
||||
|
||||
# -----------------------------------------------------------------
|
||||
# Semantic primitives (give meaning to scalar str values)
|
||||
# -----------------------------------------------------------------
|
||||
"DateTime": PortSchema(name="DateTime", fields=[
|
||||
PortField(name="iso", type="str", description="ISO-8601 Datum/Zeit"),
|
||||
PortField(name="timezone", type="str", required=False,
|
||||
description="IANA Timezone"),
|
||||
]),
|
||||
"Url": PortSchema(name="Url", fields=[
|
||||
PortField(name="url", type="str", description="Vollständige URL"),
|
||||
PortField(name="label", type="str", required=False, description="Anzeigename"),
|
||||
]),
|
||||
}
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Catalog validator
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
# Primitives accepted as PortField.type in addition to catalog schema names.
|
||||
PRIMITIVE_TYPES: frozenset = frozenset({
|
||||
"str", "int", "bool", "float", "Any", "Dict", "List",
|
||||
})
|
||||
|
||||
|
||||
def _stripContainer(typeStr: str) -> List[str]:
|
||||
"""
|
||||
Extract referenced type names from a PortField.type string.
|
||||
|
||||
Examples:
|
||||
"str" -> ["str"]
|
||||
"List[Document]" -> ["Document"]
|
||||
"Dict[str,Any]" -> ["str", "Any"]
|
||||
"ConnectionRef" -> ["ConnectionRef"]
|
||||
"List[ProcessError]" -> ["ProcessError"]
|
||||
"""
|
||||
s = (typeStr or "").strip()
|
||||
if not s:
|
||||
return []
|
||||
if "[" in s and s.endswith("]"):
|
||||
# outer container ignored, inner parts split by comma
|
||||
inner = s[s.index("[") + 1 : -1]
|
||||
parts = [p.strip() for p in inner.split(",") if p.strip()]
|
||||
return parts or [s]
|
||||
return [s]
|
||||
|
||||
|
||||
def _isKnownType(typeName: str) -> bool:
|
||||
return typeName in PRIMITIVE_TYPES or typeName in PORT_TYPE_CATALOG
|
||||
|
||||
|
||||
def _validateCatalog() -> List[str]:
|
||||
"""
|
||||
Validate PORT_TYPE_CATALOG integrity.
|
||||
|
||||
Returns a list of error messages. Empty list means catalog is healthy.
|
||||
|
||||
Checks:
|
||||
1. Every PortField.type references either a primitive or a known schema.
|
||||
2. Discriminator fields exist, are typed "str", and at most one per schema.
|
||||
3. No cyclic references via required schema-typed fields
|
||||
(optional fields may form cycles intentionally, e.g. provenance).
|
||||
4. Schema name in catalog key matches PortSchema.name.
|
||||
"""
|
||||
errors: List[str] = []
|
||||
|
||||
# Check 4: key consistency
|
||||
for key, schema in PORT_TYPE_CATALOG.items():
|
||||
if schema.name != key:
|
||||
errors.append(f"Catalog key '{key}' does not match schema.name '{schema.name}'")
|
||||
|
||||
# Check 1 + 2: type refs and discriminators
|
||||
for schemaName, schema in PORT_TYPE_CATALOG.items():
|
||||
discriminatorCount = 0
|
||||
for field in schema.fields:
|
||||
for refName in _stripContainer(field.type):
|
||||
if not _isKnownType(refName):
|
||||
errors.append(
|
||||
f"{schemaName}.{field.name}: unknown type '{refName}' "
|
||||
f"(not a primitive and not in catalog)"
|
||||
)
|
||||
if field.discriminator:
|
||||
discriminatorCount += 1
|
||||
if field.type != "str":
|
||||
errors.append(
|
||||
f"{schemaName}.{field.name}: discriminator must be 'str', got '{field.type}'"
|
||||
)
|
||||
if discriminatorCount > 1:
|
||||
errors.append(
|
||||
f"{schemaName}: has {discriminatorCount} discriminator fields, max 1 allowed"
|
||||
)
|
||||
|
||||
# Check 3: cycles via required schema-typed fields
|
||||
def _requiredSchemaRefs(name: str) -> List[str]:
|
||||
sch = PORT_TYPE_CATALOG.get(name)
|
||||
if not sch:
|
||||
return []
|
||||
out: List[str] = []
|
||||
for field in sch.fields:
|
||||
if not field.required:
|
||||
continue
|
||||
for ref in _stripContainer(field.type):
|
||||
if ref in PORT_TYPE_CATALOG:
|
||||
out.append(ref)
|
||||
return out
|
||||
|
||||
def _hasCycle(start: str) -> Optional[List[str]]:
|
||||
stack: List[str] = [start]
|
||||
path: List[str] = []
|
||||
visiting: set = set()
|
||||
|
||||
def _dfs(name: str) -> Optional[List[str]]:
|
||||
if name in visiting:
|
||||
return path + [name]
|
||||
visiting.add(name)
|
||||
path.append(name)
|
||||
for ref in _requiredSchemaRefs(name):
|
||||
if ref == start and len(path) > 0:
|
||||
return path + [ref]
|
||||
cycle = _dfs(ref)
|
||||
if cycle:
|
||||
return cycle
|
||||
path.pop()
|
||||
visiting.discard(name)
|
||||
return None
|
||||
|
||||
return _dfs(start)
|
||||
|
||||
for schemaName in PORT_TYPE_CATALOG.keys():
|
||||
cycle = _hasCycle(schemaName)
|
||||
if cycle and cycle[0] == schemaName:
|
||||
errors.append(
|
||||
f"{schemaName}: cyclic required-ref chain: {' -> '.join(cycle)}"
|
||||
)
|
||||
break # one cycle is enough — avoid spamming
|
||||
|
||||
return errors
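A typical fail-fast use of this validator is a one-line unit test or startup assertion along these lines (the test name is an assumption; the call itself matches the function above):

    def test_port_type_catalog_is_consistent():
        errors = _validateCatalog()
        assert not errors, "\n".join(errors)  # any message means a broken schema reference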
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# SYSTEM_VARIABLES
|
||||
# ---------------------------------------------------------------------------
|
||||
|
|
@ -192,7 +610,7 @@ SYSTEM_VARIABLES: Dict[str, Dict[str, str]] = {
|
|||
}
|
||||
|
||||
|
||||
def _resolveSystemVariable(variable: str, context: Dict[str, Any]) -> Any:
|
||||
def resolveSystemVariable(variable: str, context: Dict[str, Any]) -> Any:
|
||||
"""Resolve a system variable name to its runtime value."""
|
||||
from datetime import datetime, timezone
|
||||
|
||||
|
|
@ -224,7 +642,7 @@ def _resolveSystemVariable(variable: str, context: Dict[str, Any]) -> Any:
|
|||
# Output normalizers
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def _normalizeToSchema(raw: Any, schemaName: str) -> Dict[str, Any]:
|
||||
def normalizeToSchema(raw: Any, schemaName: str) -> Dict[str, Any]:
|
||||
"""
|
||||
Normalize raw executor output to match the declared port schema.
|
||||
Ensures _success/_error meta-fields are always present.
|
||||
|
|
@ -259,6 +677,8 @@ def _defaultForType(typeStr: str) -> Any:
|
|||
return 0
|
||||
if typeStr == "str":
|
||||
return ""
|
||||
if typeStr in PORT_TYPE_CATALOG:
|
||||
return {}
|
||||
return None
|
||||
|
||||
|
||||
|
|
@ -272,220 +692,16 @@ def _normalizeError(error: Exception, schemaName: str) -> Dict[str, Any]:
|
|||
return result
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Input extractors (one per input port type)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def _extractEmailDraft(upstream: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""Extract EmailDraft fields from upstream output."""
|
||||
result = {}
|
||||
if upstream.get("responseData") and isinstance(upstream["responseData"], dict):
|
||||
rd = upstream["responseData"]
|
||||
for key in ("subject", "body", "to", "cc"):
|
||||
if key in rd:
|
||||
result[key] = rd[key]
|
||||
if not result:
|
||||
for key in ("subject", "body", "to", "cc"):
|
||||
if key in upstream:
|
||||
result[key] = upstream[key]
|
||||
return result
|
||||
|
||||
|
||||
def _extractDocuments(upstream: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""Extract documents from upstream output."""
|
||||
docs = upstream.get("documents") or upstream.get("documentList") or []
|
||||
if not docs and isinstance(upstream.get("data"), dict):
|
||||
docs = upstream["data"].get("documents") or upstream["data"].get("documentList") or []
|
||||
# input.upload format
|
||||
if not docs:
|
||||
files = upstream.get("files") or []
|
||||
fileObj = upstream.get("file")
|
||||
fileIds = upstream.get("fileIds") or []
|
||||
if fileObj:
|
||||
docs = [fileObj]
|
||||
elif files:
|
||||
docs = files
|
||||
elif fileIds:
|
||||
docs = [{"validationMetadata": {"fileId": fid}} for fid in fileIds]
|
||||
normalized = docs if isinstance(docs, list) else [docs]
|
||||
return {"documents": normalized, "documentList": normalized} if docs else {}
|
||||
|
||||
|
||||
def _extractText(upstream: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""Extract text from upstream output."""
|
||||
text = upstream.get("text") or upstream.get("response") or upstream.get("context") or ""
|
||||
if not text and upstream.get("payload"):
|
||||
import json
|
||||
payload = upstream["payload"]
|
||||
text = json.dumps(payload, ensure_ascii=False) if isinstance(payload, dict) else str(payload)
|
||||
return {"text": str(text)} if text else {}
|
||||
|
||||
|
||||
def _extractEmailList(upstream: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""Extract email list from upstream output."""
|
||||
emails = upstream.get("emails") or []
|
||||
if not emails:
|
||||
docs = upstream.get("documents") or upstream.get("documentList") or []
|
||||
if docs:
|
||||
import json
|
||||
for doc in docs:
|
||||
raw = doc.get("documentData") if isinstance(doc, dict) else None
|
||||
if raw:
|
||||
try:
|
||||
data = json.loads(raw) if isinstance(raw, str) else raw
|
||||
if isinstance(data, dict):
|
||||
found = (data.get("emails", {}).get("emails", [])
|
||||
or data.get("searchResults", {}).get("results", []))
|
||||
if found:
|
||||
emails = found
|
||||
break
|
||||
except (json.JSONDecodeError, TypeError):
|
||||
pass
|
||||
return {"emails": emails} if emails else {}
|
||||
|
||||
|
||||
def _extractTaskList(upstream: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""Extract task list from upstream output."""
|
||||
tasks = upstream.get("tasks") or []
|
||||
if not tasks:
|
||||
docs = upstream.get("documents") or upstream.get("documentList") or []
|
||||
if docs:
|
||||
import json
|
||||
for doc in docs:
|
||||
raw = doc.get("documentData") if isinstance(doc, dict) else None
|
||||
if raw:
|
||||
try:
|
||||
data = json.loads(raw) if isinstance(raw, str) else raw
|
||||
if isinstance(data, dict) and "tasks" in data:
|
||||
tasks = data["tasks"]
|
||||
break
|
||||
except (json.JSONDecodeError, TypeError):
|
||||
pass
|
||||
return {"tasks": tasks} if tasks else {}
|
||||
|
||||
|
||||
def _extractFileList(upstream: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""Extract file list from upstream output."""
|
||||
files = upstream.get("files") or []
|
||||
return {"files": files} if files else {}
|
||||
|
||||
|
||||
def _extractFormPayload(upstream: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""Extract form payload from upstream output."""
|
||||
payload = upstream.get("payload")
|
||||
if payload and isinstance(payload, dict):
|
||||
return {"payload": payload}
|
||||
return {}
|
||||
|
||||
|
||||
def _extractAiResult(upstream: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""Extract AI result fields from upstream output."""
|
||||
result = {}
|
||||
for key in ("prompt", "response", "responseData", "context", "documents"):
|
||||
if key in upstream:
|
||||
result[key] = upstream[key]
|
||||
return result
|
||||
|
||||
|
||||
def _extractBoolResult(upstream: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""Extract bool result from upstream output."""
|
||||
result = upstream.get("result")
|
||||
if isinstance(result, bool):
|
||||
return {"result": result, "reason": upstream.get("reason", "")}
|
||||
approved = upstream.get("approved")
|
||||
if isinstance(approved, bool):
|
||||
return {"result": approved, "reason": upstream.get("reason", "")}
|
||||
return {}
|
||||
|
||||
|
||||
def _extractTaskResult(upstream: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""Extract task result from upstream output."""
|
||||
result = {}
|
||||
if "taskId" in upstream:
|
||||
result["taskId"] = upstream["taskId"]
|
||||
if "task" in upstream:
|
||||
result["task"] = upstream["task"]
|
||||
elif "clickupTask" in upstream:
|
||||
result["task"] = upstream["clickupTask"]
|
||||
if "success" in upstream:
|
||||
result["success"] = upstream["success"]
|
||||
return result
|
||||
|
||||
|
||||
def _extractAggregateResult(upstream: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""Extract aggregate result from upstream output."""
|
||||
items = upstream.get("items") or []
|
||||
return {"items": items, "count": len(items)}
|
||||
|
||||
|
||||
def _extractMergeResult(upstream: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""Extract merge result from upstream output."""
|
||||
return {
|
||||
"inputs": upstream.get("inputs", {}),
|
||||
"first": upstream.get("first"),
|
||||
"merged": upstream.get("merged", {}),
|
||||
}
|
||||
|
||||
|
||||
def _extractUdmDocument(upstream: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""Extract UdmDocument fields from upstream output."""
|
||||
if upstream.get("children") is not None and upstream.get("sourceType"):
|
||||
return upstream
|
||||
udm = upstream.get("udm")
|
||||
if isinstance(udm, dict) and udm.get("children") is not None:
|
||||
return udm
|
||||
return {}
|
||||
|
||||
|
||||
def _extractUdmNodeList(upstream: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""Extract UdmNodeList fields from upstream output."""
|
||||
nodes = upstream.get("nodes")
|
||||
if isinstance(nodes, list):
|
||||
return {"nodes": nodes, "count": len(nodes)}
|
||||
children = upstream.get("children")
|
||||
if isinstance(children, list):
|
||||
return {"nodes": children, "count": len(children)}
|
||||
return {}
|
||||
|
||||
|
||||
def _extractConsolidateResult(upstream: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""Extract ConsolidateResult fields from upstream output."""
|
||||
result = {}
|
||||
for key in ("result", "mode", "count"):
|
||||
if key in upstream:
|
||||
result[key] = upstream[key]
|
||||
return result
|
||||
|
||||
|
||||
INPUT_EXTRACTORS: Dict[str, Callable] = {
|
||||
"EmailDraft": _extractEmailDraft,
|
||||
"DocumentList": _extractDocuments,
|
||||
"TextResult": _extractText,
|
||||
"EmailList": _extractEmailList,
|
||||
"TaskList": _extractTaskList,
|
||||
"FileList": _extractFileList,
|
||||
"FormPayload": _extractFormPayload,
|
||||
"AiResult": _extractAiResult,
|
||||
"BoolResult": _extractBoolResult,
|
||||
"TaskResult": _extractTaskResult,
|
||||
"AggregateResult": _extractAggregateResult,
|
||||
"MergeResult": _extractMergeResult,
|
||||
"UdmDocument": _extractUdmDocument,
|
||||
"UdmNodeList": _extractUdmNodeList,
|
||||
"ConsolidateResult": _extractConsolidateResult,
|
||||
}
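The registry is intended as a plain dispatch table; a hedged sketch of the lookup (the wrapper name and argument names are assumptions):

    def _extractForPort(portType: str, upstream: Dict[str, Any]) -> Dict[str, Any]:
        # Fall back to an empty dict when no extractor is registered for this port type.
        extractor = INPUT_EXTRACTORS.get(portType)
        return extractor(upstream) if extractor else {}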
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Transit helpers
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def _wrapTransit(data: Any, meta: Dict[str, Any]) -> Dict[str, Any]:
|
||||
def wrapTransit(data: Any, meta: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""Wrap data in a Transit envelope."""
|
||||
return {"_transit": True, "_meta": meta, "data": data}
|
||||
|
||||
|
||||
def _unwrapTransit(output: Any) -> Any:
|
||||
def unwrapTransit(output: Any) -> Any:
|
||||
"""Unwrap a Transit envelope, returning the inner data."""
|
||||
if isinstance(output, dict) and output.get("_transit"):
|
||||
return output.get("data")
|
||||
|
|
@ -510,10 +726,10 @@ def _resolveTransitChain(
|
|||
return out
|
||||
sources = connectionMap.get(current, [])
|
||||
if not sources:
|
||||
return _unwrapTransit(out)
|
||||
return unwrapTransit(out)
|
||||
srcId = sources[0][0] if sources else None
|
||||
if not srcId:
|
||||
return _unwrapTransit(out)
|
||||
return unwrapTransit(out)
|
||||
current = srcId
|
||||
return nodeOutputs.get(nodeId)
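For reference, the envelope produced by wrapTransit and consumed by unwrapTransit round-trips as follows (the meta keys are an assumption; the envelope keys match the helpers above):

    envelope = wrapTransit({"files": []}, meta={"sourceNodeId": "n1"})
    assert envelope["_transit"] is True
    assert unwrapTransit(envelope) == {"files": []}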
|
||||
|
||||
|
|
@ -522,27 +738,83 @@ def _resolveTransitChain(
|
|||
# Schema derivation for dynamic outputs
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def _deriveFormPayloadSchema(node: Dict[str, Any]) -> Optional[PortSchema]:
|
||||
"""Derive output schema from form field definitions."""
|
||||
fields_param = (node.get("parameters") or {}).get("fields")
|
||||
def deriveFormPayloadSchemaFromParam(node: Dict[str, Any], param_key: str) -> Optional[PortSchema]:
|
||||
"""Derive output schema from a field-builder JSON list (``fields``, ``formFields``, …)."""
|
||||
fields_param = (node.get("parameters") or {}).get(param_key)
|
||||
if not fields_param or not isinstance(fields_param, list):
|
||||
return None
|
||||
portFields = []
|
||||
for f in fields_param:
|
||||
if isinstance(f, dict) and f.get("name"):
|
||||
_lab = f.get("label")
|
||||
_desc = resolveText(_lab) if _lab is not None else f["name"]
|
||||
if not _desc.strip():
|
||||
_desc = f["name"]
|
||||
portFields: List[PortField] = []
|
||||
|
||||
def _append_field(fname: str, ftype: Any, lab: Any, required: bool) -> None:
|
||||
_desc = resolveText(lab) if lab is not None else fname
|
||||
if not str(_desc).strip():
|
||||
_desc = fname
|
||||
portFields.append(PortField(
|
||||
name=f["name"],
|
||||
type=f.get("type", "str"),
|
||||
name=fname,
|
||||
type=str(ftype) if ftype is not None else "str",
|
||||
description=_desc,
|
||||
required=f.get("required", False),
|
||||
required=required,
|
||||
))
|
||||
|
||||
for f in fields_param:
|
||||
if not isinstance(f, dict) or not f.get("name"):
|
||||
continue
|
||||
fname = str(f["name"])
|
||||
if str(f.get("type", "")).lower() == "group" and isinstance(f.get("fields"), list):
|
||||
for sub in f["fields"]:
|
||||
if isinstance(sub, dict) and sub.get("name"):
|
||||
_append_field(
|
||||
f"{fname}.{sub['name']}",
|
||||
sub.get("type", "str"),
|
||||
sub.get("label"),
|
||||
bool(sub.get("required", False)),
|
||||
)
|
||||
continue
|
||||
_append_field(fname, f.get("type", "str"), f.get("label"), bool(f.get("required", False)))
|
||||
return PortSchema(name="FormPayload_dynamic", fields=portFields) if portFields else None
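As an illustration of the group flattening, a field-builder list like the one below (invented values) yields PortFields named "invoice.number" and "invoice.amount" plus "note"; the group container itself is not emitted as a field:

    fields_param = [
        {"name": "invoice", "type": "group", "fields": [
            {"name": "number", "type": "str", "required": True},
            {"name": "amount", "type": "float"},
        ]},
        {"name": "note", "type": "str", "label": "Bemerkung"},
    ]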
|
||||
|
||||
|
||||
def _deriveFormPayloadSchema(node: Dict[str, Any]) -> Optional[PortSchema]:
|
||||
"""Derive output schema from form field definitions (``parameters.fields``)."""
|
||||
return deriveFormPayloadSchemaFromParam(node, "fields")
|
||||
|
||||
|
||||
def parse_graph_defined_output_schema(
|
||||
node: Dict[str, Any],
|
||||
output_port: Dict[str, Any],
|
||||
) -> Optional[PortSchema]:
|
||||
"""
|
||||
Resolve a node's output port to a concrete PortSchema.
|
||||
|
||||
Supports:
|
||||
- Static catalog name: ``schema: "ActionResult"``
|
||||
- Graph-defined: ``schema: {"kind": "fromGraph", "parameter": "fields"}``
|
||||
- Legacy: ``dynamic`` + ``deriveFrom`` on the port dict.
|
||||
"""
|
||||
if not isinstance(output_port, dict):
|
||||
return None
|
||||
schema_spec = output_port.get("schema")
|
||||
if isinstance(schema_spec, dict) and schema_spec.get("kind") == "fromGraph":
|
||||
param_key = str(schema_spec.get("parameter") or "fields")
|
||||
return deriveFormPayloadSchemaFromParam(node, param_key)
|
||||
if output_port.get("dynamic") and output_port.get("deriveFrom"):
|
||||
return deriveFormPayloadSchemaFromParam(node, str(output_port.get("deriveFrom")))
|
||||
if isinstance(schema_spec, str) and schema_spec:
|
||||
return PORT_TYPE_CATALOG.get(schema_spec)
|
||||
return None
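The three accepted port specifications therefore look like this (parameter names are placeholders):

    static_port = {"schema": "ActionResult"}                                    # catalog lookup
    graph_port  = {"schema": {"kind": "fromGraph", "parameter": "formFields"}}  # derived from node parameters
    legacy_port = {"dynamic": True, "deriveFrom": "fields"}                     # old style, still honoured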
|
||||
|
||||
|
||||
def resolve_output_schema_name(node: Dict[str, Any], output_port: Dict[str, Any]) -> str:
|
||||
"""Return a schema name for port compatibility / path listing."""
|
||||
derived = parse_graph_defined_output_schema(node, output_port)
|
||||
if derived:
|
||||
return derived.name
|
||||
spec = output_port.get("schema") if isinstance(output_port, dict) else None
|
||||
if isinstance(spec, str) and spec:
|
||||
return spec
|
||||
return "Any"
|
||||
|
||||
|
||||
def _deriveTransformSchema(node: Dict[str, Any]) -> Optional[PortSchema]:
|
||||
"""Derive output schema from transform mappings."""
|
||||
mappings = (node.get("parameters") or {}).get("mappings")
|
||||
|
|
|
|||
|
|
@ -14,7 +14,7 @@ from fastapi import APIRouter, Depends, Path, Query, Body, Request, HTTPExceptio
|
|||
from fastapi.responses import JSONResponse, StreamingResponse, Response
|
||||
from modules.auth import limiter, getRequestContext, RequestContext
|
||||
from modules.datamodels.datamodelPagination import PaginationParams, PaginationMetadata, normalize_pagination_dict
|
||||
from modules.routes.routeHelpers import _applyFiltersAndSort
|
||||
from modules.routes.routeHelpers import applyFiltersAndSort
|
||||
|
||||
from modules.features.graphicalEditor.mainGraphicalEditor import getGraphicalEditorServices
|
||||
from modules.features.graphicalEditor.nodeRegistry import getNodeTypesForApi
|
||||
|
|
@ -26,6 +26,7 @@ from modules.workflows.automation2.runEnvelope import (
|
|||
normalize_run_envelope,
|
||||
)
|
||||
from modules.features.graphicalEditor.entryPoints import find_invocation
|
||||
from modules.features.graphicalEditor.upstreamPathsService import compute_upstream_paths
|
||||
from modules.shared.i18nRegistry import apiRouteContext, resolveText
|
||||
routeApiMsg = apiRouteContext("routeFeatureGraphicalEditor")
|
||||
|
||||
|
|
@ -135,6 +136,48 @@ def get_node_types(
|
|||
return result
|
||||
|
||||
|
||||
@router.post("/{instanceId}/upstream-paths")
|
||||
@limiter.limit("60/minute")
|
||||
def post_upstream_paths(
|
||||
request: Request,
|
||||
instanceId: str = Path(..., description="Feature instance ID"),
|
||||
body: Dict[str, Any] = Body(...),
|
||||
context: RequestContext = Depends(getRequestContext),
|
||||
) -> dict:
|
||||
"""Return pickable upstream DataRef paths for a node (draft graph in body)."""
|
||||
_validateInstanceAccess(instanceId, context)
|
||||
graph = body.get("graph")
|
||||
node_id = body.get("nodeId")
|
||||
if not isinstance(graph, dict) or not node_id:
|
||||
raise HTTPException(status_code=400, detail=routeApiMsg("graph and nodeId are required"))
|
||||
paths = compute_upstream_paths(graph, str(node_id))
|
||||
return {"paths": paths}
|
||||
|
||||
|
||||
@router.get("/{instanceId}/upstream-paths/{node_id}")
|
||||
@limiter.limit("60/minute")
|
||||
def get_upstream_paths_saved(
|
||||
request: Request,
|
||||
instanceId: str = Path(..., description="Feature instance ID"),
|
||||
node_id: str = Path(..., description="Target node id"),
|
||||
workflowId: str = Query(..., description="Workflow id whose saved graph is used"),
|
||||
context: RequestContext = Depends(getRequestContext),
|
||||
) -> dict:
|
||||
"""Return upstream paths using the persisted workflow graph (same payload as POST variant)."""
|
||||
mandate_id = _validateInstanceAccess(instanceId, context)
|
||||
if not workflowId:
|
||||
raise HTTPException(status_code=400, detail=routeApiMsg("workflowId is required"))
|
||||
from modules.features.graphicalEditor.interfaceFeatureGraphicalEditor import getGraphicalEditorInterface
|
||||
|
||||
iface = getGraphicalEditorInterface(context.user, mandate_id, featureInstanceId=instanceId)
|
||||
wf = iface.getWorkflow(workflowId)
|
||||
if not wf:
|
||||
raise HTTPException(status_code=404, detail=routeApiMsg("Workflow not found"))
|
||||
graph = wf.get("graph") or {}
|
||||
paths = compute_upstream_paths(graph if isinstance(graph, dict) else {}, str(node_id))
|
||||
return {"paths": paths}
|
||||
|
||||
|
||||
@router.get("/{instanceId}/options/user.connection")
|
||||
@limiter.limit("60/minute")
|
||||
def get_user_connection_options(
|
||||
|
|
@ -187,6 +230,65 @@ def get_user_connection_options(
|
|||
return {"options": options}
|
||||
|
||||
|
||||
@router.get("/{instanceId}/options/feature.instance")
|
||||
@limiter.limit("60/minute")
|
||||
def get_feature_instance_options(
|
||||
request: Request,
|
||||
instanceId: str = Path(..., description="GraphicalEditor feature instance ID (workflow context)"),
|
||||
featureCode: str = Query(..., description="Feature code to filter by (e.g. 'trustee', 'redmine', 'clickup')"),
|
||||
enabledOnly: bool = Query(True, description="If true (default), only enabled feature instances are returned"),
|
||||
context: RequestContext = Depends(getRequestContext),
|
||||
) -> dict:
|
||||
"""Return mandate-scoped FeatureInstances for the given featureCode.
|
||||
|
||||
Used by node parameters with frontendType='featureInstance' (e.g. Trustee
|
||||
or Redmine nodes that need to bind to a specific tenant FeatureInstance).
|
||||
Always restricted to the calling user's mandate (derived from the workflow
|
||||
feature instance) so the picker never leaks foreign-mandate instances.
|
||||
|
||||
Response: { options: [ { value: "<id>", label: "<displayName> ([code])" } ] }
|
||||
"""
|
||||
mandateId = _validateInstanceAccess(instanceId, context)
|
||||
if not context.user:
|
||||
raise HTTPException(status_code=401, detail=routeApiMsg("Authentication required"))
|
||||
code = (featureCode or "").strip().lower()
|
||||
if not code:
|
||||
raise HTTPException(status_code=400, detail=routeApiMsg("featureCode query parameter is required"))
|
||||
if not mandateId:
|
||||
return {"options": []}
|
||||
|
||||
from modules.interfaces.interfaceDbApp import getRootInterface
|
||||
rootInterface = getRootInterface()
|
||||
try:
|
||||
instances = rootInterface.getFeatureInstancesByMandate(
|
||||
mandateId, enabledOnly=bool(enabledOnly)
|
||||
) or []
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
"get_feature_instance_options: failed to load instances mandateId=%s: %s",
|
||||
mandateId, e, exc_info=True,
|
||||
)
|
||||
return {"options": []}
|
||||
|
||||
options: List[Dict[str, str]] = []
|
||||
for fi in instances:
|
||||
fiCode = (getattr(fi, "featureCode", "") or "").strip().lower()
|
||||
if fiCode != code:
|
||||
continue
|
||||
fiId = str(getattr(fi, "id", "") or "")
|
||||
if not fiId:
|
||||
continue
|
||||
rawLabel = getattr(fi, "label", None) or getattr(fi, "name", None) or fiId
|
||||
options.append({"value": fiId, "label": f"{rawLabel} ({fiCode})"})
|
||||
|
||||
logger.info(
|
||||
"graphicalEditor feature.instance options: instanceId=%s mandateId=%s "
|
||||
"featureCode=%s enabledOnly=%s -> %d options",
|
||||
instanceId, mandateId, code, enabledOnly, len(options),
|
||||
)
|
||||
return {"options": options}
|
||||
|
||||
|
||||
@router.post("/{instanceId}/execute")
|
||||
@limiter.limit("30/minute")
|
||||
async def post_execute(
|
||||
|
|
@ -424,13 +526,35 @@ def get_templates(
|
|||
instanceId: str = Path(..., description="Feature instance ID"),
|
||||
scope: Optional[str] = Query(None, description="Filter by scope: user, instance, mandate, system"),
|
||||
pagination: Optional[str] = Query(None, description="JSON-encoded PaginationParams object"),
|
||||
mode: Optional[str] = Query(None, description="'filterValues' for distinct column values, 'ids' for all filtered IDs"),
|
||||
column: Optional[str] = Query(None, description="Column key (required when mode=filterValues)"),
|
||||
context: RequestContext = Depends(getRequestContext),
|
||||
):
|
||||
"""List workflow templates with optional pagination."""
|
||||
"""List workflow templates with optional pagination.
|
||||
|
||||
Supports the FormGeneratorTable backend pattern:
|
||||
- default: paginated/filtered/sorted ``{items, pagination}`` response
|
||||
- ``mode=filterValues&column=X``: distinct values for column X (cross-filtered)
|
||||
- ``mode=ids``: all IDs matching current filters
|
||||
"""
|
||||
mandateId = _validateInstanceAccess(instanceId, context)
|
||||
iface = getGraphicalEditorInterface(context.user, mandateId, instanceId)
|
||||
templates = iface.getTemplates(scope=scope)
|
||||
|
||||
from modules.routes.routeHelpers import enrichRowsWithFkLabels
|
||||
from modules.features.graphicalEditor.datamodelFeatureGraphicalEditor import AutoWorkflow
|
||||
enrichRowsWithFkLabels(templates, AutoWorkflow)
|
||||
|
||||
if mode == "filterValues":
|
||||
if not column:
|
||||
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
|
||||
from modules.routes.routeHelpers import handleFilterValuesInMemory
|
||||
return handleFilterValuesInMemory(templates, column, pagination)
|
||||
|
||||
if mode == "ids":
|
||||
from modules.routes.routeHelpers import handleIdsInMemory
|
||||
return handleIdsInMemory(templates, pagination)
|
||||
|
||||
paginationParams = None
|
||||
if pagination:
|
||||
try:
|
||||
|
|
@ -442,7 +566,7 @@ def get_templates(
|
|||
raise HTTPException(status_code=400, detail=f"Invalid pagination parameter: {str(e)}")
|
||||
|
||||
if paginationParams:
|
||||
filtered = _applyFiltersAndSort(templates, paginationParams)
|
||||
filtered = applyFiltersAndSort(templates, paginationParams)
|
||||
totalItems = len(filtered)
|
||||
totalPages = math.ceil(totalItems / paginationParams.pageSize) if totalItems > 0 else 0
|
||||
startIdx = (paginationParams.page - 1) * paginationParams.pageSize
|
||||
|
|
@ -813,6 +937,7 @@ async def _runEditorAgent(
|
|||
"\n\nAvailable tools (all valid — use whichever the user's intent calls for):"
|
||||
"\n Graph-mutating: readWorkflowGraph, listAvailableNodeTypes, "
|
||||
"describeNodeType, addNode, removeNode, connectNodes, setNodeParameter, "
|
||||
"listUpstreamPaths, bindNodeParameter, "
|
||||
"autoLayoutWorkflow, validateGraph."
|
||||
"\n Workflow lifecycle: createWorkflow (new empty workflow), "
|
||||
"updateWorkflowMetadata (rename / change description / tags / activate), "
|
||||
|
|
@ -844,6 +969,8 @@ async def _runEditorAgent(
|
|||
"description, sane defaults, or — for required user-connection fields — "
|
||||
"an actual connectionId). Do NOT pass position; the layout step handles it."
|
||||
"\n6. connectNodes — wire the nodes consistent with port schemas from describeNodeType."
|
||||
"\n6b. When a parameter must take data from an upstream node, call listUpstreamPaths(nodeId=target) "
|
||||
"then bindNodeParameter(producerNodeId, path, parameterName) — do not rely on implicit wire fill."
|
||||
"\n7. autoLayoutWorkflow — call exactly once as the LAST graph-mutating step so the "
|
||||
"canvas shows a readable top-down layout instead of overlapping boxes."
|
||||
"\n8. validateGraph — sanity check, then answer the user."
|
||||
|
|
@ -860,15 +987,15 @@ async def _runEditorAgent(
|
|||
|
||||
enrichedPrompt = prompt
|
||||
if dataSourceIds:
|
||||
from modules.features.workspace.routeFeatureWorkspace import _buildDataSourceContext
|
||||
from modules.features.workspace.routeFeatureWorkspace import buildDataSourceContext
|
||||
chatSvc = getService("chat", ctx)
|
||||
dsInfo = _buildDataSourceContext(chatSvc, dataSourceIds)
|
||||
dsInfo = buildDataSourceContext(chatSvc, dataSourceIds)
|
||||
if dsInfo:
|
||||
enrichedPrompt = f"{prompt}\n\n[Active Data Sources]\n{dsInfo}"
|
||||
|
||||
if featureDataSourceIds:
|
||||
from modules.features.workspace.routeFeatureWorkspace import _buildFeatureDataSourceContext
|
||||
fdsInfo = _buildFeatureDataSourceContext(featureDataSourceIds)
|
||||
from modules.features.workspace.routeFeatureWorkspace import buildFeatureDataSourceContext
|
||||
fdsInfo = buildFeatureDataSourceContext(featureDataSourceIds)
|
||||
if fdsInfo:
|
||||
enrichedPrompt = f"{enrichedPrompt}\n\n[Attached Feature Data Sources]\n{fdsInfo}"
|
||||
|
||||
|
|
@ -1133,9 +1260,17 @@ def get_workflows(
|
|||
instanceId: str = Path(..., description="Feature instance ID"),
|
||||
active: Optional[bool] = Query(None, description="Filter by active: true|false"),
|
||||
pagination: Optional[str] = Query(None, description="JSON-encoded PaginationParams object"),
|
||||
mode: Optional[str] = Query(None, description="'filterValues' for distinct column values, 'ids' for all filtered IDs"),
|
||||
column: Optional[str] = Query(None, description="Column key (required when mode=filterValues)"),
|
||||
context: RequestContext = Depends(getRequestContext),
|
||||
):
|
||||
"""List all workflows for this feature instance."""
|
||||
"""List all workflows for this feature instance.
|
||||
|
||||
Supports the FormGeneratorTable backend pattern:
|
||||
- default: paginated/filtered/sorted ``{items, pagination}`` response
|
||||
- ``mode=filterValues&column=X``: distinct values for column X (cross-filtered)
|
||||
- ``mode=ids``: all IDs matching current filters (for "select all")
|
||||
"""
|
||||
mandateId = _validateInstanceAccess(instanceId, context)
|
||||
iface = getGraphicalEditorInterface(context.user, mandateId, instanceId)
|
||||
items = iface.getWorkflows(active=active)
|
||||
|
|
@ -1163,10 +1298,19 @@ def get_workflows(
|
|||
"runStatus": active_run.get("status") if active_run else None,
|
||||
"stuckAtNodeId": stuck_at_node_id,
|
||||
"stuckAtNodeLabel": stuck_at_node_label or stuck_at_node_id or "",
|
||||
"createdAt": wf.get("sysCreatedAt"),
|
||||
"lastStartedAt": last_started_at,
|
||||
})
|
||||
|
||||
if mode == "filterValues":
|
||||
if not column:
|
||||
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
|
||||
from modules.routes.routeHelpers import handleFilterValuesInMemory
|
||||
return handleFilterValuesInMemory(enriched, column, pagination)
|
||||
|
||||
if mode == "ids":
|
||||
from modules.routes.routeHelpers import handleIdsInMemory
|
||||
return handleIdsInMemory(enriched, pagination)
|
||||
|
||||
paginationParams = None
|
||||
if pagination:
|
||||
try:
|
||||
|
|
@ -1178,7 +1322,7 @@ def get_workflows(
|
|||
raise HTTPException(status_code=400, detail=f"Invalid pagination parameter: {str(e)}")
|
||||
|
||||
if paginationParams:
|
||||
filtered = _applyFiltersAndSort(enriched, paginationParams)
|
||||
filtered = applyFiltersAndSort(enriched, paginationParams)
|
||||
totalItems = len(filtered)
|
||||
totalPages = math.ceil(totalItems / paginationParams.pageSize) if totalItems > 0 else 0
|
||||
startIdx = (paginationParams.page - 1) * paginationParams.pageSize
|
||||
|
|
|
|||
128
modules/features/graphicalEditor/upstreamPathsService.py
Normal file
|
|
@ -0,0 +1,128 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
"""Compute pickable upstream paths for DataPicker / AI workflow tools."""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any, Dict, List, Set
|
||||
|
||||
from modules.features.graphicalEditor.nodeDefinitions import STATIC_NODE_TYPES
|
||||
from modules.features.graphicalEditor.portTypes import PORT_TYPE_CATALOG, PortSchema, parse_graph_defined_output_schema
|
||||
from modules.workflows.automation2.graphUtils import buildConnectionMap
|
||||
|
||||
_NODE_BY_TYPE = {n["id"]: n for n in STATIC_NODE_TYPES}
|
||||
|
||||
|
||||
def _paths_for_port_schema(schema: PortSchema, producer_node_id: str) -> List[Dict[str, Any]]:
|
||||
out: List[Dict[str, Any]] = []
|
||||
for field in schema.fields:
|
||||
path = [field.name]
|
||||
out.append(
|
||||
{
|
||||
"producerNodeId": producer_node_id,
|
||||
"path": path,
|
||||
"type": field.type,
|
||||
"label": ".".join(str(p) for p in path),
|
||||
"scopeOrigin": "data",
|
||||
}
|
||||
)
|
||||
out.append(
|
||||
{
|
||||
"producerNodeId": producer_node_id,
|
||||
"path": [],
|
||||
"type": schema.name,
|
||||
"label": "(whole output)",
|
||||
"scopeOrigin": "data",
|
||||
}
|
||||
)
|
||||
return out
|
||||
|
||||
|
||||
def _paths_for_schema(schema_name: str, producer_node_id: str) -> List[Dict[str, Any]]:
|
||||
if not schema_name or schema_name == "Transit":
|
||||
return []
|
||||
schema = PORT_TYPE_CATALOG.get(schema_name)
|
||||
if not schema:
|
||||
return []
|
||||
return _paths_for_port_schema(schema, producer_node_id)
|
||||
|
||||
|
||||
def compute_upstream_paths(graph: Dict[str, Any], target_node_id: str) -> List[Dict[str, Any]]:
|
||||
"""
|
||||
Return flattened first-level paths for every ancestor node's primary output schema.
|
||||
"""
|
||||
nodes = graph.get("nodes") or []
|
||||
connections = graph.get("connections") or []
|
||||
node_by_id = {n["id"]: n for n in nodes if n.get("id")}
|
||||
if target_node_id not in node_by_id:
|
||||
return []
|
||||
|
||||
conn_map = buildConnectionMap(connections)
|
||||
# predecessors: walk backwards along edges (target -> source)
|
||||
preds: Dict[str, Set[str]] = {}
|
||||
for tgt, pairs in conn_map.items():
|
||||
for src, _, _ in pairs:
|
||||
preds.setdefault(tgt, set()).add(src)
|
||||
|
||||
seen: Set[str] = set()
|
||||
stack = [target_node_id]
|
||||
ancestors: Set[str] = set()
|
||||
while stack:
|
||||
cur = stack.pop()
|
||||
for p in preds.get(cur, ()):
|
||||
if p not in seen:
|
||||
seen.add(p)
|
||||
ancestors.add(p)
|
||||
stack.append(p)
|
||||
|
||||
paths: List[Dict[str, Any]] = []
|
||||
for aid in sorted(ancestors):
|
||||
anode = node_by_id.get(aid)
|
||||
if not anode:
|
||||
continue
|
||||
nt = anode.get("type", "")
|
||||
ndef = _NODE_BY_TYPE.get(nt)
|
||||
if not ndef:
|
||||
continue
|
||||
out0 = (ndef.get("outputPorts") or {}).get(0, {})
|
||||
derived = parse_graph_defined_output_schema(anode, out0 if isinstance(out0, dict) else {})
|
||||
if derived:
|
||||
for entry in _paths_for_port_schema(derived, aid):
|
||||
entry["producerLabel"] = (anode.get("title") or "").strip() or aid
|
||||
paths.append(entry)
|
||||
else:
|
||||
raw_schema = out0.get("schema") if isinstance(out0, dict) else None
|
||||
schema_name = raw_schema if isinstance(raw_schema, str) and raw_schema else "ActionResult"
|
||||
for entry in _paths_for_schema(schema_name, aid):
|
||||
entry["producerLabel"] = (anode.get("title") or "").strip() or aid
|
||||
paths.append(entry)
|
||||
|
||||
# Lexical loop hints (flow.loop): any loop node in ancestors adds synthetic paths
|
||||
for aid in ancestors:
|
||||
anode = node_by_id.get(aid) or {}
|
||||
if anode.get("type") == "flow.loop":
|
||||
paths.extend(
|
||||
[
|
||||
{
|
||||
"producerNodeId": aid,
|
||||
"path": ["currentItem"],
|
||||
"type": "Any",
|
||||
"label": "loop.currentItem",
|
||||
"scopeOrigin": "loop",
|
||||
},
|
||||
{
|
||||
"producerNodeId": aid,
|
||||
"path": ["currentIndex"],
|
||||
"type": "int",
|
||||
"label": "loop.currentIndex",
|
||||
"scopeOrigin": "loop",
|
||||
},
|
||||
{
|
||||
"producerNodeId": aid,
|
||||
"path": ["count"],
|
||||
"type": "int",
|
||||
"label": "loop.count",
|
||||
"scopeOrigin": "loop",
|
||||
},
|
||||
]
|
||||
)
|
||||
|
||||
return paths
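Each returned entry is a flat dict; on a two-node placeholder graph the call yields entries shaped like the comment below (the field values follow from the ActionResult catalog entry, the graph itself is hypothetical):

    paths = compute_upstream_paths(draft_graph, "n2")
    # e.g. {"producerNodeId": "n1", "path": ["documents"], "type": "List[ActionDocument]",
    #       "label": "documents", "scopeOrigin": "data", "producerLabel": "n1"}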
|
||||
|
|
@ -32,7 +32,7 @@ class DataNeutraliserConfig(PowerOnModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": True,
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
featureInstanceId: str = Field(
|
||||
|
|
@ -42,7 +42,7 @@ class DataNeutraliserConfig(PowerOnModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": True,
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
userId: str = Field(
|
||||
|
|
@ -52,7 +52,7 @@ class DataNeutraliserConfig(PowerOnModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": True,
|
||||
"fk_target": {"db": "poweron_app", "table": "User"},
|
||||
"fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
|
||||
},
|
||||
)
|
||||
enabled: bool = Field(
|
||||
|
|
@ -107,7 +107,7 @@ class DataNeutralizerAttributes(BaseModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": True,
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
featureInstanceId: str = Field(
|
||||
|
|
@ -117,7 +117,7 @@ class DataNeutralizerAttributes(BaseModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": True,
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
userId: str = Field(
|
||||
|
|
@ -127,7 +127,7 @@ class DataNeutralizerAttributes(BaseModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": True,
|
||||
"fk_target": {"db": "poweron_app", "table": "User"},
|
||||
"fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
|
||||
},
|
||||
)
|
||||
originalText: str = Field(
|
||||
|
|
@ -142,7 +142,7 @@ class DataNeutralizerAttributes(BaseModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"fk_target": {"db": "poweron_management", "table": "FileItem"},
|
||||
"fk_target": {"db": "poweron_management", "table": "FileItem", "labelField": "fileName"},
|
||||
},
|
||||
)
|
||||
patternType: str = Field(
|
||||
|
|
@ -160,16 +160,16 @@ class DataNeutralizationSnapshot(BaseModel):
|
|||
)
|
||||
mandateId: str = Field(
|
||||
description="Mandate scope",
|
||||
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate"}},
|
||||
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}},
|
||||
)
|
||||
featureInstanceId: str = Field(
|
||||
default="",
|
||||
description="Feature instance scope",
|
||||
json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
|
||||
json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}},
|
||||
)
|
||||
userId: str = Field(
|
||||
description="User who triggered neutralization",
|
||||
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "User"}},
|
||||
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"}},
|
||||
)
|
||||
sourceLabel: str = Field(
|
||||
description="Human label, e.g. 'Prompt', 'Kontext', 'Nachricht 3'",
|
||||
|
|
|
|||
|
|
@ -288,7 +288,7 @@ class Kanton(PowerOnModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": False,
|
||||
"frontend_required": False,
|
||||
"fk_target": {"db": "poweron_realestate", "table": "Land"},
|
||||
"fk_target": {"db": "poweron_realestate", "table": "Land", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
abk: Optional[str] = Field(
|
||||
|
|
@ -348,7 +348,7 @@ class Gemeinde(BaseModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": False,
|
||||
"frontend_required": False,
|
||||
"fk_target": {"db": "poweron_realestate", "table": "Kanton"},
|
||||
"fk_target": {"db": "poweron_realestate", "table": "Kanton", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
plz: Optional[str] = Field(
|
||||
|
|
@ -398,7 +398,7 @@ class Parzelle(PowerOnModel):
|
|||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"label": "Mandats-ID",
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
featureInstanceId: str = Field(
|
||||
|
|
@ -408,7 +408,7 @@ class Parzelle(PowerOnModel):
|
|||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"label": "Feature-Instanz-ID",
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
|
||||
|
|
@ -472,7 +472,7 @@ class Parzelle(PowerOnModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": False,
|
||||
"frontend_required": False,
|
||||
"fk_target": {"db": "poweron_realestate", "table": "Gemeinde"},
|
||||
"fk_target": {"db": "poweron_realestate", "table": "Gemeinde", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
|
||||
|
|
@ -638,7 +638,7 @@ class Projekt(PowerOnModel):
|
|||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"label": "Mandats-ID",
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
featureInstanceId: str = Field(
|
||||
|
|
@ -648,7 +648,7 @@ class Projekt(PowerOnModel):
|
|||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"label": "Feature-Instanz-ID",
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
label: str = Field(
|
||||
|
|
|
|||
|
|
@ -228,31 +228,27 @@ def get_projects(
|
|||
recordFilter = {"featureInstanceId": instanceId}
|
||||
|
||||
if mode in ("filterValues", "ids"):
|
||||
from modules.routes.routeHelpers import handleFilterValuesInMemory, handleIdsInMemory
|
||||
from modules.routes.routeHelpers import handleFilterValuesInMemory, handleIdsInMemory, enrichRowsWithFkLabels
|
||||
items = interface.getProjekte(recordFilter=recordFilter)
|
||||
itemDicts = [i.model_dump() if hasattr(i, 'model_dump') else i for i in items]
|
||||
if mode == "filterValues":
|
||||
if not column:
|
||||
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
|
||||
enrichRowsWithFkLabels(itemDicts, Projekt)
|
||||
return handleFilterValuesInMemory(itemDicts, column, pagination)
|
||||
return handleIdsInMemory(itemDicts, pagination)
|
||||
|
||||
items = interface.getProjekte(recordFilter=recordFilter)
|
||||
paginationParams = _parsePagination(pagination)
|
||||
if paginationParams:
|
||||
if paginationParams.sort:
|
||||
for sort_field in reversed(paginationParams.sort):
|
||||
field_name = sort_field.field
|
||||
direction = sort_field.direction.lower()
|
||||
items.sort(
|
||||
key=lambda x: getattr(x, field_name, None),
|
||||
reverse=(direction == "desc")
|
||||
)
|
||||
total_items = len(items)
|
||||
from modules.routes.routeHelpers import applyFiltersAndSort
|
||||
itemDicts = [i.model_dump() if hasattr(i, 'model_dump') else i for i in items]
|
||||
filtered = applyFiltersAndSort(itemDicts, paginationParams)
|
||||
total_items = len(filtered)
|
||||
total_pages = (total_items + paginationParams.pageSize - 1) // paginationParams.pageSize
|
||||
start_idx = (paginationParams.page - 1) * paginationParams.pageSize
|
||||
end_idx = start_idx + paginationParams.pageSize
|
||||
paginated_items = items[start_idx:end_idx]
|
||||
paginated_items = filtered[start_idx:end_idx]
|
||||
return PaginatedResponse(
|
||||
items=paginated_items,
|
||||
pagination=PaginationMetadata(
|
||||
|
|
@ -373,31 +369,27 @@ def get_parcels(
|
|||
recordFilter = {"featureInstanceId": instanceId}
|
||||
|
||||
if mode in ("filterValues", "ids"):
|
||||
from modules.routes.routeHelpers import handleFilterValuesInMemory, handleIdsInMemory
|
||||
from modules.routes.routeHelpers import handleFilterValuesInMemory, handleIdsInMemory, enrichRowsWithFkLabels
|
||||
items = interface.getParzellen(recordFilter=recordFilter)
|
||||
itemDicts = [i.model_dump() if hasattr(i, 'model_dump') else i for i in items]
|
||||
if mode == "filterValues":
|
||||
if not column:
|
||||
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
|
||||
enrichRowsWithFkLabels(itemDicts, Parzelle)
|
||||
return handleFilterValuesInMemory(itemDicts, column, pagination)
|
||||
return handleIdsInMemory(itemDicts, pagination)
|
||||
|
||||
items = interface.getParzellen(recordFilter=recordFilter)
|
||||
paginationParams = _parsePagination(pagination)
|
||||
if paginationParams:
|
||||
if paginationParams.sort:
|
||||
for sort_field in reversed(paginationParams.sort):
|
||||
field_name = sort_field.field
|
||||
direction = sort_field.direction.lower()
|
||||
items.sort(
|
||||
key=lambda x: getattr(x, field_name, None),
|
||||
reverse=(direction == "desc")
|
||||
)
|
||||
total_items = len(items)
|
||||
from modules.routes.routeHelpers import applyFiltersAndSort
|
||||
itemDicts = [i.model_dump() if hasattr(i, 'model_dump') else i for i in items]
|
||||
filtered = applyFiltersAndSort(itemDicts, paginationParams)
|
||||
total_items = len(filtered)
|
||||
total_pages = (total_items + paginationParams.pageSize - 1) // paginationParams.pageSize
|
||||
start_idx = (paginationParams.page - 1) * paginationParams.pageSize
|
||||
end_idx = start_idx + paginationParams.pageSize
|
||||
paginated_items = items[start_idx:end_idx]
|
||||
paginated_items = filtered[start_idx:end_idx]
|
||||
return PaginatedResponse(
|
||||
items=paginated_items,
|
||||
pagination=PaginationMetadata(
|
||||
|
|
|
|||
|
|
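Both list endpoints above now convert the model instances to dicts once and run FK-label enrichment, filtering, sorting and slicing over those same dicts, so the reported total reflects the filtered set rather than the raw result. A self-contained sketch of that order of operations; `applyFiltersAndSortSketch` and the pagination fields are stand-ins with assumed defaults, not the project's real helpers, and column filters are omitted for brevity.

# Sketch of the in-memory sort -> count -> slice order used above.
from dataclasses import dataclass, field
from typing import Any, Dict, List, Tuple

@dataclass
class SortSpec:
    field: str
    direction: str = "asc"

@dataclass
class PaginationSketch:
    page: int = 1
    pageSize: int = 25          # default assumed
    sort: List[SortSpec] = field(default_factory=list)

def applyFiltersAndSortSketch(rows: List[Dict[str, Any]], p: PaginationSketch) -> List[Dict[str, Any]]:
    out = list(rows)
    # Stable sorts: apply the least significant key first, the primary key last.
    for spec in reversed(p.sort):
        out.sort(
            key=lambda r: (r.get(spec.field) is None, r.get(spec.field)),  # group None values
            reverse=(spec.direction.lower() == "desc"),
        )
    return out

def paginateSketch(rows: List[Dict[str, Any]], p: PaginationSketch) -> Tuple[List[Dict[str, Any]], int, int]:
    filtered = applyFiltersAndSortSketch(rows, p)     # filter/sort BEFORE counting
    total = len(filtered)
    totalPages = (total + p.pageSize - 1) // p.pageSize
    start = (p.page - 1) * p.pageSize
    return filtered[start:start + p.pageSize], total, totalPages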
@ -75,7 +75,7 @@ class RedmineInstanceConfig(PowerOnModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": True,
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
mandateId: Optional[str] = Field(
|
||||
|
|
@ -86,7 +86,7 @@ class RedmineInstanceConfig(PowerOnModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
baseUrl: str = Field(
|
||||
|
|
@ -195,7 +195,7 @@ class RedmineTicketMirror(PowerOnModel):
|
|||
featureInstanceId: str = Field(
|
||||
description="FK -> FeatureInstance.id",
|
||||
json_schema_extra={"label": "Feature-Instanz", "frontend_type": "text", "frontend_readonly": True, "frontend_required": True,
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}},
|
||||
)
|
||||
mandateId: Optional[str] = Field(
|
||||
default=None,
|
||||
|
|
@ -226,14 +226,14 @@ class RedmineTicketMirror(PowerOnModel):
|
|||
closedOnTs: Optional[float] = Field(
|
||||
default=None,
|
||||
description="Best-effort UTC epoch when the ticket transitioned to a closed status. Approximated as updatedOnTs for closed tickets at sync time; used by Stats to render the open-vs-total snapshot chart.",
|
||||
json_schema_extra={"label": "closedOn (epoch)", "frontend_type": "number", "frontend_readonly": True, "frontend_required": False, "frontend_hidden": True},
|
||||
json_schema_extra={"label": "closedOn (epoch)", "frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False, "frontend_hidden": True},
|
||||
)
|
||||
createdOn: Optional[str] = Field(default=None, json_schema_extra={"label": "Erstellt am (Redmine)", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
|
||||
updatedOn: Optional[str] = Field(default=None, json_schema_extra={"label": "Geaendert am (Redmine)", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
|
||||
createdOnTs: Optional[float] = Field(default=None, description="UTC epoch parsed from createdOn (for SQL filtering)",
|
||||
json_schema_extra={"label": "createdOn (epoch)", "frontend_type": "number", "frontend_readonly": True, "frontend_required": False, "frontend_hidden": True})
|
||||
json_schema_extra={"label": "createdOn (epoch)", "frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False, "frontend_hidden": True})
|
||||
updatedOnTs: Optional[float] = Field(default=None, description="UTC epoch parsed from updatedOn (for SQL filtering)",
|
||||
json_schema_extra={"label": "updatedOn (epoch)", "frontend_type": "number", "frontend_readonly": True, "frontend_required": False, "frontend_hidden": True})
|
||||
json_schema_extra={"label": "updatedOn (epoch)", "frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False, "frontend_hidden": True})
|
||||
customFields: Optional[List[Dict[str, Any]]] = Field(
|
||||
default=None,
|
||||
description="List of {id,name,value} as returned by Redmine; stored as JSON",
|
||||
|
|
@ -270,7 +270,7 @@ class RedmineRelationMirror(PowerOnModel):
|
|||
featureInstanceId: str = Field(
|
||||
description="FK -> FeatureInstance.id",
|
||||
json_schema_extra={"label": "Feature-Instanz", "frontend_type": "text", "frontend_readonly": True, "frontend_required": True,
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}},
|
||||
)
|
||||
redmineRelationId: int = Field(
|
||||
description="Redmine relation id (unique per feature instance)",
|
||||
|
|
@ -468,17 +468,17 @@ class RedmineSyncResultDto(BaseModel):
|
|||
ticketsUpserted: int = 0
|
||||
relationsUpserted: int = 0
|
||||
durationMs: int = 0
|
||||
lastSyncAt: float
|
||||
lastSyncAt: float = Field(json_schema_extra={"frontend_type": "timestamp"})
|
||||
error: Optional[str] = None
|
||||
|
||||
|
||||
class RedmineSyncStatusDto(BaseModel):
|
||||
instanceId: str
|
||||
lastSyncAt: Optional[float] = None
|
||||
lastFullSyncAt: Optional[float] = None
|
||||
lastSyncAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
|
||||
lastFullSyncAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
|
||||
lastSyncDurationMs: Optional[int] = None
|
||||
lastSyncTicketCount: Optional[int] = None
|
||||
lastSyncErrorAt: Optional[float] = None
|
||||
lastSyncErrorAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
|
||||
lastSyncErrorMessage: Optional[str] = None
|
||||
mirroredTicketCount: int = 0
|
||||
mirroredRelationCount: int = 0
|
||||
|
|
@ -513,11 +513,11 @@ class RedmineConfigDto(BaseModel):
|
|||
rootTrackerName: str = "Userstory"
|
||||
defaultPeriodValue: Optional[Dict[str, Any]] = None
|
||||
schemaCacheTtlSeconds: int = 24 * 60 * 60
|
||||
schemaCachedAt: Optional[float] = None
|
||||
schemaCachedAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
|
||||
isActive: bool = True
|
||||
lastConnectedAt: Optional[float] = None
|
||||
lastSyncAt: Optional[float] = None
|
||||
lastFullSyncAt: Optional[float] = None
|
||||
lastConnectedAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
|
||||
lastSyncAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
|
||||
lastFullSyncAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
|
||||
lastSyncTicketCount: Optional[int] = None
|
||||
lastSyncErrorMessage: Optional[str] = None
|
||||
|
||||
|
|
|
|||
|
|
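The `frontend_type: "timestamp"` marker added to the epoch-float fields above travels through `json_schema_extra` into the generated JSON Schema, which is how a generic frontend can render such values as localized dates rather than raw numbers. A quick sketch of where the marker ends up (the model name is hypothetical):

from typing import Optional
from pydantic import BaseModel, Field

class SyncStatusSketch(BaseModel):
    lastSyncAt: Optional[float] = Field(
        default=None,
        json_schema_extra={"frontend_type": "timestamp"},
    )

schema = SyncStatusSketch.model_json_schema()
# json_schema_extra keys are merged into the property's schema alongside
# anyOf/default, so a generic table builder can branch on them per column.
assert schema["properties"]["lastSyncAt"]["frontend_type"] == "timestamp"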
@ -48,7 +48,7 @@ from modules.features.redmine.interfaceFeatureRedmine import (
|
|||
RedmineObjects,
|
||||
getInterface,
|
||||
)
|
||||
from modules.features.redmine.serviceRedmineStatsCache import _getStatsCache
|
||||
from modules.features.redmine.serviceRedmineStatsCache import getStatsCache
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
|
@ -334,7 +334,7 @@ def getTicket(
|
|||
|
||||
def _invalidateCache(featureInstanceId: str) -> None:
|
||||
try:
|
||||
_getStatsCache().invalidateInstance(featureInstanceId)
|
||||
getStatsCache().invalidateInstance(featureInstanceId)
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to invalidate stats cache for {featureInstanceId}: {e}")
|
||||
|
||||
|
|
|
|||
|
|
@ -38,7 +38,7 @@ from modules.features.redmine.datamodelRedmine import (
|
|||
RedmineThroughputBucket,
|
||||
RedmineTicketDto,
|
||||
)
|
||||
from modules.features.redmine.serviceRedmineStatsCache import _getStatsCache
|
||||
from modules.features.redmine.serviceRedmineStatsCache import getStatsCache
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
|
@ -69,7 +69,7 @@ async def getStats(
|
|||
if status_norm not in {"*", "open", "closed"}:
|
||||
status_norm = "*"
|
||||
|
||||
cache = _getStatsCache()
|
||||
cache = getStatsCache()
|
||||
# Cache key now includes the new dimensions so different filter combos
|
||||
# don't collide. ``_freeze`` (in the cache module) hashes lists/sets
|
||||
# for us, so we can pass them directly as extra dimensions.
|
||||
|
|
|
|||
|
|
@ -123,7 +123,7 @@ class RedmineStatsCache:
|
|||
_globalCache: Optional[RedmineStatsCache] = None
|
||||
|
||||
|
||||
def _getStatsCache() -> RedmineStatsCache:
|
||||
def getStatsCache() -> RedmineStatsCache:
|
||||
"""Process-wide singleton."""
|
||||
global _globalCache
|
||||
if _globalCache is None:
|
||||
|
|
|
|||
|
|
@ -38,7 +38,7 @@ from modules.features.redmine.datamodelRedmine import (
|
|||
RedmineTicketMirror,
|
||||
)
|
||||
from modules.features.redmine.interfaceFeatureRedmine import getInterface
|
||||
from modules.features.redmine.serviceRedmineStatsCache import _getStatsCache
|
||||
from modules.features.redmine.serviceRedmineStatsCache import getStatsCache
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
|
@ -79,6 +79,16 @@ async def runSync(
|
|||
|
||||
async with _lockFor(featureInstanceId):
|
||||
started = time.monotonic()
|
||||
|
||||
# CRITICAL: ensure the schema cache (especially the per-status
|
||||
# ``isClosed`` map) is populated BEFORE we iterate issues. Redmine's
|
||||
# /issues.json endpoint only returns ``{id, name}`` for the status
|
||||
# object -- the closed/open flag lives in /issue_statuses.json. If
|
||||
# the cache is empty here, every freshly-synced ticket would land
|
||||
# with ``isClosed=False`` and the Stats page would be useless.
|
||||
await _ensureSchemaWarm(currentUser, mandateId, featureInstanceId)
|
||||
cfg = iface.getConfig(featureInstanceId) # re-read to get warm cache
|
||||
|
||||
full = force or cfg.lastSyncAt is None
|
||||
updated_from_iso: Optional[str] = None
|
||||
if not full and cfg.lastSyncAt is not None:
|
||||
|
|
@ -107,6 +117,15 @@ async def runSync(
|
|||
tickets_upserted += _upsertTicket(iface, featureInstanceId, mandateId, issue, now_epoch)
|
||||
relations_upserted += _replaceRelations(iface, featureInstanceId, issue, now_epoch)
|
||||
|
||||
# Self-healing pass: re-apply ``isClosed`` to every mirrored ticket
|
||||
# using the now-warm schema cache. Fixes pre-existing rows that were
|
||||
# synced before the cache was populated (cheap; mirror-local only).
|
||||
flags_fixed = _rebuildIsClosedFromSchema(iface, featureInstanceId, now_epoch)
|
||||
if flags_fixed:
|
||||
logger.info(
|
||||
f"runSync({featureInstanceId}): corrected isClosed on {flags_fixed} mirror rows"
|
||||
)
|
||||
|
||||
duration_ms = int((time.monotonic() - started) * 1000)
|
||||
iface.recordSyncSuccess(
|
||||
featureInstanceId,
|
||||
|
|
@ -115,7 +134,7 @@ async def runSync(
|
|||
durationMs=duration_ms,
|
||||
lastSyncAt=now_epoch,
|
||||
)
|
||||
_getStatsCache().invalidateInstance(featureInstanceId)
|
||||
getStatsCache().invalidateInstance(featureInstanceId)
|
||||
|
||||
return RedmineSyncResultDto(
|
||||
instanceId=featureInstanceId,
|
||||
|
|
@ -169,7 +188,7 @@ async def upsertSingleTicket(
|
|||
now_epoch = time.time()
|
||||
_upsertTicket(iface, featureInstanceId, mandateId, issue, now_epoch)
|
||||
relations_upserted = _replaceRelations(iface, featureInstanceId, issue, now_epoch)
|
||||
_getStatsCache().invalidateInstance(featureInstanceId)
|
||||
getStatsCache().invalidateInstance(featureInstanceId)
|
||||
return relations_upserted
|
||||
|
||||
|
||||
|
|
@ -183,7 +202,7 @@ def deleteMirroredTicket(
|
|||
iface = getInterface(currentUser, mandateId=mandateId, featureInstanceId=featureInstanceId)
|
||||
deleted = iface.deleteMirroredTicket(featureInstanceId, int(issueId))
|
||||
iface.deleteMirroredRelationsForIssue(featureInstanceId, int(issueId))
|
||||
_getStatsCache().invalidateInstance(featureInstanceId)
|
||||
getStatsCache().invalidateInstance(featureInstanceId)
|
||||
return deleted
|
||||
|
||||
|
||||
|
|
@ -240,6 +259,80 @@ def _replaceRelations(
|
|||
return inserted
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Schema cache warm-up + post-sync isClosed correction
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
async def _ensureSchemaWarm(
|
||||
currentUser: User,
|
||||
mandateId: Optional[str],
|
||||
featureInstanceId: str,
|
||||
) -> None:
|
||||
"""Make sure ``cfg.schemaCache['statuses']`` exists with the per-status
|
||||
``isClosed`` flag. Called at the start of every sync because Redmine's
|
||||
``/issues.json`` doesn't expose ``is_closed`` on the inline status
|
||||
object, so we MUST resolve it via the schema.
|
||||
"""
|
||||
iface = getInterface(currentUser, mandateId=mandateId, featureInstanceId=featureInstanceId)
|
||||
cfg = iface.getConfig(featureInstanceId)
|
||||
if cfg is None:
|
||||
return
|
||||
statuses = (cfg.schemaCache or {}).get("statuses") or []
|
||||
if statuses:
|
||||
return
|
||||
# Lazy import to avoid a circular dependency at module load.
|
||||
from modules.features.redmine.serviceRedmine import getProjectMeta
|
||||
try:
|
||||
await getProjectMeta(currentUser, mandateId, featureInstanceId, forceRefresh=True)
|
||||
except Exception as e:
|
||||
logger.warning(
|
||||
f"_ensureSchemaWarm({featureInstanceId}): could not warm schema cache: {e} "
|
||||
"-- isClosed flags may be inaccurate until next successful schema fetch."
|
||||
)
|
||||
|
||||
|
||||
def _rebuildIsClosedFromSchema(iface, featureInstanceId: str, nowEpoch: float) -> int:
|
||||
"""Walk the mirror once and fix ``isClosed`` (and ``closedOnTs``) for any
|
||||
ticket whose stored value disagrees with the current schema cache.
|
||||
|
||||
Returns the number of rows that were actually corrected. A no-op when
|
||||
the schema cache has no statuses (logged once, then the caller can
|
||||
decide whether to retry).
|
||||
"""
|
||||
cfg = iface.getConfig(featureInstanceId)
|
||||
if cfg is None:
|
||||
return 0
|
||||
statuses = (cfg.schemaCache or {}).get("statuses") or []
|
||||
if not statuses:
|
||||
return 0
|
||||
closed_ids = {int(s.get("id")) for s in statuses if s.get("id") is not None and s.get("isClosed")}
|
||||
rows = iface.listMirroredTickets(featureInstanceId)
|
||||
corrections = 0
|
||||
for row in rows:
|
||||
sid = row.get("statusId")
|
||||
if sid is None:
|
||||
continue
|
||||
should_be_closed = int(sid) in closed_ids
|
||||
if bool(row.get("isClosed")) == should_be_closed:
|
||||
continue
|
||||
# Only the closed/open flag (and the derived closedOnTs) are
|
||||
# touched here -- everything else came from Redmine and stays.
|
||||
update = {
|
||||
"isClosed": bool(should_be_closed),
|
||||
"closedOnTs": float(row.get("updatedOnTs")) if (should_be_closed and row.get("updatedOnTs") is not None) else None,
|
||||
"syncedAt": nowEpoch,
|
||||
}
|
||||
try:
|
||||
iface.upsertMirroredTicket(featureInstanceId, int(row.get("redmineId")), {**row, **update})
|
||||
corrections += 1
|
||||
except Exception as e:
|
||||
logger.warning(
|
||||
f"_rebuildIsClosedFromSchema({featureInstanceId}): could not fix ticket "
|
||||
f"#{row.get('redmineId')}: {e}"
|
||||
)
|
||||
return corrections
|
||||
|
||||
|
||||
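The warm-up is needed because, as the comments above note, the status object inlined in `/issues.json` typically carries only `{id, name}`, while the authoritative closed flag (`is_closed`) lives in `/issue_statuses.json`. A tiny standalone sketch of the resolution step, with payload shapes abbreviated from the Redmine REST API:

# Abbreviated payload shapes; real responses contain more keys.
issue_statuses = {"issue_statuses": [
    {"id": 1, "name": "New", "is_closed": False},
    {"id": 5, "name": "Closed", "is_closed": True},
]}
issues = [
    {"id": 101, "status": {"id": 5, "name": "Closed"}},  # no is_closed inline
    {"id": 102, "status": {"id": 1, "name": "New"}},
]

closed_ids = {s["id"] for s in issue_statuses["issue_statuses"] if s.get("is_closed")}
for issue in issues:
    is_closed = issue["status"]["id"] in closed_ids
    print(issue["id"], is_closed)   # 101 True, 102 False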
# ---------------------------------------------------------------------------
|
||||
# Pure helpers
|
||||
# ---------------------------------------------------------------------------
|
||||
|
|
|
|||
|
|
@ -4,7 +4,8 @@
|
|||
Teamsbot Feature - Data Models.
|
||||
Pydantic models for Teams Bot sessions, transcripts, bot responses, and configuration.
|
||||
"""
|
||||
from typing import Optional, List, Dict, Any
|
||||
from typing import Optional, List, Dict, Any, Literal
|
||||
from datetime import datetime, timezone
|
||||
from pydantic import BaseModel, Field
|
||||
from enum import Enum
|
||||
import uuid
|
||||
|
|
@ -12,6 +13,14 @@ import uuid
|
|||
from modules.datamodels.datamodelBase import PowerOnModel
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Director Prompt Limits
|
||||
# ============================================================================
|
||||
|
||||
DIRECTOR_PROMPT_TEXT_LIMIT = 8000
|
||||
DIRECTOR_PROMPT_FILE_LIMIT = 10
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Enums
|
||||
# ============================================================================
|
||||
|
|
@ -82,8 +91,8 @@ class TeamsbotSession(PowerOnModel):
|
|||
meetingLink: str = Field(description="Teams meeting join link")
|
||||
botName: str = Field(default="AI Assistant", description="Display name of the bot in the meeting")
|
||||
status: TeamsbotSessionStatus = Field(default=TeamsbotSessionStatus.PENDING, description="Current session status")
|
||||
startedAt: Optional[str] = Field(default=None, description="ISO timestamp when session started")
|
||||
endedAt: Optional[str] = Field(default=None, description="ISO timestamp when session ended")
|
||||
startedAt: Optional[float] = Field(default=None, description="UTC unix timestamp when session started", json_schema_extra={"frontend_type": "timestamp"})
|
||||
endedAt: Optional[float] = Field(default=None, description="UTC unix timestamp when session ended", json_schema_extra={"frontend_type": "timestamp"})
|
||||
startedByUserId: str = Field(description="User ID who started the session")
|
||||
bridgeSessionId: Optional[str] = Field(default=None, description="Session ID on the .NET Media Bridge")
|
||||
meetingChatId: Optional[str] = Field(default=None, description="Teams meeting chat ID for Graph API messages")
|
||||
|
|
@ -100,7 +109,7 @@ class TeamsbotTranscript(PowerOnModel):
|
|||
sessionId: str = Field(description="Session ID (FK)")
|
||||
speaker: Optional[str] = Field(default=None, description="Speaker name or identifier")
|
||||
text: str = Field(description="Transcribed text")
|
||||
timestamp: str = Field(description="ISO timestamp of the speech segment")
|
||||
timestamp: float = Field(description="UTC unix timestamp of the speech segment", json_schema_extra={"frontend_type": "timestamp"})
|
||||
confidence: float = Field(default=0.0, ge=0.0, le=1.0, description="STT confidence score")
|
||||
language: Optional[str] = Field(default=None, description="Detected language code (e.g., de-DE)")
|
||||
isFinal: bool = Field(default=True, description="Whether this is a final or interim result")
|
||||
|
|
@ -119,7 +128,7 @@ class TeamsbotBotResponse(PowerOnModel):
|
|||
modelName: Optional[str] = Field(default=None, description="AI model used for this response")
|
||||
processingTime: float = Field(default=0.0, description="Processing time in seconds")
|
||||
priceCHF: float = Field(default=0.0, description="Cost of this AI call in CHF")
|
||||
timestamp: Optional[str] = Field(default=None, description="ISO timestamp of the response")
|
||||
timestamp: Optional[float] = Field(default=None, description="UTC unix timestamp of the response", json_schema_extra={"frontend_type": "timestamp"})
|
||||
|
||||
|
||||
# ============================================================================
|
||||
|
|
@ -267,6 +276,56 @@ class SpeechTeamsResponse(BaseModel):
|
|||
reasoning: str = Field(default="", description="Reasoning for the decision (for logging/debug)")
|
||||
detectedIntent: str = Field(default="none", description="Detected intent: addressed, question, proactive, stop, none")
|
||||
commands: Optional[List[TeamsbotCommand]] = Field(default=None, description="Optional list of commands to execute (e.g. toggle transcript, send chat, change language)")
|
||||
needsAgent: bool = Field(default=False, description="If True, escalate to agentService.runAgent for complex multi-step processing (web research, mail, etc.)")
|
||||
agentReason: Optional[str] = Field(default=None, description="Why escalation to the full agent is required (used as task brief for the agent)")
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Director Prompts (private operator instructions sent during a live meeting)
|
||||
# ============================================================================
|
||||
|
||||
class TeamsbotDirectorPromptStatus(str, Enum):
|
||||
"""Lifecycle status of a Director Prompt."""
|
||||
QUEUED = "queued"
|
||||
RUNNING = "running"
|
||||
SUCCEEDED = "succeeded"
|
||||
FAILED = "failed"
|
||||
CONSUMED = "consumed" # one-shot consumed; persistent prompts stay active
|
||||
|
||||
|
||||
class TeamsbotDirectorPromptMode(str, Enum):
|
||||
"""How long a Director Prompt remains effective."""
|
||||
ONE_SHOT = "oneShot"
|
||||
PERSISTENT = "persistent"
|
||||
|
||||
|
||||
class TeamsbotDirectorPrompt(PowerOnModel):
|
||||
"""A private operator instruction injected into the bot during a live meeting.
|
||||
|
||||
Stored in PostgreSQL so it survives reconnects (persistent prompts) and is
|
||||
auditable. Visible only to the session owner via SSE; invisible to other
|
||||
meeting participants.
|
||||
"""
|
||||
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Director prompt ID")
|
||||
sessionId: str = Field(description="Teams Bot session ID (FK)")
|
||||
instanceId: str = Field(description="Feature instance ID (FK)")
|
||||
operatorUserId: str = Field(description="User ID of the operator who issued the prompt")
|
||||
text: str = Field(description="The director instruction text", max_length=DIRECTOR_PROMPT_TEXT_LIMIT)
|
||||
mode: TeamsbotDirectorPromptMode = Field(default=TeamsbotDirectorPromptMode.ONE_SHOT, description="oneShot or persistent")
|
||||
fileIds: List[str] = Field(default_factory=list, description="UDB-selected file/object IDs to attach as RAG context")
|
||||
status: TeamsbotDirectorPromptStatus = Field(default=TeamsbotDirectorPromptStatus.QUEUED, description="Lifecycle status")
|
||||
statusMessage: Optional[str] = Field(default=None, description="Optional error or status detail")
|
||||
createdAt: float = Field(default_factory=lambda: datetime.now(timezone.utc).timestamp(), description="UTC unix timestamp when created", json_schema_extra={"frontend_type": "timestamp"})
|
||||
consumedAt: Optional[float] = Field(default=None, description="UTC unix timestamp when consumed (one-shot) or marked done", json_schema_extra={"frontend_type": "timestamp"})
|
||||
agentRunId: Optional[str] = Field(default=None, description="Reference to the agent run that processed this prompt")
|
||||
responseText: Optional[str] = Field(default=None, description="Final agent text delivered to the meeting")
|
||||
|
||||
|
||||
class TeamsbotDirectorPromptCreateRequest(BaseModel):
|
||||
"""Request body for submitting a new Director Prompt."""
|
||||
text: str = Field(description="Director instruction text", min_length=1, max_length=DIRECTOR_PROMPT_TEXT_LIMIT)
|
||||
mode: TeamsbotDirectorPromptMode = Field(default=TeamsbotDirectorPromptMode.ONE_SHOT, description="oneShot or persistent")
|
||||
fileIds: List[str] = Field(default_factory=list, description="UDB file IDs to attach (max 10)")
|
||||
|
||||
|
||||
# ============================================================================
|
||||
|
|
|
|||
|
|
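Because the text limit is declared directly on the Pydantic field (`max_length=DIRECTOR_PROMPT_TEXT_LIMIT`; the file-count check sits in the route), oversized prompts are rejected at parse time before any handler logic runs. A short sketch of that rejection:

from pydantic import BaseModel, Field, ValidationError

TEXT_LIMIT = 8000  # mirrors DIRECTOR_PROMPT_TEXT_LIMIT

class CreateRequestSketch(BaseModel):
    text: str = Field(min_length=1, max_length=TEXT_LIMIT)

try:
    CreateRequestSketch(text="x" * (TEXT_LIMIT + 1))
except ValidationError as exc:
    print(exc.errors()[0]["type"])   # 'string_too_long'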
@ -21,6 +21,9 @@ from .datamodelTeamsbot import (
|
|||
TeamsbotSystemBot,
|
||||
TeamsbotUserSettings,
|
||||
TeamsbotUserAccount,
|
||||
TeamsbotDirectorPrompt,
|
||||
TeamsbotDirectorPromptStatus,
|
||||
TeamsbotDirectorPromptMode,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
|
@ -84,7 +87,7 @@ class TeamsbotObjects:
|
|||
if not includeEnded:
|
||||
records = [r for r in records if r.get("status") != TeamsbotSessionStatus.ENDED.value]
|
||||
# Sort by startedAt descending
|
||||
records.sort(key=lambda r: r.get("startedAt") or "", reverse=True)
|
||||
records.sort(key=lambda r: r.get("startedAt") or 0, reverse=True)
|
||||
return records
|
||||
|
||||
def getActiveSessions(self, instanceId: str) -> List[Dict[str, Any]]:
|
||||
|
|
@ -114,11 +117,10 @@ class TeamsbotObjects:
|
|||
return self.db.recordModify(TeamsbotSession, sessionId, updates)
|
||||
|
||||
def deleteSession(self, sessionId: str) -> bool:
|
||||
"""Delete a session and all related transcripts and responses."""
|
||||
# Delete related records first
|
||||
"""Delete a session and all related transcripts, responses and director prompts."""
|
||||
self._deleteTranscriptsBySession(sessionId)
|
||||
self._deleteResponsesBySession(sessionId)
|
||||
# Delete session
|
||||
self._deletePromptsBySession(sessionId)
|
||||
return self.db.recordDelete(TeamsbotSession, sessionId)
|
||||
|
||||
# =========================================================================
|
||||
|
|
@ -131,7 +133,7 @@ class TeamsbotObjects:
|
|||
TeamsbotTranscript,
|
||||
recordFilter={"sessionId": sessionId},
|
||||
)
|
||||
records.sort(key=lambda r: r.get("timestamp") or "")
|
||||
records.sort(key=lambda r: r.get("timestamp") or 0)
|
||||
if offset:
|
||||
records = records[offset:]
|
||||
if limit:
|
||||
|
|
@ -144,7 +146,7 @@ class TeamsbotObjects:
|
|||
TeamsbotTranscript,
|
||||
recordFilter={"sessionId": sessionId},
|
||||
)
|
||||
records.sort(key=lambda r: r.get("timestamp") or "")
|
||||
records.sort(key=lambda r: r.get("timestamp") or 0)
|
||||
return records[-count:]
|
||||
|
||||
def createTranscript(self, transcriptData: Dict[str, Any]) -> Dict[str, Any]:
|
||||
|
|
@ -174,7 +176,7 @@ class TeamsbotObjects:
|
|||
TeamsbotBotResponse,
|
||||
recordFilter={"sessionId": sessionId},
|
||||
)
|
||||
records.sort(key=lambda r: r.get("timestamp") or "")
|
||||
records.sort(key=lambda r: r.get("timestamp") or 0)
|
||||
return records
|
||||
|
||||
def createBotResponse(self, responseData: Dict[str, Any]) -> Dict[str, Any]:
|
||||
|
|
@ -272,6 +274,62 @@ class TeamsbotObjects:
|
|||
"""Delete saved MS credentials."""
|
||||
return self.db.recordDelete(TeamsbotUserAccount, accountId)
|
||||
|
||||
# =========================================================================
|
||||
# Director Prompts (private operator instructions during a live meeting)
|
||||
# =========================================================================
|
||||
|
||||
def createDirectorPrompt(self, promptData: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""Create a new director prompt record."""
|
||||
return self.db.recordCreate(TeamsbotDirectorPrompt, promptData)
|
||||
|
||||
def getDirectorPrompt(self, promptId: str) -> Optional[Dict[str, Any]]:
|
||||
"""Get a single director prompt by ID."""
|
||||
records = self.db.getRecordset(TeamsbotDirectorPrompt, recordFilter={"id": promptId})
|
||||
return records[0] if records else None
|
||||
|
||||
def getDirectorPrompts(self, sessionId: str, operatorUserId: str | None = None) -> List[Dict[str, Any]]:
|
||||
"""Get all director prompts for a session, optionally filtered by operator."""
|
||||
recordFilter: Dict[str, Any] = {"sessionId": sessionId}
|
||||
if operatorUserId:
|
||||
recordFilter["operatorUserId"] = operatorUserId
|
||||
records = self.db.getRecordset(TeamsbotDirectorPrompt, recordFilter=recordFilter)
|
||||
records.sort(key=lambda r: r.get("createdAt") or 0)
|
||||
return records
|
||||
|
||||
def getActivePersistentPrompts(self, sessionId: str) -> List[Dict[str, Any]]:
|
||||
"""Get persistent prompts that are still active (not consumed/failed) for a session."""
|
||||
records = self.db.getRecordset(
|
||||
TeamsbotDirectorPrompt,
|
||||
recordFilter={
|
||||
"sessionId": sessionId,
|
||||
"mode": TeamsbotDirectorPromptMode.PERSISTENT.value,
|
||||
},
|
||||
)
|
||||
terminal = {
|
||||
TeamsbotDirectorPromptStatus.CONSUMED.value,
|
||||
TeamsbotDirectorPromptStatus.FAILED.value,
|
||||
}
|
||||
active = [r for r in records if r.get("status") not in terminal]
|
||||
active.sort(key=lambda r: r.get("createdAt") or 0)
|
||||
return active
|
||||
|
||||
def updateDirectorPrompt(self, promptId: str, updates: Dict[str, Any]) -> Optional[Dict[str, Any]]:
|
||||
"""Update a director prompt (status, response text, etc.)."""
|
||||
return self.db.recordModify(TeamsbotDirectorPrompt, promptId, updates)
|
||||
|
||||
def deleteDirectorPrompt(self, promptId: str) -> bool:
|
||||
"""Delete a director prompt (e.g. when operator removes a persistent prompt)."""
|
||||
return self.db.recordDelete(TeamsbotDirectorPrompt, promptId)
|
||||
|
||||
def _deletePromptsBySession(self, sessionId: str) -> int:
|
||||
"""Delete all director prompts for a session (called from deleteSession)."""
|
||||
records = self.db.getRecordset(TeamsbotDirectorPrompt, recordFilter={"sessionId": sessionId})
|
||||
count = 0
|
||||
for record in records:
|
||||
self.db.recordDelete(TeamsbotDirectorPrompt, record.get("id"))
|
||||
count += 1
|
||||
return count
|
||||
|
||||
# =========================================================================
|
||||
# Stats / Aggregation
|
||||
# =========================================================================
|
||||
|
|
|
|||
|
|
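`getActivePersistentPrompts` keeps every persistent prompt that has not reached a terminal state; the same selection with plain dicts:

# Plain-dict sketch of the active-persistent-prompt selection above.
TERMINAL = {"consumed", "failed"}

prompts = [
    {"id": "p1", "mode": "persistent", "status": "queued",   "createdAt": 10.0},
    {"id": "p2", "mode": "persistent", "status": "consumed", "createdAt": 20.0},
    {"id": "p3", "mode": "oneShot",    "status": "queued",   "createdAt": 30.0},
]

active = [p for p in prompts
          if p["mode"] == "persistent" and p["status"] not in TERMINAL]
active.sort(key=lambda p: p.get("createdAt") or 0)
print([p["id"] for p in active])   # ['p1']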
@ -36,6 +36,11 @@ from .datamodelTeamsbot import (
|
|||
TeamsbotUserAccount,
|
||||
TeamsbotResponseChannel,
|
||||
TeamsbotResponseMode,
|
||||
TeamsbotDirectorPromptCreateRequest,
|
||||
TeamsbotDirectorPromptMode,
|
||||
TeamsbotDirectorPromptStatus,
|
||||
DIRECTOR_PROMPT_FILE_LIMIT,
|
||||
DIRECTOR_PROMPT_TEXT_LIMIT,
|
||||
)
|
||||
|
||||
# Import service
|
||||
|
|
@ -378,16 +383,21 @@ async def streamSession(
|
|||
|
||||
async def _eventGenerator():
|
||||
"""Generate SSE events from the session event queue."""
|
||||
from .service import _sessionEvents
|
||||
from .service import sessionEvents
|
||||
|
||||
# Send initial session state
|
||||
yield f"data: {json.dumps({'type': 'sessionState', 'data': session})}\n\n"
|
||||
|
||||
# Send current bot WebSocket connection state so the operator UI can
|
||||
# render the live indicator without waiting for the next connect/disconnect.
|
||||
from .service import getActiveService as _getActiveService
|
||||
yield f"data: {json.dumps({'type': 'botConnectionState', 'data': {'connected': _getActiveService(sessionId) is not None}})}\n\n"
|
||||
|
||||
# Stream events
|
||||
eventQueue = _sessionEvents.get(sessionId)
|
||||
eventQueue = sessionEvents.get(sessionId)
|
||||
if not eventQueue:
|
||||
_sessionEvents[sessionId] = asyncio.Queue()
|
||||
eventQueue = _sessionEvents[sessionId]
|
||||
sessionEvents[sessionId] = asyncio.Queue()
|
||||
eventQueue = sessionEvents[sessionId]
|
||||
|
||||
try:
|
||||
while True:
|
||||
|
|
@ -800,8 +810,8 @@ async def deleteUserAccount(
|
|||
# MFA Code Submission (relayed to active bot session)
|
||||
# =========================================================================
|
||||
|
||||
_mfaCodeQueues: dict = {}
|
||||
_mfaWaitTasks: dict = {}
|
||||
mfaCodeQueues: dict = {}
|
||||
mfaWaitTasks: dict = {}
|
||||
|
||||
@router.post("/{instanceId}/sessions/{sessionId}/mfa")
|
||||
@limiter.limit("10/minute")
|
||||
|
|
@ -824,7 +834,7 @@ async def submitMfaCode(
|
|||
|
||||
logger.info(f"MFA submission for session {sessionId}: action={mfaAction}, codeLen={len(mfaCode)}")
|
||||
|
||||
queue = _mfaCodeQueues.get(sessionId)
|
||||
queue = mfaCodeQueues.get(sessionId)
|
||||
if queue:
|
||||
await queue.put({"action": mfaAction, "code": mfaCode})
|
||||
return {"submitted": True}
|
||||
|
|
@ -832,6 +842,132 @@ async def submitMfaCode(
|
|||
raise HTTPException(status_code=404, detail=routeApiMsg("No active MFA challenge for this session"))
|
||||
|
||||
|
||||
# =========================================================================
|
||||
# Director Prompts (private operator instructions during a live meeting)
|
||||
# =========================================================================
|
||||
|
||||
@router.post("/{instanceId}/sessions/{sessionId}/directorPrompts")
|
||||
@limiter.limit("30/minute")
|
||||
async def submitDirectorPrompt(
|
||||
request: Request,
|
||||
instanceId: str,
|
||||
sessionId: str,
|
||||
body: TeamsbotDirectorPromptCreateRequest,
|
||||
context: RequestContext = Depends(getRequestContext),
|
||||
):
|
||||
"""Submit a private director prompt to the running bot. Triggers the
|
||||
full agent path (web, mail, RAG, etc.) and delivers the answer into the
|
||||
meeting via TTS + chat. Only the session owner can submit prompts."""
|
||||
_validateInstanceAccess(instanceId, context)
|
||||
interface = _getInterface(context, instanceId)
|
||||
|
||||
session = interface.getSession(sessionId)
|
||||
if not session:
|
||||
raise HTTPException(status_code=404, detail=f"Session '{sessionId}' not found")
|
||||
_validateSessionOwnership(session, context)
|
||||
|
||||
if session.get("status") not in (
|
||||
TeamsbotSessionStatus.ACTIVE.value,
|
||||
TeamsbotSessionStatus.JOINING.value,
|
||||
):
|
||||
raise HTTPException(status_code=400, detail=routeApiMsg("Session is not active"))
|
||||
|
||||
text = (body.text or "").strip()
|
||||
if not text:
|
||||
raise HTTPException(status_code=400, detail=routeApiMsg("Prompt text is required"))
|
||||
if len(text) > DIRECTOR_PROMPT_TEXT_LIMIT:
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail=routeApiMsg(f"Prompt text exceeds limit of {DIRECTOR_PROMPT_TEXT_LIMIT} characters"),
|
||||
)
|
||||
fileIds = list(body.fileIds or [])
|
||||
if len(fileIds) > DIRECTOR_PROMPT_FILE_LIMIT:
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail=routeApiMsg(f"Too many files ({len(fileIds)}); max {DIRECTOR_PROMPT_FILE_LIMIT}"),
|
||||
)
|
||||
|
||||
from .service import getActiveService
|
||||
service = getActiveService(sessionId)
|
||||
if not service:
|
||||
raise HTTPException(
|
||||
status_code=409,
|
||||
detail=routeApiMsg(
|
||||
"Bot is not yet live in the meeting (no WebSocket connection). "
|
||||
"Wait until the bot status indicator turns green and try again."
|
||||
),
|
||||
)
|
||||
|
||||
created = await service.submitDirectorPrompt(
|
||||
sessionId=sessionId,
|
||||
operatorUserId=str(context.user.id),
|
||||
text=text,
|
||||
mode=body.mode,
|
||||
fileIds=fileIds,
|
||||
)
|
||||
return {"prompt": created}
|
||||
|
||||
|
||||
@router.get("/{instanceId}/sessions/{sessionId}/directorPrompts")
|
||||
@limiter.limit("30/minute")
|
||||
async def listDirectorPrompts(
|
||||
request: Request,
|
||||
instanceId: str,
|
||||
sessionId: str,
|
||||
context: RequestContext = Depends(getRequestContext),
|
||||
):
|
||||
"""List director prompts for a session (only operator's own prompts)."""
|
||||
_validateInstanceAccess(instanceId, context)
|
||||
interface = _getInterface(context, instanceId)
|
||||
|
||||
session = interface.getSession(sessionId)
|
||||
if not session:
|
||||
raise HTTPException(status_code=404, detail=f"Session '{sessionId}' not found")
|
||||
_validateSessionOwnership(session, context)
|
||||
|
||||
operatorUserId = None if context.isPlatformAdmin else str(context.user.id)
|
||||
prompts = interface.getDirectorPrompts(sessionId, operatorUserId=operatorUserId)
|
||||
return {"prompts": prompts}
|
||||
|
||||
|
||||
@router.delete("/{instanceId}/sessions/{sessionId}/directorPrompts/{promptId}")
|
||||
@limiter.limit("30/minute")
|
||||
async def deleteDirectorPrompt(
|
||||
request: Request,
|
||||
instanceId: str,
|
||||
sessionId: str,
|
||||
promptId: str,
|
||||
context: RequestContext = Depends(getRequestContext),
|
||||
):
|
||||
"""Remove a (typically persistent) director prompt. Marks it consumed so
|
||||
it no longer influences the bot. The DB record is kept for audit."""
|
||||
_validateInstanceAccess(instanceId, context)
|
||||
interface = _getInterface(context, instanceId)
|
||||
|
||||
session = interface.getSession(sessionId)
|
||||
if not session:
|
||||
raise HTTPException(status_code=404, detail=f"Session '{sessionId}' not found")
|
||||
_validateSessionOwnership(session, context)
|
||||
|
||||
prompt = interface.getDirectorPrompt(promptId)
|
||||
if not prompt or prompt.get("sessionId") != sessionId:
|
||||
raise HTTPException(status_code=404, detail=f"Prompt '{promptId}' not found")
|
||||
if not context.isPlatformAdmin and prompt.get("operatorUserId") != str(context.user.id):
|
||||
raise HTTPException(status_code=404, detail=f"Prompt '{promptId}' not found")
|
||||
|
||||
from .service import getActiveService
|
||||
service = getActiveService(sessionId)
|
||||
if service:
|
||||
await service.removePersistentPrompt(promptId)
|
||||
else:
|
||||
# Bot not connected: mark consumed directly
|
||||
interface.updateDirectorPrompt(promptId, {
|
||||
"status": TeamsbotDirectorPromptStatus.CONSUMED.value,
|
||||
"statusMessage": "Removed by operator (bot offline)",
|
||||
})
|
||||
return {"deleted": True, "promptId": promptId}
|
||||
|
||||
|
||||
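A client-side usage sketch for the three new endpoints. The base URL, auth header and IDs are assumptions; the request and response shapes follow the route handlers above (the POST returns 409 until the bot's WebSocket is live, and the DELETE keeps the record for audit but marks it consumed).

import httpx

BASE = "http://localhost:8000/api/teamsbot"            # assumed route prefix
headers = {"Authorization": "Bearer <token>"}          # assumed auth scheme
instanceId, sessionId = "inst-1", "sess-1"             # assumed IDs

with httpx.Client(base_url=BASE, headers=headers) as client:
    # Queue a persistent operator instruction with two attached UDB files.
    created = client.post(
        f"/{instanceId}/sessions/{sessionId}/directorPrompts",
        json={
            "text": "Summarize open action items every 10 minutes",
            "mode": "persistent",
            "fileIds": ["file-a", "file-b"],
        },
    ).json()
    promptId = created["prompt"]["id"]

    # List the operator's own prompts for this session.
    prompts = client.get(
        f"/{instanceId}/sessions/{sessionId}/directorPrompts"
    ).json()["prompts"]

    # Retire the persistent prompt; the DB record stays for audit.
    client.delete(f"/{instanceId}/sessions/{sessionId}/directorPrompts/{promptId}")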
# =========================================================================
|
||||
# Voice Test Endpoint
|
||||
# =========================================================================
|
||||
|
|
@ -845,7 +981,7 @@ async def testVoice(
|
|||
):
|
||||
"""Test TTS voice with AI-generated sample text in the correct language."""
|
||||
from modules.interfaces.interfaceVoiceObjects import getVoiceInterface
|
||||
from modules.serviceCenter.services.serviceAi.mainServiceAi import AiService
|
||||
from .service import createAiService
|
||||
from modules.datamodels.datamodelAi import AiCallRequest, AiCallOptions, OperationTypeEnum, PriorityEnum
|
||||
|
||||
mandateId = _validateInstanceAccess(instanceId, context)
|
||||
|
|
@ -856,12 +992,7 @@ async def testVoice(
|
|||
botName = body.get("botName", "AI Assistant")
|
||||
|
||||
try:
|
||||
# Generate test text dynamically via AI in the correct language
|
||||
serviceContext = type('Ctx', (), {
|
||||
'user': context.user, 'mandateId': mandateId,
|
||||
'featureInstanceId': instanceId, 'featureCode': 'teamsbot'
|
||||
})()
|
||||
aiService = AiService(serviceCenter=serviceContext)
|
||||
aiService = createAiService(context.user, mandateId, instanceId)
|
||||
await aiService.ensureAiObjectsInitialized()
|
||||
|
||||
aiRequest = AiCallRequest(
|
||||
|
|
|
|||
File diff suppressed because it is too large
Load diff

|
|
@ -8,6 +8,7 @@ Encapsulates: config loading -> connector resolution -> duplicate check -> push
|
|||
import json
|
||||
import logging
|
||||
import time
|
||||
from datetime import datetime as _dt, timezone as _tz
|
||||
from typing import List, Dict, Any, Optional
|
||||
|
||||
from .accountingConnectorBase import (
|
||||
|
|
@ -16,7 +17,7 @@ from .accountingConnectorBase import (
|
|||
AccountingChart,
|
||||
SyncResult,
|
||||
)
|
||||
from .accountingRegistry import _getAccountingRegistry
|
||||
from .accountingRegistry import getAccountingRegistry
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
|
@ -26,7 +27,7 @@ class AccountingBridge:
|
|||
|
||||
def __init__(self, trusteeInterface):
|
||||
self._trusteeInterface = trusteeInterface
|
||||
self._registry = _getAccountingRegistry()
|
||||
self._registry = getAccountingRegistry()
|
||||
|
||||
async def getActiveConfig(self, featureInstanceId: str) -> Optional[Dict[str, Any]]:
|
||||
"""Load the active TrusteeAccountingConfig for a feature instance."""
|
||||
|
|
@ -103,9 +104,12 @@ class AccountingBridge:
|
|||
costCenter=position.get("costCenter"),
|
||||
))
|
||||
|
||||
valutaTs = position.get("valuta")
|
||||
bookingDateStr = _dt.fromtimestamp(valutaTs, tz=_tz.utc).strftime("%Y-%m-%d") if valutaTs else ""
|
||||
|
||||
return AccountingBooking(
|
||||
reference=position.get("bookingReference") or position.get("id", ""),
|
||||
bookingDate=position.get("valuta") or "",
|
||||
bookingDate=bookingDateStr,
|
||||
description=position.get("desc", ""),
|
||||
lines=lines,
|
||||
)
|
||||
|
|
|
|||
|
|
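The bridge now derives the connector's `bookingDate` string from the stored UTC epoch instead of passing the raw float through; `_isoDateToTimestamp` further down does the inverse on import. A quick round-trip check with plain stdlib calls:

from datetime import datetime, timezone

valutaTs = 1719792000.0                                   # 2024-07-01 00:00:00 UTC
bookingDateStr = datetime.fromtimestamp(valutaTs, tz=timezone.utc).strftime("%Y-%m-%d")
assert bookingDateStr == "2024-07-01"

# Inverse direction (what the import side does): ISO day -> UTC midnight epoch.
backToTs = datetime.strptime(bookingDateStr, "%Y-%m-%d").replace(tzinfo=timezone.utc).timestamp()
assert backToTs == valutaTs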
@ -39,6 +39,26 @@ class AccountingChart(BaseModel):
|
|||
accountType: Optional[str] = None
|
||||
|
||||
|
||||
class AccountingPeriodBalance(BaseModel):
|
||||
"""Balance snapshot for one account in one period.
|
||||
|
||||
Mirrors the `TrusteeDataAccountBalance` table 1:1 so
|
||||
`accountingDataSync._persistBalances` can persist connector output without
|
||||
re-mapping. `closingBalance` is always the *cumulative* balance at the end
|
||||
of the period (NOT the period's net movement). `periodMonth=0` denotes the
|
||||
annual bucket (closing balance per fiscal year-end).
|
||||
"""
|
||||
accountNumber: str
|
||||
periodYear: int
|
||||
periodMonth: int = 0
|
||||
openingBalance: float = 0.0
|
||||
debitTotal: float = 0.0
|
||||
creditTotal: float = 0.0
|
||||
closingBalance: float = 0.0
|
||||
currency: str = "CHF"
|
||||
asOfDate: Optional[str] = None
|
||||
|
||||
|
||||
class SyncResult(BaseModel):
|
||||
"""Result of a sync operation."""
|
||||
success: bool
|
||||
|
|
@ -126,6 +146,31 @@ class BaseAccountingConnector(ABC):
|
|||
accountNumbers: pre-fetched account numbers (avoids redundant API call). Override in connectors that support it."""
|
||||
return []
|
||||
|
||||
async def getAccountBalances(
|
||||
self,
|
||||
config: Dict[str, Any],
|
||||
years: List[int],
|
||||
accountNumbers: Optional[List[str]] = None,
|
||||
) -> List[AccountingPeriodBalance]:
|
||||
"""Read closing balances per account and period from the external system.
|
||||
|
||||
Contract:
|
||||
- One row per (accountNumber, periodYear, periodMonth).
|
||||
- `periodMonth=0` => annual bucket (closing balance per fiscal year-end).
|
||||
- `periodMonth=1..12` => closing balance per end of that calendar month.
|
||||
- `closingBalance` MUST be the *cumulative* balance at period end,
|
||||
including all prior-year carry-over and yearend bookings -- NOT the
|
||||
period's net movement.
|
||||
- `openingBalance` MUST be the cumulative balance at period start
|
||||
(= previous period's closingBalance).
|
||||
|
||||
Default returns []; `AccountingDataSync` will then fall back to a
|
||||
local cumulative aggregation from journal lines. Override in
|
||||
connectors that can fetch authoritative balances from the source
|
||||
system (e.g. RMA `/gl/saldo`).
|
||||
"""
|
||||
return []
|
||||
|
||||
async def uploadDocument(
|
||||
self,
|
||||
config: Dict[str, Any],
|
||||
|
|
|
|||
|
|
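The `closingBalance` contract is easiest to see with numbers: balances accumulate, so each period's opening equals the previous period's closing, and the monthly net movement alone is never stored as the closing value. A small worked sketch with invented figures for a balance-sheet account:

movements = {   # (year, month) -> (debit, credit); invented figures
    (2024, 1): (1000.0, 200.0),
    (2024, 2): (0.0,    300.0),
    (2024, 3): (500.0,  0.0),
}

opening = 2500.0    # carry-over from the 2023 year-end (cumulative)
rows = []
for (year, month), (debit, credit) in sorted(movements.items()):
    closing = opening + debit - credit
    rows.append({
        "accountNumber": "1020", "periodYear": year, "periodMonth": month,
        "openingBalance": opening, "debitTotal": debit,
        "creditTotal": credit, "closingBalance": closing,
    })
    opening = closing           # next period's opening = this period's closing

print(rows[-1]["closingBalance"])   # 3500.0 = cumulative balance at end of March
# The annual bucket (periodMonth=0) for 2024 would also carry 3500.0 if March
# were the last month with data, not the 1000.0 net movement of the year.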
@ -21,11 +21,12 @@ import logging
|
|||
import os
|
||||
import time
|
||||
from collections import defaultdict
|
||||
from datetime import datetime as _dt, timezone as _tz
|
||||
from pathlib import Path
|
||||
from typing import Callable, Dict, Any, List, Optional, Type
|
||||
|
||||
from .accountingConnectorBase import BaseAccountingConnector
|
||||
from .accountingRegistry import _getAccountingRegistry
|
||||
from .accountingRegistry import getAccountingRegistry
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
|
@ -33,6 +34,89 @@ logger = logging.getLogger(__name__)
|
|||
_HEARTBEAT_EVERY = 500
|
||||
|
||||
|
||||
def _isoDateToTimestamp(raw: Any) -> Optional[float]:
|
||||
"""Convert an ISO date string (``YYYY-MM-DD`` or datetime) to a UTC
|
||||
midnight unix timestamp. Returns ``None`` only when *raw* is
|
||||
falsy/None. Raises ``ValueError`` for non-empty but unparseable
|
||||
values so import errors are never silently swallowed.
|
||||
"""
|
||||
if raw is None or raw == "":
|
||||
return None
|
||||
s = str(raw).split("T")[0].strip()[:10]
|
||||
if not s:
|
||||
return None
|
||||
try:
|
||||
return _dt.strptime(s, "%Y-%m-%d").replace(tzinfo=_tz.utc).timestamp()
|
||||
except ValueError:
|
||||
raise ValueError(f"Cannot parse bookingDate '{raw}' as YYYY-MM-DD")
|
||||
|
||||
|
||||
def _isIncomeStatementAccount(accountNumber: str) -> bool:
|
||||
"""Swiss KMU-Kontenrahmen heuristic: 1xxx + 2xxx -> balance sheet
|
||||
(cumulative carry-over across years); 3xxx..9xxx -> income statement
|
||||
(reset to 0 at fiscal-year start). Used by the local fallback only;
|
||||
when a connector returns balances, those values are used verbatim.
|
||||
"""
|
||||
a = (accountNumber or "").strip()
|
||||
if not a or not a[0].isdigit():
|
||||
return False
|
||||
return a[0] not in ("1", "2")
|
||||
|
||||
|
||||
def _resolveBalanceYears(
|
||||
dateFrom: Optional[str],
|
||||
dateTo: Optional[str],
|
||||
oldestBookingDate: Optional[str],
|
||||
newestBookingDate: Optional[str],
|
||||
) -> List[int]:
|
||||
"""Derive the list of years for which the connector should compute balances.
|
||||
|
||||
Prefers the ``dateFrom``/``dateTo`` import window the user requested. Falls
|
||||
back to the actual oldest/newest booking date observed in the imported
|
||||
journal (so e.g. a `dateTo=None` import still produces balances for every
|
||||
year that has data). If nothing is known, returns the current year as a
|
||||
sensible default.
|
||||
"""
|
||||
def _yearOf(s: Optional[str]) -> Optional[int]:
|
||||
if not s:
|
||||
return None
|
||||
try:
|
||||
return int(str(s)[:4])
|
||||
except (TypeError, ValueError):
|
||||
return None
|
||||
|
||||
fromYear = _yearOf(dateFrom) or _yearOf(oldestBookingDate)
|
||||
toYear = _yearOf(dateTo) or _yearOf(newestBookingDate)
|
||||
if fromYear is None and toYear is None:
|
||||
return [time.gmtime().tm_year]
|
||||
if fromYear is None:
|
||||
fromYear = toYear
|
||||
if toYear is None:
|
||||
toYear = fromYear
|
||||
if toYear < fromYear:
|
||||
fromYear, toYear = toYear, fromYear
|
||||
return list(range(fromYear, toYear + 1))
|
||||
|
||||
|
||||
def _balanceModelToRow(b: Any, scope: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""Map an ``AccountingPeriodBalance`` (or compatible dict) to a DB row."""
|
||||
if isinstance(b, dict):
|
||||
get = b.get
|
||||
else:
|
||||
get = lambda k, default=None: getattr(b, k, default)
|
||||
return {
|
||||
"accountNumber": str(get("accountNumber", "") or ""),
|
||||
"periodYear": int(get("periodYear", 0) or 0),
|
||||
"periodMonth": int(get("periodMonth", 0) or 0),
|
||||
"openingBalance": round(float(get("openingBalance", 0) or 0), 2),
|
||||
"debitTotal": round(float(get("debitTotal", 0) or 0), 2),
|
||||
"creditTotal": round(float(get("creditTotal", 0) or 0), 2),
|
||||
"closingBalance": round(float(get("closingBalance", 0) or 0), 2),
|
||||
"currency": str(get("currency", "CHF") or "CHF"),
|
||||
**scope,
|
||||
}
|
||||
|
||||
|
||||
def _isDebugDumpEnabled() -> bool:
|
||||
"""Whether to write raw connector payloads to disk for offline inspection.
|
||||
|
||||
|
|
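Assuming the three helpers above are importable from this module (exact import path omitted), their behaviour for a few representative inputs follows directly from the definitions; all epoch values are UTC midnight.

# Illustrative checks only; assumes the helpers above are in scope.
assert _isoDateToTimestamp(None) is None
assert _isoDateToTimestamp("2024-03-31") == 1711843200.0            # 2024-03-31 00:00 UTC
assert _isoDateToTimestamp("2024-03-31T15:04:05") == 1711843200.0   # time part is ignored

assert _isIncomeStatementAccount("3400") is True    # revenue account, resets each year
assert _isIncomeStatementAccount("1020") is False   # bank account, cumulative carry-over

assert _resolveBalanceYears("2022-01-01", "2024-12-31", None, None) == [2022, 2023, 2024]
assert _resolveBalanceYears(None, None, "2023-05-02", "2023-11-30") == [2023]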
@ -101,7 +185,7 @@ class AccountingDataSync:
|
|||
|
||||
def __init__(self, trusteeInterface):
|
||||
self._if = trusteeInterface
|
||||
self._registry = _getAccountingRegistry()
|
||||
self._registry = getAccountingRegistry()
|
||||
|
||||
async def importData(
|
||||
self,
|
||||
|
|
@ -144,6 +228,8 @@ class AccountingDataSync:
|
|||
"journalLines": 0,
|
||||
"contacts": 0,
|
||||
"accountBalances": 0,
|
||||
"oldestBookingDate": None,
|
||||
"newestBookingDate": None,
|
||||
"errors": [],
|
||||
"startedAt": time.time(),
|
||||
}
|
||||
|
|
@ -211,12 +297,14 @@ class AccountingDataSync:
|
|||
)
|
||||
_dumpSyncData("journalEntries", rawEntries)
|
||||
_progress(60, f"Speichere {len(rawEntries)} Buchungssaetze...")
|
||||
entriesCount, linesCount = await asyncio.to_thread(
|
||||
entriesCount, linesCount, oldestDate, newestDate = await asyncio.to_thread(
|
||||
self._persistJournal, rawEntries, scope, featureInstanceId,
|
||||
TrusteeDataJournalEntry, TrusteeDataJournalLine,
|
||||
)
|
||||
summary["journalEntries"] = entriesCount
|
||||
summary["journalLines"] = linesCount
|
||||
summary["oldestBookingDate"] = oldestDate
|
||||
summary["newestBookingDate"] = newestDate
|
||||
_progress(65, f"{entriesCount} Saetze + {linesCount} Buchungszeilen gespeichert.")
|
||||
except Exception as e:
|
||||
logger.error(f"Import journal entries failed: {e}", exc_info=True)
|
||||
|
|
@ -242,18 +330,39 @@ class AccountingDataSync:
|
|||
logger.error(f"Import contacts failed: {e}", exc_info=True)
|
||||
summary["errors"].append(f"Contacts: {e}")
|
||||
|
||||
# ---- Phase 4: Compute account balances ----
|
||||
# Progress budget: 90-95 %. Pure DB aggregation, no external calls.
|
||||
# ---- Phase 4: Account balances ----
|
||||
# Progress budget: 88-95 %. Connector first (RMA -> /gl/saldo, Bexio
|
||||
# & Abacus -> aggregated journal). On empty/failed connector output
|
||||
# we fall back to a *correct* cumulative aggregation from the
|
||||
# journal lines we just persisted.
|
||||
connectorBalances: list = []
|
||||
balanceSource = "local-fallback"
|
||||
try:
|
||||
_progress(90, "Berechne Kontensaldi...")
|
||||
_progress(88, "Lade Kontensaldi vom Buchhaltungssystem...")
|
||||
balanceYears = _resolveBalanceYears(dateFrom, dateTo, summary.get("oldestBookingDate"), summary.get("newestBookingDate"))
|
||||
connectorBalances = await connector.getAccountBalances(
|
||||
connConfig,
|
||||
years=balanceYears,
|
||||
accountNumbers=fetchedAccountNumbers or None,
|
||||
)
|
||||
_dumpSyncData("accountBalances", connectorBalances)
|
||||
if connectorBalances:
|
||||
balanceSource = "connector"
|
||||
except Exception as e:
|
||||
logger.warning(f"Connector getAccountBalances failed, will use local fallback: {e}", exc_info=True)
|
||||
summary["errors"].append(f"Balances connector: {e}")
|
||||
|
||||
try:
|
||||
_progress(92, "Speichere Kontensaldi...")
|
||||
balanceCount = await asyncio.to_thread(
|
||||
self._persistBalances, featureInstanceId, mandateId,
|
||||
TrusteeDataJournalEntry, TrusteeDataJournalLine, TrusteeDataAccountBalance,
|
||||
connectorBalances, balanceSource,
|
||||
)
|
||||
summary["accountBalances"] = balanceCount
|
||||
_progress(95, f"{balanceCount} Saldi berechnet.")
|
||||
_progress(95, f"{balanceCount} Saldi gespeichert (source={balanceSource}).")
|
||||
except Exception as e:
|
||||
logger.error(f"Compute balances failed: {e}", exc_info=True)
|
||||
logger.error(f"Persist balances failed: {e}", exc_info=True)
|
||||
summary["errors"].append(f"Balances: {e}")
|
||||
|
||||
cfgId = cfgRecord.get("id")
|
||||
|
|
@ -269,14 +378,19 @@ class AccountingDataSync:
|
|||
logger.exception(f"AccountingDataSync: failed to write core lastSync* fields for cfg {cfgId}: {coreErr}")
|
||||
summary["errors"].append(f"Persist lastSync core: {coreErr}")
|
||||
extPayload = {
|
||||
"lastSyncDateFrom": dateFrom,
|
||||
"lastSyncDateTo": dateTo,
|
||||
"lastSyncDateFrom": _isoDateToTimestamp(dateFrom),
|
||||
"lastSyncDateTo": _isoDateToTimestamp(dateTo),
|
||||
"lastSyncCounts": {
|
||||
"accounts": int(summary.get("accounts", 0)),
|
||||
"journalEntries": int(summary.get("journalEntries", 0)),
|
||||
"journalLines": int(summary.get("journalLines", 0)),
|
||||
"contacts": int(summary.get("contacts", 0)),
|
||||
"accountBalances": int(summary.get("accountBalances", 0)),
|
||||
# Actual oldest/newest booking date observed in the
|
||||
# imported journal entries. Lets the user verify that the
|
||||
# full requested window was returned by the source system.
|
||||
"oldestBookingDate": summary.get("oldestBookingDate"),
|
||||
"newestBookingDate": summary.get("newestBookingDate"),
|
||||
},
|
||||
}
|
||||
try:
|
||||
|
|
@ -321,6 +435,9 @@ class AccountingDataSync:
|
|||
|
||||
We pre-build the line rows in memory keyed by the freshly minted entryId
|
||||
so a single ``execute_values`` call can persist all of them.
|
||||
|
||||
Returns ``(entriesCount, linesCount, oldestBookingDate, newestBookingDate)``
|
||||
where the date strings are ISO ``YYYY-MM-DD`` (or ``None`` if no entries).
|
||||
"""
|
||||
import uuid as _uuid
|
||||
t0 = time.time()
|
||||
|
|
@ -329,12 +446,23 @@ class AccountingDataSync:
|
|||
|
||||
entryRows: List[Dict[str, Any]] = []
|
||||
lineRows: List[Dict[str, Any]] = []
|
||||
oldestDate: Optional[str] = None
|
||||
newestDate: Optional[str] = None
|
||||
for raw in rawEntries:
|
||||
entryId = str(_uuid.uuid4())
|
||||
rawDate = raw.get("bookingDate")
|
||||
bookingTs = _isoDateToTimestamp(rawDate)
|
||||
if rawDate:
|
||||
isoDay = str(rawDate).split("T")[0][:10]
|
||||
if isoDay:
|
||||
if oldestDate is None or isoDay < oldestDate:
|
||||
oldestDate = isoDay
|
||||
if newestDate is None or isoDay > newestDate:
|
||||
newestDate = isoDay
|
||||
entryRows.append({
|
||||
"id": entryId,
|
||||
"externalId": raw.get("externalId"),
|
||||
"bookingDate": raw.get("bookingDate"),
|
||||
"bookingDate": bookingTs,
|
||||
"reference": raw.get("reference"),
|
||||
"description": raw.get("description", ""),
|
||||
"currency": raw.get("currency", "CHF"),
|
||||
|
|
@ -364,8 +492,9 @@ class AccountingDataSync:
|
|||
logger.info(
|
||||
f"Persisted {entriesCount} entries + {linesCount} lines for "
|
||||
f"{featureInstanceId} in {time.time() - t0:.1f}s "
|
||||
f"(window: {oldestDate or '?'} .. {newestDate or '?'})"
|
||||
)
|
||||
return entriesCount, linesCount
|
||||
return entriesCount, linesCount, oldestDate, newestDate
|
||||
|
||||
def _persistContacts(self, customers: list, vendors: list, scope: Dict[str, Any],
|
||||
featureInstanceId: str, modelContact: Type) -> int:
|
||||
|
|
@ -378,12 +507,66 @@ class AccountingDataSync:
|
|||
logger.info(f"Persisted {n} contacts for {featureInstanceId} in {time.time() - t0:.1f}s")
|
||||
return n
|
||||
|
||||
def _persistBalances(self, featureInstanceId: str, mandateId: str,
|
||||
modelEntry: Type, modelLine: Type, modelBalance: Type) -> int:
|
||||
"""Re-aggregate journal lines into monthly + annual balances."""
|
||||
def _persistBalances(
|
||||
self,
|
||||
featureInstanceId: str,
|
||||
mandateId: str,
|
||||
modelEntry: Type,
|
||||
modelLine: Type,
|
||||
modelBalance: Type,
|
||||
connectorBalances: list,
|
||||
source: str,
|
||||
) -> int:
|
||||
"""Persist account balances per (account, period) into ``TrusteeDataAccountBalance``.
|
||||
|
||||
Source of truth (``source="connector"``): the list returned by
|
||||
``BaseAccountingConnector.getAccountBalances`` is persisted with
|
||||
``openingBalance``/``closingBalance`` from the connector. If the
|
||||
connector doesn't supply ``debitTotal``/``creditTotal`` (e.g. RMA's
|
||||
``/gl/saldo`` only returns net balance), those fields are enriched
|
||||
from the already-imported journal lines.
|
||||
|
||||
Fallback (``source="local-fallback"``): aggregate the just-persisted
|
||||
journal lines into **cumulative** balances.
|
||||
"""
|
||||
t0 = time.time()
|
||||
self._bulkClear(modelBalance, featureInstanceId)
|
||||
scope = {"featureInstanceId": featureInstanceId, "mandateId": mandateId}
|
||||
|
||||
if connectorBalances:
|
||||
rows = [_balanceModelToRow(b, scope) for b in connectorBalances]
|
||||
movements = self._aggregateJournalMovements(featureInstanceId, modelEntry, modelLine)
|
||||
if movements:
|
||||
self._enrichRowsWithMovements(rows, movements)
|
||||
n = self._bulkCreate(modelBalance, rows)
|
||||
logger.info(
|
||||
f"Persisted {n} balances for {featureInstanceId} in {time.time() - t0:.1f}s "
|
||||
f"(source={source})"
|
||||
)
|
||||
return n
|
||||
|
||||
rows = self._buildLocalBalanceFallback(featureInstanceId, modelEntry, modelLine, scope)
|
||||
n = self._bulkCreate(modelBalance, rows)
|
||||
logger.info(
|
||||
f"Persisted {n} balances for {featureInstanceId} in {time.time() - t0:.1f}s "
|
||||
f"(source={source})"
|
||||
)
|
||||
return n
|
||||
|
||||
def _aggregateJournalMovements(
|
||||
self,
|
||||
featureInstanceId: str,
|
||||
modelEntry: Type,
|
||||
modelLine: Type,
|
||||
) -> Dict[tuple, Dict[str, float]]:
|
||||
"""Aggregate debit/credit movements per ``(accountNumber, year, month)``
|
||||
from the already-persisted journal lines.
|
||||
|
||||
Returns ``{(accNo, year, month): {"debit": float, "credit": float}}``.
|
||||
Used by both the local-fallback balance builder and the connector-balance
|
||||
enrichment (RMA's ``/gl/saldo`` delivers net balance but no debit/credit
|
||||
breakdown).
|
||||
"""
|
||||
entries = self._if.db.getRecordset(
|
||||
modelEntry, recordFilter={"featureInstanceId": featureInstanceId},
|
||||
) or []
|
||||
|
|
@ -398,7 +581,7 @@ class AccountingDataSync:
|
|||
modelLine, recordFilter={"featureInstanceId": featureInstanceId},
|
||||
) or []
|
||||
|
||||
buckets: Dict[tuple, Dict[str, float]] = defaultdict(lambda: {"debit": 0.0, "credit": 0.0})
|
||||
movements: Dict[tuple, Dict[str, float]] = defaultdict(lambda: {"debit": 0.0, "credit": 0.0})
|
||||
for ln in lines:
|
||||
if isinstance(ln, dict):
|
||||
jeid = ln.get("journalEntryId", "")
|
||||
|
|
@ -411,40 +594,119 @@ class AccountingDataSync:
|
|||
debit = float(getattr(ln, "debitAmount", 0))
|
||||
credit = float(getattr(ln, "creditAmount", 0))
|
||||
|
||||
bdate = entryDates.get(jeid, "")
|
||||
bdate = entryDates.get(jeid)
|
||||
if not accNo or not bdate:
|
||||
continue
|
||||
parts = bdate.split("-")
|
||||
if len(parts) < 2:
|
||||
continue
|
||||
try:
|
||||
year = int(parts[0])
|
||||
month = int(parts[1])
|
||||
except ValueError:
|
||||
dt = _dt.fromtimestamp(float(bdate), tz=_tz.utc)
|
||||
year = dt.year
|
||||
month = dt.month
|
||||
except (ValueError, TypeError, OSError):
|
||||
continue
|
||||
movements[(accNo, year, month)]["debit"] += debit
|
||||
movements[(accNo, year, month)]["credit"] += credit
|
||||
return movements
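# Illustrative sketch of the returned mapping (account number and amounts are
# invented, not taken from any real mandate):
#
#   movements = {
#       ("1020", 2024, 3): {"debit": 1500.0, "credit": 200.0},
#       ("1020", 2024, 4): {"debit": 0.0, "credit": 750.0},
#   }
#   # net movement for March 2024 on account 1020: 1500.0 - 200.0 = 1300.0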
|
||||
|
||||
buckets[(accNo, year, month)]["debit"] += debit
|
||||
buckets[(accNo, year, month)]["credit"] += credit
|
||||
buckets[(accNo, year, 0)]["debit"] += debit
|
||||
buckets[(accNo, year, 0)]["credit"] += credit
|
||||
@staticmethod
|
||||
def _enrichRowsWithMovements(
|
||||
rows: List[Dict[str, Any]],
|
||||
movements: Dict[tuple, Dict[str, float]],
|
||||
) -> None:
|
||||
"""Patch ``debitTotal`` / ``creditTotal`` on balance rows from journal movements.
|
||||
|
||||
scope = {"featureInstanceId": featureInstanceId, "mandateId": mandateId}
|
||||
rows = [{
|
||||
For monthly rows: use the exact month's movement.
|
||||
For annual rows (``periodMonth=0``): sum all 12 months for that account and year.
|
||||
Only overwrites if the existing value is 0 (connector didn't provide it).
|
||||
"""
|
||||
for row in rows:
|
||||
if row.get("debitTotal", 0) != 0 or row.get("creditTotal", 0) != 0:
|
||||
continue
|
||||
accNo = row.get("accountNumber", "")
|
||||
year = row.get("periodYear", 0)
|
||||
month = row.get("periodMonth", 0)
|
||||
if month > 0:
|
||||
mov = movements.get((accNo, year, month))
|
||||
if mov:
|
||||
row["debitTotal"] = round(mov["debit"], 2)
|
||||
row["creditTotal"] = round(mov["credit"], 2)
|
||||
else:
|
||||
yearDebit = 0.0
|
||||
yearCredit = 0.0
|
||||
for m in range(1, 13):
|
||||
mov = movements.get((accNo, year, m))
|
||||
if mov:
|
||||
yearDebit += mov["debit"]
|
||||
yearCredit += mov["credit"]
|
||||
if yearDebit or yearCredit:
|
||||
row["debitTotal"] = round(yearDebit, 2)
|
||||
row["creditTotal"] = round(yearCredit, 2)
|
||||
|
||||
def _buildLocalBalanceFallback(
|
||||
self,
|
||||
featureInstanceId: str,
|
||||
modelEntry: Type,
|
||||
modelLine: Type,
|
||||
scope: Dict[str, Any],
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""Aggregate ``TrusteeDataJournalLine`` rows into cumulative period balances.
|
||||
|
||||
Returns rows ready for ``_bulkCreate``. Walks every account
|
||||
chronologically through all years observed in the journal so the
|
||||
cumulative balance and per-period opening are exact (within the
|
||||
bounds of the imported window).
|
||||
"""
|
||||
movements = self._aggregateJournalMovements(featureInstanceId, modelEntry, modelLine)
|
||||
observedYears: set = set()
|
||||
observedAccounts: set = set()
|
||||
for (accNo, year, month) in movements:
|
||||
observedYears.add(year)
|
||||
observedAccounts.add(accNo)
|
||||
|
||||
if not observedYears or not observedAccounts:
|
||||
return []
|
||||
|
||||
sortedYears = sorted(observedYears)
|
||||
rows: List[Dict[str, Any]] = []
|
||||
for accNo in sorted(observedAccounts):
|
||||
isER = _isIncomeStatementAccount(accNo)
|
||||
cumulativeOpeningOfYear = 0.0
|
||||
for year in sortedYears:
|
||||
yearOpening = 0.0 if isER else cumulativeOpeningOfYear
|
||||
running = yearOpening
|
||||
yearDebit = 0.0
|
||||
yearCredit = 0.0
|
||||
for month in range(1, 13):
|
||||
opening = running
|
||||
mov = movements.get((accNo, year, month), {"debit": 0.0, "credit": 0.0})
|
||||
running = opening + mov["debit"] - mov["credit"]
|
||||
yearDebit += mov["debit"]
|
||||
yearCredit += mov["credit"]
|
||||
if mov["debit"] == 0 and mov["credit"] == 0 and opening == 0 and running == 0:
|
||||
continue
|
||||
rows.append({
|
||||
"accountNumber": accNo,
|
||||
"periodYear": year,
|
||||
"periodMonth": month,
|
||||
"openingBalance": 0.0,
|
||||
"debitTotal": round(totals["debit"], 2),
|
||||
"creditTotal": round(totals["credit"], 2),
|
||||
"closingBalance": round(totals["debit"] - totals["credit"], 2),
|
||||
"openingBalance": round(opening, 2),
|
||||
"debitTotal": round(mov["debit"], 2),
|
||||
"creditTotal": round(mov["credit"], 2),
|
||||
"closingBalance": round(running, 2),
|
||||
"currency": "CHF",
|
||||
**scope,
|
||||
} for (accNo, year, month), totals in buckets.items()]
|
||||
n = self._bulkCreate(modelBalance, rows)
|
||||
logger.info(
|
||||
f"Persisted {n} balances for {featureInstanceId} in {time.time() - t0:.1f}s"
|
||||
)
|
||||
return n
|
||||
})
|
||||
rows.append({
|
||||
"accountNumber": accNo,
|
||||
"periodYear": year,
|
||||
"periodMonth": 0,
|
||||
"openingBalance": round(yearOpening, 2),
|
||||
"debitTotal": round(yearDebit, 2),
|
||||
"creditTotal": round(yearCredit, 2),
|
||||
"closingBalance": round(running, 2),
|
||||
"currency": "CHF",
|
||||
**scope,
|
||||
})
|
||||
cumulativeOpeningOfYear = running
|
||||
return rows
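# Numeric sketch of the cumulative walk above (amounts invented):
#   balance-sheet account 1020: debit 500 in 2023, debit 100 in 2024
#     -> 2023 closing 500.00; 2024 opening 500.00 (carry-over), closing 600.00
#   income-statement account 3400: credit 800 in 2023, credit 300 in 2024
#     -> 2023 closing -800.00; 2024 opening 0.00 (reset), closing -300.00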
|
||||
|
||||
# ===== Low-level bulk helpers =====
|
||||
|
@ -74,7 +74,7 @@ class AccountingRegistry:
|
|||
_registryInstance: Optional[AccountingRegistry] = None
|
||||
|
||||
|
||||
def _getAccountingRegistry() -> AccountingRegistry:
|
||||
def getAccountingRegistry() -> AccountingRegistry:
|
||||
"""Singleton access to the accounting registry."""
|
||||
global _registryInstance
|
||||
if _registryInstance is None:
|
||||
|
@ -6,12 +6,22 @@ API docs: https://downloads.abacus.ch/fileadmin/ablage/abaconnect/htmlfiles/docs
|
|||
Auth: OAuth 2.0 Client Credentials (Service User).
|
||||
Each Abacus instance has its own host URL; there is no central cloud endpoint.
|
||||
Entity API uses OData V4 format.
|
||||
|
||||
Account balances:
|
||||
Abacus exposes an ``AccountBalances`` entity (per fiscal year), but its
|
||||
availability depends on the customer's Abacus license / Profile and is
|
||||
NOT guaranteed for all instances. The robust default is therefore to
|
||||
aggregate balances locally from ``GeneralJournalEntries`` (always
|
||||
present). If a future iteration confirms the entity for a specific
|
||||
instance, ``getAccountBalances`` can be extended to prefer that source
|
||||
via a config flag (e.g. ``useAccountBalancesEntity: true``).
|
||||
"""
|
||||
|
||||
import base64
|
||||
import calendar
|
||||
import logging
|
||||
import time
|
||||
from typing import List, Dict, Any, Optional
|
||||
from typing import List, Dict, Any, Optional, Tuple
|
||||
|
||||
import aiohttp
|
||||
|
||||
|
|
@ -19,6 +29,7 @@ from ..accountingConnectorBase import (
|
|||
BaseAccountingConnector,
|
||||
AccountingBooking,
|
||||
AccountingChart,
|
||||
AccountingPeriodBalance,
|
||||
ConnectorConfigField,
|
||||
SyncResult,
|
||||
)
|
||||
|
|
@ -27,6 +38,21 @@ from modules.shared.i18nRegistry import t
|
|||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _formatLastDayOfMonth(year: int, month: int) -> str:
|
||||
lastDay = calendar.monthrange(year, month)[1]
|
||||
return f"{year:04d}-{month:02d}-{lastDay:02d}"
|
||||
|
||||
|
||||
def _isIncomeStatementAccount(accountNumber: str) -> bool:
|
||||
"""Swiss KMU-Kontenrahmen heuristic: 1xxx + 2xxx -> balance sheet (cumulative);
|
||||
3xxx..9xxx -> income statement (reset per fiscal year).
|
||||
"""
|
||||
a = (accountNumber or "").strip()
|
||||
if not a or not a[0].isdigit():
|
||||
return False
|
||||
return a[0] not in ("1", "2")
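# Example classifications under this heuristic (Swiss KMU chart):
#   _isIncomeStatementAccount("1020") -> False   (Aktiven, balance sheet)
#   _isIncomeStatementAccount("2800") -> False   (Eigenkapital, balance sheet)
#   _isIncomeStatementAccount("3400") -> True    (Ertrag, income statement)
#   _isIncomeStatementAccount("6500") -> True    (Aufwand, income statement)
#   _isIncomeStatementAccount("")     -> False   (defensive default)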
|
||||
|
||||
|
||||
class AccountingConnectorAbacus(BaseAccountingConnector):
|
||||
|
||||
def __init__(self):
|
||||
|
|
@ -341,3 +367,158 @@ class AccountingConnectorAbacus(BaseAccountingConnector):
|
|||
except Exception as e:
|
||||
logger.error(f"Abacus getVendors error: {e}")
|
||||
return []
|
||||
|
||||
async def getAccountBalances(
|
||||
self,
|
||||
config: Dict[str, Any],
|
||||
years: List[int],
|
||||
accountNumbers: Optional[List[str]] = None,
|
||||
) -> List[AccountingPeriodBalance]:
|
||||
"""Aggregate account balances from ``GeneralJournalEntries`` (OData V4).
|
||||
|
||||
Strategy:
|
||||
1. Page through ``GET GeneralJournalEntries?$filter=JournalDate le YYYY-12-31``
|
||||
until ``@odata.nextLink`` is exhausted. Including ALL prior years
|
||||
is required to compute the carry-over for balance-sheet accounts.
|
||||
2. Per (account, year, month) accumulate ``DebitAmount``/``CreditAmount``
|
||||
from ``Lines``.
|
||||
3. Income-statement accounts (3xxx-9xxx) reset to 0 per fiscal year;
|
||||
balance-sheet accounts (1xxx-2xxx) carry their cumulative balance.
|
||||
|
||||
Optional optimization (not yet active): if the customer's Abacus
|
||||
instance ships the ``AccountBalances`` OData entity, it can return
|
||||
authoritative period balances directly. Detect via a probe GET on
|
||||
``AccountBalances?$top=1`` and prefer that source. This is intentionally
|
||||
deferred until we hit a customer where the entity is available --
|
||||
the local aggregation is an always-correct fallback.
|
||||
"""
|
||||
if not years:
|
||||
return []
|
||||
sortedYears = sorted({int(y) for y in years if y})
|
||||
minYear = sortedYears[0]
|
||||
maxYear = sortedYears[-1]
|
||||
accountNumbersSet = set(accountNumbers) if accountNumbers else None
|
||||
|
||||
headers = await self._buildAuthHeaders(config)
|
||||
if not headers:
|
||||
logger.warning("Abacus getAccountBalances: no access token, skipping")
|
||||
return []
|
||||
|
||||
rawEntries = await self._fetchAllJournalEntries(config, headers, dateTo=f"{maxYear}-12-31")
|
||||
|
||||
movements: Dict[Tuple[str, int, int], Dict[str, float]] = {}
|
||||
seenAccounts: set = set()
|
||||
for entry in rawEntries:
|
||||
dateRaw = str(entry.get("JournalDate") or "")[:10]
|
||||
if len(dateRaw) < 7:
|
||||
continue
|
||||
try:
|
||||
year = int(dateRaw[:4])
|
||||
month = int(dateRaw[5:7])
|
||||
except ValueError:
|
||||
continue
|
||||
for line in (entry.get("Lines") or []):
|
||||
accNo = str(line.get("AccountId") or "").strip()
|
||||
if not accNo:
|
||||
continue
|
||||
seenAccounts.add(accNo)
|
||||
try:
|
||||
debit = float(line.get("DebitAmount") or 0)
|
||||
credit = float(line.get("CreditAmount") or 0)
|
||||
except (TypeError, ValueError):
|
||||
continue
|
||||
if debit == 0 and credit == 0:
|
||||
continue
|
||||
bucket = movements.setdefault((accNo, year, month), {"debit": 0.0, "credit": 0.0})
|
||||
bucket["debit"] += debit
|
||||
bucket["credit"] += credit
|
||||
|
||||
results: List[AccountingPeriodBalance] = []
|
||||
for accNo in sorted(seenAccounts):
|
||||
if accountNumbersSet is not None and accNo not in accountNumbersSet:
|
||||
continue
|
||||
isER = _isIncomeStatementAccount(accNo)
|
||||
|
||||
preMinYearBalance = 0.0
|
||||
if not isER:
|
||||
for (a, yr, _mo), m in movements.items():
|
||||
if a == accNo and yr < minYear:
|
||||
preMinYearBalance += m["debit"] - m["credit"]
|
||||
|
||||
cumulativeOpeningOfYear = preMinYearBalance
|
||||
for year in sortedYears:
|
||||
yearOpening = 0.0 if isER else cumulativeOpeningOfYear
|
||||
running = yearOpening
|
||||
yearDebit = 0.0
|
||||
yearCredit = 0.0
|
||||
for month in range(1, 13):
|
||||
opening = running
|
||||
mov = movements.get((accNo, year, month), {"debit": 0.0, "credit": 0.0})
|
||||
running = opening + mov["debit"] - mov["credit"]
|
||||
yearDebit += mov["debit"]
|
||||
yearCredit += mov["credit"]
|
||||
results.append(AccountingPeriodBalance(
|
||||
accountNumber=accNo,
|
||||
periodYear=year,
|
||||
periodMonth=month,
|
||||
openingBalance=round(opening, 2),
|
||||
debitTotal=round(mov["debit"], 2),
|
||||
creditTotal=round(mov["credit"], 2),
|
||||
closingBalance=round(running, 2),
|
||||
currency="CHF",
|
||||
asOfDate=_formatLastDayOfMonth(year, month),
|
||||
))
|
||||
|
||||
results.append(AccountingPeriodBalance(
|
||||
accountNumber=accNo,
|
||||
periodYear=year,
|
||||
periodMonth=0,
|
||||
openingBalance=round(yearOpening, 2),
|
||||
debitTotal=round(yearDebit, 2),
|
||||
creditTotal=round(yearCredit, 2),
|
||||
closingBalance=round(running, 2),
|
||||
currency="CHF",
|
||||
asOfDate=f"{year}-12-31",
|
||||
))
|
||||
|
||||
cumulativeOpeningOfYear = running
|
||||
|
||||
logger.info(
|
||||
"Abacus getAccountBalances: %s rows from %s journal entries (years=%s)",
|
||||
len(results), len(rawEntries), sortedYears,
|
||||
)
|
||||
return results
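# Hedged usage sketch -- the config keys below are placeholders, not
# necessarily the connector's actual configuration field names:
#
#   import asyncio
#
#   async def _demo():
#       connector = AccountingConnectorAbacus()
#       config = {"hostUrl": "https://abacus.example.ch", "clientId": "...", "clientSecret": "..."}
#       balances = await connector.getAccountBalances(config, years=[2023, 2024])
#       annual = [b for b in balances if b.periodMonth == 0]
#       print(f"{len(balances)} rows, {len(annual)} annual rows")
#
#   # asyncio.run(_demo())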
|
||||
|
||||
async def _fetchAllJournalEntries(
|
||||
self,
|
||||
config: Dict[str, Any],
|
||||
headers: Dict[str, str],
|
||||
dateTo: str,
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""Page through ``GeneralJournalEntries`` (OData V4) following ``@odata.nextLink``.
|
||||
|
||||
We filter ``JournalDate le dateTo`` to bound the result, but include
|
||||
ALL prior years (no lower bound) so cumulative balance-sheet
|
||||
carry-over is correct.
|
||||
"""
|
||||
results: List[Dict[str, Any]] = []
|
||||
baseUrl = self._buildEntityUrl(config, f"GeneralJournalEntries?$filter=JournalDate le {dateTo}")
|
||||
nextUrl: Optional[str] = baseUrl
|
||||
async with aiohttp.ClientSession() as session:
|
||||
while nextUrl:
|
||||
try:
|
||||
async with session.get(nextUrl, headers=headers, timeout=aiohttp.ClientTimeout(total=60)) as resp:
|
||||
if resp.status != 200:
|
||||
body = await resp.text()
|
||||
logger.warning("Abacus GeneralJournalEntries HTTP %s: %s", resp.status, body[:200])
|
||||
break
|
||||
data = await resp.json()
|
||||
except Exception as ex:
|
||||
logger.warning("Abacus GeneralJournalEntries request failed: %s", ex)
|
||||
break
|
||||
page = data.get("value") or []
|
||||
if not isinstance(page, list):
|
||||
break
|
||||
results.extend(page)
|
||||
nextUrl = data.get("@odata.nextLink")
|
||||
return results
|
||||
|
@ -7,10 +7,20 @@ Auth: Personal Access Token (PAT) as Bearer token.
|
|||
Base URL: https://api.bexio.com/
|
||||
Note: Bexio uses internal account IDs (int), not account numbers.
|
||||
The connector caches the chart of accounts to resolve accountNumber -> account_id.
|
||||
|
||||
Account balances:
|
||||
Bexio does NOT expose a dedicated saldo endpoint (no equivalent to RMA's
|
||||
``/gl/saldo``). ``getAccountBalances`` therefore aggregates balances
|
||||
locally by paginating ``GET /3.0/accounting/journal`` (max 2000 rows per
|
||||
page) and computing cumulative balances per (account, period). Income-
|
||||
statement accounts (3xxx-9xxx in the Swiss KMU-Kontenrahmen) are reset
|
||||
at the start of each fiscal year; balance-sheet accounts (1xxx-2xxx)
|
||||
carry their cumulative balance across years.
|
||||
"""
|
||||
|
||||
import calendar
|
||||
import logging
|
||||
from typing import List, Dict, Any, Optional
|
||||
from typing import List, Dict, Any, Optional, Tuple
|
||||
|
||||
import aiohttp
|
||||
|
||||
|
|
@ -18,6 +28,7 @@ from ..accountingConnectorBase import (
|
|||
BaseAccountingConnector,
|
||||
AccountingBooking,
|
||||
AccountingChart,
|
||||
AccountingPeriodBalance,
|
||||
ConnectorConfigField,
|
||||
SyncResult,
|
||||
)
|
||||
|
|
@ -26,6 +37,23 @@ from modules.shared.i18nRegistry import t
|
|||
logger = logging.getLogger(__name__)
|
||||
|
||||
_DEFAULT_API_BASE_URL = "https://api.bexio.com/"
|
||||
_JOURNAL_PAGE_SIZE = 2000
|
||||
|
||||
|
||||
def _formatLastDayOfMonth(year: int, month: int) -> str:
|
||||
lastDay = calendar.monthrange(year, month)[1]
|
||||
return f"{year:04d}-{month:02d}-{lastDay:02d}"
|
||||
|
||||
|
||||
def _isIncomeStatementAccount(accountNumber: str) -> bool:
|
||||
"""Swiss KMU-Kontenrahmen: 1xxx Aktiven + 2xxx Passiven -> balance sheet
|
||||
(cumulative balance carried across years); 3xxx..9xxx -> income statement
|
||||
(reset to 0 at fiscal-year start).
|
||||
"""
|
||||
a = (accountNumber or "").strip()
|
||||
if not a or not a[0].isdigit():
|
||||
return False
|
||||
return a[0] not in ("1", "2")
|
||||
|
||||
|
||||
class AccountingConnectorBexio(BaseAccountingConnector):
|
||||
|
|
@ -260,3 +288,148 @@ class AccountingConnectorBexio(BaseAccountingConnector):
|
|||
except Exception as e:
|
||||
logger.error(f"Bexio getCustomers error: {e}")
|
||||
return []
|
||||
|
||||
async def getAccountBalances(
|
||||
self,
|
||||
config: Dict[str, Any],
|
||||
years: List[int],
|
||||
accountNumbers: Optional[List[str]] = None,
|
||||
) -> List[AccountingPeriodBalance]:
|
||||
"""Aggregate account balances locally from ``/3.0/accounting/journal``.
|
||||
|
||||
Bexio offers no per-account saldo endpoint, so we paginate the full
|
||||
journal up to the latest requested fiscal year-end and compute
|
||||
opening / debit / credit / closing per (account, period). For balance-
|
||||
sheet accounts the cumulative carry-over from prior years is included;
|
||||
for income-statement accounts the balance is reset at the start of
|
||||
every requested fiscal year (per Swiss accounting principles).
|
||||
"""
|
||||
if not years:
|
||||
return []
|
||||
sortedYears = sorted({int(y) for y in years if y})
|
||||
minYear = sortedYears[0]
|
||||
maxYear = sortedYears[-1]
|
||||
accountNumbersSet = set(accountNumbers) if accountNumbers else None
|
||||
|
||||
accounts = await self._loadRawAccounts(config)
|
||||
accIdToNumber: Dict[int, str] = {acc.get("id"): str(acc.get("account_no", "")) for acc in accounts if acc.get("id") is not None and acc.get("account_no") is not None}
|
||||
if not accIdToNumber:
|
||||
logger.warning("Bexio getAccountBalances: chart of accounts is empty -- cannot derive balances")
|
||||
return []
|
||||
|
||||
rawEntries = await self._fetchAllJournalRows(config, dateTo=f"{maxYear}-12-31")
|
||||
|
||||
movements: Dict[Tuple[str, int, int], Dict[str, float]] = {}
|
||||
for e in rawEntries:
|
||||
dateRaw = str(e.get("date") or "")[:10]
|
||||
if len(dateRaw) < 7:
|
||||
continue
|
||||
try:
|
||||
year = int(dateRaw[:4])
|
||||
month = int(dateRaw[5:7])
|
||||
except ValueError:
|
||||
continue
|
||||
try:
|
||||
amount = float(e.get("amount") or 0)
|
||||
except (TypeError, ValueError):
|
||||
continue
|
||||
if amount == 0:
|
||||
continue
|
||||
debitAcc = accIdToNumber.get(e.get("debit_account_id"))
|
||||
creditAcc = accIdToNumber.get(e.get("credit_account_id"))
|
||||
if debitAcc:
|
||||
bucket = movements.setdefault((debitAcc, year, month), {"debit": 0.0, "credit": 0.0})
|
||||
bucket["debit"] += amount
|
||||
if creditAcc:
|
||||
bucket = movements.setdefault((creditAcc, year, month), {"debit": 0.0, "credit": 0.0})
|
||||
bucket["credit"] += amount
|
||||
|
||||
accountsByNumber = sorted({n for n in accIdToNumber.values() if n})
|
||||
results: List[AccountingPeriodBalance] = []
|
||||
|
||||
for accNo in accountsByNumber:
|
||||
if accountNumbersSet is not None and accNo not in accountNumbersSet:
|
||||
continue
|
||||
isER = _isIncomeStatementAccount(accNo)
|
||||
|
||||
preMinYearBalance = 0.0
|
||||
if not isER:
|
||||
for (a, yr, _mo), m in movements.items():
|
||||
if a == accNo and yr < minYear:
|
||||
preMinYearBalance += m["debit"] - m["credit"]
|
||||
|
||||
cumulativeOpeningOfYear = preMinYearBalance
|
||||
for year in sortedYears:
|
||||
if isER:
|
||||
yearOpening = 0.0
|
||||
else:
|
||||
yearOpening = cumulativeOpeningOfYear
|
||||
|
||||
running = yearOpening
|
||||
yearDebit = 0.0
|
||||
yearCredit = 0.0
|
||||
for month in range(1, 13):
|
||||
opening = running
|
||||
mov = movements.get((accNo, year, month), {"debit": 0.0, "credit": 0.0})
|
||||
running = opening + mov["debit"] - mov["credit"]
|
||||
yearDebit += mov["debit"]
|
||||
yearCredit += mov["credit"]
|
||||
results.append(AccountingPeriodBalance(
|
||||
accountNumber=accNo,
|
||||
periodYear=year,
|
||||
periodMonth=month,
|
||||
openingBalance=round(opening, 2),
|
||||
debitTotal=round(mov["debit"], 2),
|
||||
creditTotal=round(mov["credit"], 2),
|
||||
closingBalance=round(running, 2),
|
||||
currency="CHF",
|
||||
asOfDate=_formatLastDayOfMonth(year, month),
|
||||
))
|
||||
|
||||
results.append(AccountingPeriodBalance(
|
||||
accountNumber=accNo,
|
||||
periodYear=year,
|
||||
periodMonth=0,
|
||||
openingBalance=round(yearOpening, 2),
|
||||
debitTotal=round(yearDebit, 2),
|
||||
creditTotal=round(yearCredit, 2),
|
||||
closingBalance=round(running, 2),
|
||||
currency="CHF",
|
||||
asOfDate=f"{year}-12-31",
|
||||
))
|
||||
|
||||
cumulativeOpeningOfYear = running
|
||||
|
||||
logger.info("Bexio getAccountBalances: %s rows from %s journal entries (years=%s)", len(results), len(rawEntries), sortedYears)
|
||||
return results
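# Illustration of how a single Bexio journal row feeds both movement buckets
# (values invented): amount 250.00, date 2024-03-12,
# debit_account_id -> account 1020, credit_account_id -> account 3400:
#   movements[("1020", 2024, 3)]["debit"]  += 250.0
#   movements[("3400", 2024, 3)]["credit"] += 250.0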
|
||||
|
||||
async def _fetchAllJournalRows(self, config: Dict[str, Any], dateTo: str) -> List[Dict[str, Any]]:
|
||||
"""Paginate ``GET /3.0/accounting/journal?to=YYYY-12-31`` and return all rows.
|
||||
|
||||
Bexio caps page size at 2000; we fetch until a short page is returned.
|
||||
Failures abort early (returning whatever rows were collected) -- the
|
||||
caller logs the row count, so partial data is visible.
|
||||
"""
|
||||
rows: List[Dict[str, Any]] = []
|
||||
offset = 0
|
||||
url = self._buildUrl(config, "3.0/accounting/journal")
|
||||
async with aiohttp.ClientSession() as session:
|
||||
while True:
|
||||
params = {"to": dateTo, "limit": str(_JOURNAL_PAGE_SIZE), "offset": str(offset)}
|
||||
try:
|
||||
async with session.get(url, headers=self._buildHeaders(config), params=params, timeout=aiohttp.ClientTimeout(total=60)) as resp:
|
||||
if resp.status != 200:
|
||||
body = await resp.text()
|
||||
logger.warning("Bexio /accounting/journal HTTP %s offset=%s: %s", resp.status, offset, body[:200])
|
||||
break
|
||||
page = await resp.json()
|
||||
except Exception as ex:
|
||||
logger.warning("Bexio /accounting/journal request failed offset=%s: %s", offset, ex)
|
||||
break
|
||||
if not isinstance(page, list) or not page:
|
||||
break
|
||||
rows.extend(page)
|
||||
if len(page) < _JOURNAL_PAGE_SIZE:
|
||||
break
|
||||
offset += _JOURNAL_PAGE_SIZE
|
||||
return rows
|
||||
|
@ -9,6 +9,7 @@ Base URL: https://service.runmyaccounts.com/api/latest/clients/{clientName}/
|
|||
"""
|
||||
|
||||
import asyncio
|
||||
import calendar
|
||||
import json
|
||||
import logging
|
||||
import re
|
||||
|
|
@ -21,6 +22,7 @@ from ..accountingConnectorBase import (
|
|||
BaseAccountingConnector,
|
||||
AccountingBooking,
|
||||
AccountingChart,
|
||||
AccountingPeriodBalance,
|
||||
ConnectorConfigField,
|
||||
SyncResult,
|
||||
)
|
||||
|
|
@ -31,6 +33,73 @@ logger = logging.getLogger(__name__)
|
|||
_DEFAULT_API_BASE_URL = "https://service.runmyaccounts.com/api/latest/clients/"
|
||||
|
||||
|
||||
def _formatLastDayOfMonth(year: int, month: int) -> str:
|
||||
"""Return ``YYYY-MM-DD`` of the last day of a calendar month."""
|
||||
lastDay = calendar.monthrange(year, month)[1]
|
||||
return f"{year:04d}-{month:02d}-{lastDay:02d}"
|
||||
|
||||
|
||||
def _isIncomeStatementAccount(accountNumber: str) -> bool:
|
||||
"""Decide whether an account is part of the income statement (Erfolgsrechnung).
|
||||
|
||||
Swiss KMU-Kontenrahmen: 1xxx Aktiven, 2xxx Passiven (incl. 28xx
|
||||
Eigenkapital) -> balance sheet; 3xxx..9xxx -> income statement.
|
||||
Used by the RMA connector to choose between the two `/gl/saldo` query
|
||||
variants (with vs. without ``from`` parameter).
|
||||
"""
|
||||
a = (accountNumber or "").strip()
|
||||
if not a or not a[0].isdigit():
|
||||
return False
|
||||
return a[0] not in ("1", "2")
|
||||
|
||||
|
||||
def _parseSaldoBody(body: str) -> List[tuple]:
|
||||
"""Parse the response body of ``GET /gl/saldo`` (JSON or XML).
|
||||
|
||||
Returns a list of ``(accountNumber, saldo)`` tuples. The endpoint
|
||||
delivers ``{"row": [{"column": [accno, label, saldo]}, ...]}`` (JSON) or
|
||||
``<table><row><column>accno</column><column>label</column><column>saldo</column></row>...``
|
||||
(XML). Rows that cannot be parsed are silently skipped to keep one bad row
|
||||
from poisoning the whole sync.
|
||||
"""
|
||||
if not body or not body.strip():
|
||||
return []
|
||||
rows: List[tuple] = []
|
||||
try:
|
||||
data = json.loads(body)
|
||||
items = data.get("row") if isinstance(data, dict) else data
|
||||
if isinstance(items, dict):
|
||||
items = [items]
|
||||
if isinstance(items, list):
|
||||
for item in items:
|
||||
if not isinstance(item, dict):
|
||||
continue
|
||||
cols = item.get("column") or []
|
||||
if isinstance(cols, list) and len(cols) >= 3:
|
||||
accno = str(cols[0]).strip()
|
||||
try:
|
||||
saldo = float(cols[2])
|
||||
except (TypeError, ValueError):
|
||||
continue
|
||||
if accno:
|
||||
rows.append((accno, saldo))
|
||||
return rows
|
||||
except (json.JSONDecodeError, ValueError):
|
||||
pass
|
||||
rowMatches = re.findall(r"<row>(.*?)</row>", body, re.DOTALL)
|
||||
for raw in rowMatches:
|
||||
cols = re.findall(r"<column>([^<]*)</column>", raw)
|
||||
if len(cols) >= 3:
|
||||
accno = cols[0].strip()
|
||||
try:
|
||||
saldo = float(cols[2])
|
||||
except (TypeError, ValueError):
|
||||
continue
|
||||
if accno:
|
||||
rows.append((accno, saldo))
|
||||
return rows
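# Example of the JSON variant (shape as described in the docstring, values invented):
#   body = '{"row": [{"column": ["1020", "Bank", "12500.55"]}, {"column": ["3400", "Ertrag", "-800.00"]}]}'
#   _parseSaldoBody(body) -> [("1020", 12500.55), ("3400", -800.0)]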
|
||||
|
||||
|
||||
class AccountingConnectorRma(BaseAccountingConnector):
|
||||
|
||||
def getConnectorType(self) -> str:
|
||||
|
|
@ -437,13 +506,201 @@ class AccountingConnectorRma(BaseAccountingConnector):
|
|||
"creditAmount": credit,
|
||||
"description": desc,
|
||||
})
|
||||
entry["totalAmount"] += max(debit, credit)
|
||||
# Booking total = sum of debits (== sum of credits for a balanced
|
||||
# booking). Summing max(debit, credit) per line would double-count
|
||||
# a balanced 2-line booking (200 instead of 100).
|
||||
entry["totalAmount"] += debit
|
||||
|
||||
return list(entriesByRef.values())
|
||||
except Exception as e:
|
||||
logger.error(f"RMA getJournalEntries error: {e}", exc_info=True)
|
||||
return []
|
||||
|
||||
async def getAccountBalances(
|
||||
self,
|
||||
config: Dict[str, Any],
|
||||
years: List[int],
|
||||
accountNumbers: Optional[List[str]] = None,
|
||||
) -> List[AccountingPeriodBalance]:
|
||||
"""Fetch authoritative closing balances per account and period via RMA's
|
||||
``GET /gl/saldo`` endpoint.
|
||||
|
||||
For each requested year we issue 13 API calls (one per month-end + one
|
||||
for the prior fiscal year-end as opening reference). The endpoint
|
||||
returns the cumulative balance per account at the requested ``to`` date,
|
||||
already including prior-year carry-over and year-end bookings -- which
|
||||
is exactly the value the local journal-line aggregation cannot
|
||||
reconstruct when the import window covers only part of the history.
|
||||
|
||||
``accno`` is mandatory; we use a digit-length-grouped wildcard
|
||||
(``xxxx`` matches all 4-digit accounts, ``xxxxx`` all 5-digit, etc.)
|
||||
derived from the chart of accounts, so 1-2 calls cover every account
|
||||
per period.
|
||||
"""
|
||||
if not years:
|
||||
return []
|
||||
|
||||
accountNumbersSet: Optional[set] = set(accountNumbers) if accountNumbers else None
|
||||
wildcardPatterns = await self._resolveWildcardPatterns(config)
|
||||
if not wildcardPatterns:
|
||||
logger.warning("RMA getAccountBalances: chart of accounts is empty, no wildcards derivable")
|
||||
return []
|
||||
|
||||
results: List[AccountingPeriodBalance] = []
|
||||
sortedYears = sorted({int(y) for y in years if y})
|
||||
|
||||
for year in sortedYears:
|
||||
priorYearEnd = f"{year - 1}-12-31"
|
||||
priorSaldosRaw = await self._fetchSaldoMapForDate(config, wildcardPatterns, priorYearEnd)
|
||||
# ER (income statement) accounts reset to 0 at the start of each
|
||||
# fiscal year -- prior-year YTD must NOT carry forward as opening.
|
||||
priorSaldos = {a: (0.0 if _isIncomeStatementAccount(a) else v) for a, v in priorSaldosRaw.items()}
|
||||
|
||||
runningOpening: Dict[str, float] = dict(priorSaldos)
|
||||
decSaldos: Dict[str, float] = {}
|
||||
|
||||
for month in range(1, 13):
|
||||
lastDay = _formatLastDayOfMonth(year, month)
|
||||
saldos = await self._fetchSaldoMapForDate(config, wildcardPatterns, lastDay)
|
||||
|
||||
accountKeys = set(saldos.keys()) | set(runningOpening.keys())
|
||||
for accno in accountKeys:
|
||||
if accountNumbersSet is not None and accno not in accountNumbersSet:
|
||||
continue
|
||||
closing = saldos.get(accno, runningOpening.get(accno, 0.0))
|
||||
opening = runningOpening.get(accno, 0.0)
|
||||
results.append(AccountingPeriodBalance(
|
||||
accountNumber=accno,
|
||||
periodYear=year,
|
||||
periodMonth=month,
|
||||
openingBalance=round(opening, 2),
|
||||
closingBalance=round(closing, 2),
|
||||
currency="CHF",
|
||||
asOfDate=lastDay,
|
||||
))
|
||||
runningOpening = {**runningOpening, **saldos}
|
||||
if month == 12:
|
||||
decSaldos = dict(saldos)
|
||||
|
||||
annualKeys = set(decSaldos.keys()) | set(priorSaldos.keys())
|
||||
for accno in annualKeys:
|
||||
if accountNumbersSet is not None and accno not in accountNumbersSet:
|
||||
continue
|
||||
closing = decSaldos.get(accno, priorSaldos.get(accno, 0.0))
|
||||
opening = priorSaldos.get(accno, 0.0)
|
||||
results.append(AccountingPeriodBalance(
|
||||
accountNumber=accno,
|
||||
periodYear=year,
|
||||
periodMonth=0,
|
||||
openingBalance=round(opening, 2),
|
||||
closingBalance=round(closing, 2),
|
||||
currency="CHF",
|
||||
asOfDate=f"{year}-12-31",
|
||||
))
|
||||
|
||||
logger.info(
|
||||
"RMA getAccountBalances: %s rows for years=%s, wildcards=%s",
|
||||
len(results), sortedYears, wildcardPatterns,
|
||||
)
|
||||
return results
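# Call schedule per requested year (example for 2024): one prior-year
# reference plus twelve month-end saldo snapshots:
#   to=2023-12-31, then to=2024-01-31, 2024-02-29, 2024-03-31, ..., 2024-11-30, 2024-12-31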
|
||||
|
||||
async def _resolveWildcardPatterns(self, config: Dict[str, Any]) -> List[str]:
|
||||
"""Derive `accno` wildcard patterns from the chart of accounts.
|
||||
|
||||
RMA's `/gl/saldo` requires `accno`; using digit-length-grouped
|
||||
wildcards (`xxxx`, `xxxxx`, ...) lets us cover every account in 1-2
|
||||
calls per period instead of one call per account number.
|
||||
"""
|
||||
try:
|
||||
charts = await self.getChartOfAccounts(config)
|
||||
except Exception as ex:
|
||||
logger.warning("RMA _resolveWildcardPatterns: getChartOfAccounts failed: %s", ex)
|
||||
return []
|
||||
lengths = set()
|
||||
for c in charts:
|
||||
accno = (c.accountNumber or "").strip()
|
||||
if accno.isdigit():
|
||||
lengths.add(len(accno))
|
||||
return [("x" * n) for n in sorted(lengths)]
|
||||
|
||||
async def _fetchSaldoMapForDate(
|
||||
self,
|
||||
config: Dict[str, Any],
|
||||
wildcardPatterns: List[str],
|
||||
toDate: str,
|
||||
) -> Dict[str, float]:
|
||||
"""Call `/gl/saldo` and return ``{accountNumber: cumulativeSaldo}``.
|
||||
|
||||
Per RMA docs ("Warning: Chart of the balance sheet do not need a from
|
||||
date. Charts of the income statement need from and to parameter."),
|
||||
we issue **two** calls per pattern:
|
||||
|
||||
* No ``from`` -> correct cumulative saldo for balance-sheet accounts
|
||||
(1xxx, 2xxx in Swiss KMU-Kontenrahmen).
|
||||
* ``from=YYYY-01-01`` (year of ``toDate``) -> correct YTD result for
|
||||
income-statement accounts (3xxx..9xxx, which reset annually).
|
||||
|
||||
Per account number we keep the value from the appropriate call.
|
||||
Empty / failed responses are logged at DEBUG and skipped to avoid
|
||||
aborting the whole sync.
|
||||
"""
|
||||
yearStart = f"{toDate[:4]}-01-01"
|
||||
bsRows: Dict[str, float] = {}
|
||||
erRows: Dict[str, float] = {}
|
||||
for pattern in wildcardPatterns:
|
||||
try:
|
||||
bs = await self._fetchSaldoRows(config, accno=pattern, fromDate=None, toDate=toDate)
|
||||
except Exception as ex:
|
||||
logger.debug("RMA _fetchSaldoMapForDate(BS, pattern=%s, to=%s) failed: %s", pattern, toDate, ex)
|
||||
bs = []
|
||||
try:
|
||||
er = await self._fetchSaldoRows(config, accno=pattern, fromDate=yearStart, toDate=toDate)
|
||||
except Exception as ex:
|
||||
logger.debug("RMA _fetchSaldoMapForDate(ER, pattern=%s, %s..%s) failed: %s", pattern, yearStart, toDate, ex)
|
||||
er = []
|
||||
for accno, saldo in bs:
|
||||
bsRows[accno] = saldo
|
||||
for accno, saldo in er:
|
||||
erRows[accno] = saldo
|
||||
|
||||
merged: Dict[str, float] = {}
|
||||
for accno in set(bsRows) | set(erRows):
|
||||
if _isIncomeStatementAccount(accno):
|
||||
merged[accno] = erRows.get(accno, bsRows.get(accno, 0.0))
|
||||
else:
|
||||
merged[accno] = bsRows.get(accno, erRows.get(accno, 0.0))
|
||||
return merged
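# Merge example (figures invented): balance-sheet accounts keep the no-`from`
# value, income-statement accounts keep the year-to-date value.
#   bsRows = {"1020": 12500.0, "3400": 99999.0}   # cumulative since inception
#   erRows = {"1020": 2300.0,  "3400": -800.0}    # from Jan 1 of toDate's year
#   merged -> {"1020": 12500.0, "3400": -800.0}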
|
||||
|
||||
async def _fetchSaldoRows(
|
||||
self,
|
||||
config: Dict[str, Any],
|
||||
accno: str,
|
||||
fromDate: Optional[str],
|
||||
toDate: str,
|
||||
) -> List[tuple]:
|
||||
"""Single `/gl/saldo` call. Returns list of ``(accountNumber, saldo)`` tuples."""
|
||||
url = self._buildUrl(config, "gl/saldo")
|
||||
params: Dict[str, str] = {
|
||||
"accno": accno,
|
||||
"to": toDate,
|
||||
"bookkeeping_main_curr": "true",
|
||||
}
|
||||
if fromDate:
|
||||
params["from"] = fromDate
|
||||
async with aiohttp.ClientSession() as session:
|
||||
async with session.get(
|
||||
url,
|
||||
headers=self._buildHeaders(config),
|
||||
params=params,
|
||||
timeout=aiohttp.ClientTimeout(total=20),
|
||||
) as resp:
|
||||
if resp.status != 200:
|
||||
body = await resp.text()
|
||||
logger.debug("RMA /gl/saldo accno=%s from=%s to=%s -> HTTP %s: %s", accno, fromDate, toDate, resp.status, body[:200])
|
||||
return []
|
||||
body = await resp.text()
|
||||
return _parseSaldoBody(body)
|
||||
|
||||
async def _fetchGlBulk(self, config: Dict[str, Any], params: Dict[str, str]) -> List[Dict[str, Any]]:
|
||||
"""Try GET /gl to fetch journal entries in bulk (not all RMA versions support this)."""
|
||||
try:
|
||||
|
|
@ -494,7 +751,9 @@ class AccountingConnectorRma(BaseAccountingConnector):
|
|||
"creditAmount": credit,
|
||||
"description": t.get("memo", ""),
|
||||
})
|
||||
totalAmt += max(debit, credit)
|
||||
# Sum debits only -- equals sum of credits for a balanced
|
||||
# booking. max(debit, credit) per line would double-count.
|
||||
totalAmt += debit
|
||||
|
||||
entries.append({
|
||||
"externalId": str(batch.get("id", ref)),
|
||||
|
@ -3,7 +3,7 @@
|
|||
"""Trustee models: TrusteeOrganisation, TrusteeRole, TrusteeAccess, TrusteeContract, TrusteeDocument, TrusteePosition."""
|
||||
|
||||
from enum import Enum
|
||||
from typing import Optional, Dict
|
||||
from typing import Optional, Dict, Any
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
from modules.datamodels.datamodelBase import PowerOnModel
|
||||
|
|
@ -46,7 +46,7 @@ class TrusteeOrganisation(PowerOnModel):
|
|||
description="Mandate ID (system-level organisation)",
|
||||
json_schema_extra={
|
||||
"label": "Mandat",
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False
|
||||
|
|
@ -57,7 +57,7 @@ class TrusteeOrganisation(PowerOnModel):
|
|||
description="Feature Instance ID for instance-level isolation",
|
||||
json_schema_extra={
|
||||
"label": "Feature-Instanz",
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False
|
||||
|
|
@ -92,7 +92,7 @@ class TrusteeRole(PowerOnModel):
|
|||
description="Mandate ID",
|
||||
json_schema_extra={
|
||||
"label": "Mandat",
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False
|
||||
|
|
@ -103,7 +103,7 @@ class TrusteeRole(PowerOnModel):
|
|||
description="Feature Instance ID for instance-level isolation",
|
||||
json_schema_extra={
|
||||
"label": "Feature-Instanz",
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False
|
||||
|
|
@ -132,7 +132,7 @@ class TrusteeAccess(PowerOnModel):
|
|||
"frontend_readonly": False,
|
||||
"frontend_required": True,
|
||||
"frontend_options": "/api/trustee/{instanceId}/organisations/options",
|
||||
"fk_target": {"db": "poweron_trustee", "table": "TrusteeOrganisation"},
|
||||
"fk_target": {"db": "poweron_trustee", "table": "TrusteeOrganisation", "labelField": "label"},
|
||||
}
|
||||
)
|
||||
roleId: str = Field(
|
||||
|
|
@ -143,7 +143,7 @@ class TrusteeAccess(PowerOnModel):
|
|||
"frontend_readonly": False,
|
||||
"frontend_required": True,
|
||||
"frontend_options": "/api/trustee/{instanceId}/roles/options",
|
||||
"fk_target": {"db": "poweron_trustee", "table": "TrusteeRole"},
|
||||
"fk_target": {"db": "poweron_trustee", "table": "TrusteeRole", "labelField": "desc"},
|
||||
}
|
||||
)
|
||||
userId: str = Field(
|
||||
|
|
@ -154,7 +154,7 @@ class TrusteeAccess(PowerOnModel):
|
|||
"frontend_readonly": False,
|
||||
"frontend_required": True,
|
||||
"frontend_options": "/api/users/options",
|
||||
"fk_target": {"db": "poweron_app", "table": "User"},
|
||||
"fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
|
||||
}
|
||||
)
|
||||
contractId: Optional[str] = Field(
|
||||
|
|
@ -167,7 +167,7 @@ class TrusteeAccess(PowerOnModel):
|
|||
"frontend_required": False,
|
||||
"frontend_options": "/api/trustee/{instanceId}/contracts/options",
|
||||
"frontend_depends_on": "organisationId",
|
||||
"fk_target": {"db": "poweron_trustee", "table": "TrusteeContract"},
|
||||
"fk_target": {"db": "poweron_trustee", "table": "TrusteeContract", "labelField": "label"},
|
||||
}
|
||||
)
|
||||
mandateId: Optional[str] = Field(
|
||||
|
|
@ -175,7 +175,7 @@ class TrusteeAccess(PowerOnModel):
|
|||
description="Mandate ID",
|
||||
json_schema_extra={
|
||||
"label": "Mandat",
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False
|
||||
|
|
@ -186,7 +186,7 @@ class TrusteeAccess(PowerOnModel):
|
|||
description="Feature Instance ID for instance-level isolation",
|
||||
json_schema_extra={
|
||||
"label": "Feature-Instanz",
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False
|
||||
|
|
@ -215,7 +215,7 @@ class TrusteeContract(PowerOnModel):
|
|||
"frontend_readonly": False, # Editable at creation, then readonly
|
||||
"frontend_required": True,
|
||||
"frontend_options": "/api/trustee/{instanceId}/organisations/options",
|
||||
"fk_target": {"db": "poweron_trustee", "table": "TrusteeOrganisation"},
|
||||
"fk_target": {"db": "poweron_trustee", "table": "TrusteeOrganisation", "labelField": "label"},
|
||||
}
|
||||
)
|
||||
label: str = Field(
|
||||
|
|
@ -242,7 +242,7 @@ class TrusteeContract(PowerOnModel):
|
|||
description="Mandate ID",
|
||||
json_schema_extra={
|
||||
"label": "Mandat",
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False
|
||||
|
|
@ -253,7 +253,7 @@ class TrusteeContract(PowerOnModel):
|
|||
description="Feature Instance ID for instance-level isolation",
|
||||
json_schema_extra={
|
||||
"label": "Feature-Instanz",
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False
|
||||
|
|
@ -311,7 +311,7 @@ class TrusteeDocument(PowerOnModel):
|
|||
"frontend_type": "file_reference",
|
||||
"frontend_readonly": False,
|
||||
"frontend_required": False,
|
||||
"fk_target": {"db": "poweron_management", "table": "FileItem"},
|
||||
"fk_target": {"db": "poweron_management", "table": "FileItem", "labelField": "fileName"},
|
||||
}
|
||||
)
|
||||
documentName: str = Field(
|
||||
|
|
@ -359,7 +359,7 @@ class TrusteeDocument(PowerOnModel):
|
|||
description="Mandate ID (auto-set from context)",
|
||||
json_schema_extra={
|
||||
"label": "Mandat",
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
|
|
@ -371,7 +371,7 @@ class TrusteeDocument(PowerOnModel):
|
|||
description="Feature Instance ID for instance-level isolation (auto-set from context)",
|
||||
json_schema_extra={
|
||||
"label": "Feature-Instanz",
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
|
|
@ -439,7 +439,7 @@ class TrusteePosition(PowerOnModel):
|
|||
"frontend_readonly": False,
|
||||
"frontend_required": False,
|
||||
"frontend_options": "/api/trustee/{instanceId}/documents/options",
|
||||
"fk_target": {"db": "poweron_trustee", "table": "TrusteeDocument"},
|
||||
"fk_target": {"db": "poweron_trustee", "table": "TrusteeDocument", "labelField": "documentName"},
|
||||
}
|
||||
)
|
||||
bankDocumentId: Optional[str] = Field(
|
||||
|
|
@ -451,12 +451,12 @@ class TrusteePosition(PowerOnModel):
|
|||
"frontend_readonly": False,
|
||||
"frontend_required": False,
|
||||
"frontend_options": "/api/trustee/{instanceId}/documents/options",
|
||||
"fk_target": {"db": "poweron_trustee", "table": "TrusteeDocument"},
|
||||
"fk_target": {"db": "poweron_trustee", "table": "TrusteeDocument", "labelField": "documentName"},
|
||||
}
|
||||
)
|
||||
valuta: Optional[str] = Field(
|
||||
valuta: Optional[float] = Field(
|
||||
default=None,
|
||||
description="Value date (ISO format: YYYY-MM-DD)",
|
||||
description="Value date (UTC midnight unix timestamp)",
|
||||
json_schema_extra={
|
||||
"label": "Valutadatum",
|
||||
"frontend_type": "date",
|
||||
|
|
@ -684,9 +684,9 @@ class TrusteePosition(PowerOnModel):
|
|||
"frontend_required": False
|
||||
}
|
||||
)
|
||||
dueDate: Optional[str] = Field(
|
||||
dueDate: Optional[float] = Field(
|
||||
default=None,
|
||||
description="Payment due date (ISO format: YYYY-MM-DD)",
|
||||
description="Payment due date (UTC midnight unix timestamp)",
|
||||
json_schema_extra={
|
||||
"label": "Fälligkeitsdatum",
|
||||
"frontend_type": "date",
|
||||
|
|
@ -699,7 +699,7 @@ class TrusteePosition(PowerOnModel):
|
|||
description="Mandate ID (auto-set from context)",
|
||||
json_schema_extra={
|
||||
"label": "Mandat",
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
|
|
@ -711,7 +711,7 @@ class TrusteePosition(PowerOnModel):
|
|||
description="Feature Instance ID for instance-level isolation (auto-set from context)",
|
||||
json_schema_extra={
|
||||
"label": "Feature-Instanz",
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
|
|
@ -742,15 +742,15 @@ class TrusteeDataAccount(PowerOnModel):
|
|||
accountGroup: Optional[str] = Field(default=None, description="Account group/category", json_schema_extra={"label": "Gruppe"})
|
||||
currency: str = Field(default="CHF", description="Account currency", json_schema_extra={"label": "Währung"})
|
||||
isActive: bool = Field(default=True, json_schema_extra={"label": "Aktiv"})
|
||||
mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat", "fk_target": {"db": "poweron_app", "table": "Mandate"}})
|
||||
featureInstanceId: Optional[str] = Field(default=None, json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}})
|
||||
mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}})
|
||||
featureInstanceId: Optional[str] = Field(default=None, json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}})
|
||||
|
||||
@i18nModel("Buchung (Sync)")
|
||||
class TrusteeDataJournalEntry(PowerOnModel):
|
||||
"""Journal entry header synced from external accounting system."""
|
||||
id: str = Field(default_factory=lambda: str(uuid.uuid4()), json_schema_extra={"label": "ID"})
|
||||
externalId: Optional[str] = Field(default=None, description="ID in the source system", json_schema_extra={"label": "Externe ID"})
|
||||
bookingDate: Optional[str] = Field(default=None, description="Booking date (YYYY-MM-DD)", json_schema_extra={"label": "Datum"})
|
||||
bookingDate: Optional[float] = Field(default=None, description="Booking date (UTC unix timestamp)", json_schema_extra={"label": "Datum", "frontend_type": "timestamp"})
|
||||
reference: Optional[str] = Field(default=None, description="Booking reference / voucher number", json_schema_extra={"label": "Referenz"})
|
||||
description: str = Field(default="", description="Booking text", json_schema_extra={"label": "Beschreibung"})
|
||||
currency: str = Field(default="CHF", json_schema_extra={"label": "Währung"})
|
||||
|
|
@ -763,14 +763,14 @@ class TrusteeDataJournalEntry(PowerOnModel):
|
|||
"frontend_format": "R:#'###.00",
|
||||
},
|
||||
)
|
||||
mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat", "fk_target": {"db": "poweron_app", "table": "Mandate"}})
|
||||
featureInstanceId: Optional[str] = Field(default=None, json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}})
|
||||
mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}})
|
||||
featureInstanceId: Optional[str] = Field(default=None, json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}})
|
||||
|
||||
@i18nModel("Buchungszeile (Sync)")
|
||||
class TrusteeDataJournalLine(PowerOnModel):
|
||||
"""Journal entry line (debit/credit) synced from external accounting system."""
|
||||
id: str = Field(default_factory=lambda: str(uuid.uuid4()), json_schema_extra={"label": "ID"})
|
||||
journalEntryId: str = Field(description="FK → TrusteeDataJournalEntry.id", json_schema_extra={"label": "Buchung", "fk_target": {"db": "poweron_trustee", "table": "TrusteeDataJournalEntry"}})
|
||||
journalEntryId: str = Field(description="FK → TrusteeDataJournalEntry.id", json_schema_extra={"label": "Buchung", "fk_target": {"db": "poweron_trustee", "table": "TrusteeDataJournalEntry", "labelField": "reference"}})
|
||||
accountNumber: str = Field(description="Account number", json_schema_extra={"label": "Konto"})
|
||||
debitAmount: float = Field(default=0.0, json_schema_extra={"label": "Soll", "frontend_format": "R:#'###.00"})
|
||||
creditAmount: float = Field(default=0.0, json_schema_extra={"label": "Haben", "frontend_format": "R:#'###.00"})
|
||||
|
|
@ -778,8 +778,8 @@ class TrusteeDataJournalLine(PowerOnModel):
|
|||
taxCode: Optional[str] = Field(default=None, json_schema_extra={"label": "Steuercode"})
|
||||
costCenter: Optional[str] = Field(default=None, json_schema_extra={"label": "Kostenstelle"})
|
||||
description: str = Field(default="", json_schema_extra={"label": "Beschreibung"})
|
||||
mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat", "fk_target": {"db": "poweron_app", "table": "Mandate"}})
|
||||
featureInstanceId: Optional[str] = Field(default=None, json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}})
|
||||
mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}})
|
||||
featureInstanceId: Optional[str] = Field(default=None, json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}})
|
||||
|
||||
@i18nModel("Kontakt (Sync)")
|
||||
class TrusteeDataContact(PowerOnModel):
|
||||
|
|
@ -796,8 +796,8 @@ class TrusteeDataContact(PowerOnModel):
|
|||
email: Optional[str] = Field(default=None, json_schema_extra={"label": "E-Mail"})
|
||||
phone: Optional[str] = Field(default=None, json_schema_extra={"label": "Telefon"})
|
||||
vatNumber: Optional[str] = Field(default=None, json_schema_extra={"label": "MWST-Nr."})
|
||||
mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat", "fk_target": {"db": "poweron_app", "table": "Mandate"}})
|
||||
featureInstanceId: Optional[str] = Field(default=None, json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}})
|
||||
mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}})
|
||||
featureInstanceId: Optional[str] = Field(default=None, json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}})
|
||||
|
||||
@i18nModel("Kontosaldo (Sync)")
|
||||
class TrusteeDataAccountBalance(PowerOnModel):
|
||||
|
|
@ -811,8 +811,8 @@ class TrusteeDataAccountBalance(PowerOnModel):
|
|||
creditTotal: float = Field(default=0.0, json_schema_extra={"label": "Haben-Umsatz", "frontend_format": "R:#'###.00"})
|
||||
closingBalance: float = Field(default=0.0, json_schema_extra={"label": "Schlusssaldo", "frontend_format": "R:#'###.00"})
|
||||
currency: str = Field(default="CHF", json_schema_extra={"label": "Währung"})
|
||||
mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat", "fk_target": {"db": "poweron_app", "table": "Mandate"}})
|
||||
featureInstanceId: Optional[str] = Field(default=None, json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}})
|
||||
mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}})
|
||||
featureInstanceId: Optional[str] = Field(default=None, json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}})
|
||||
|
||||
@i18nModel("Buchhaltungs-Konfiguration")
|
||||
class TrusteeAccountingConfig(PowerOnModel):
|
||||
|
|
@ -822,20 +822,57 @@ class TrusteeAccountingConfig(PowerOnModel):
|
|||
Credentials are stored encrypted (decrypted at runtime by the AccountingBridge).
|
||||
"""
|
||||
id: str = Field(default_factory=lambda: str(uuid.uuid4()), json_schema_extra={"label": "ID"})
|
||||
featureInstanceId: str = Field(description="FK -> FeatureInstance.id (1:1)", json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}})
|
||||
featureInstanceId: str = Field(description="FK -> FeatureInstance.id (1:1)", json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}})
|
||||
connectorType: str = Field(description="Connector type key, e.g. 'rma', 'bexio', 'abacus'", json_schema_extra={"label": "System"})
|
||||
displayLabel: str = Field(default="", description="User-visible label for this integration", json_schema_extra={"label": "Bezeichnung"})
|
||||
encryptedConfig: str = Field(default="", description="Encrypted JSON blob with connector credentials", json_schema_extra={"label": "Verschlüsselte Konfiguration"})
|
||||
isActive: bool = Field(default=True, json_schema_extra={"label": "Aktiv"})
|
||||
lastSyncAt: Optional[float] = Field(default=None, description="Timestamp of last sync attempt", json_schema_extra={"label": "Letzte Synchronisation"})
|
||||
lastSyncAt: Optional[float] = Field(default=None, description="Timestamp of last sync attempt", json_schema_extra={"label": "Letzte Synchronisation", "frontend_type": "timestamp"})
|
||||
lastSyncStatus: Optional[str] = Field(default=None, description="Last sync result: success, error, partial", json_schema_extra={"label": "Status"})
|
||||
lastSyncErrorMessage: Optional[str] = Field(default=None, description="Error message when lastSyncStatus is error", json_schema_extra={"label": "Fehlermeldung"})
|
||||
lastSyncDateFrom: Optional[str] = Field(default=None, description="dateFrom (ISO date) of the last data import window", json_schema_extra={"label": "Letztes Import-Fenster von"})
|
||||
lastSyncDateTo: Optional[str] = Field(default=None, description="dateTo (ISO date) of the last data import window", json_schema_extra={"label": "Letztes Import-Fenster bis"})
|
||||
lastSyncCounts: Optional[Dict[str, int]] = Field(default=None, description="Per-entity counts of the last import (accounts, journalEntries, journalLines, contacts, accountBalances)", json_schema_extra={"label": "Letzte Import-Zaehler"})
|
||||
lastSyncDateFrom: Optional[float] = Field(default=None, description="dateFrom (UTC midnight unix timestamp) of the last data import window", json_schema_extra={"label": "Letztes Import-Fenster von", "frontend_type": "date"})
|
||||
lastSyncDateTo: Optional[float] = Field(default=None, description="dateTo (UTC midnight unix timestamp) of the last data import window", json_schema_extra={"label": "Letztes Import-Fenster bis", "frontend_type": "date"})
|
||||
lastSyncCounts: Optional[Dict[str, Any]] = Field(default=None, description="Last import summary: per-entity counts (accounts, journalEntries, journalLines, contacts, accountBalances) plus oldestBookingDate / newestBookingDate (ISO YYYY-MM-DD) for completeness verification", json_schema_extra={"label": "Letzte Import-Zaehler"})
|
||||
cachedChartOfAccounts: Optional[str] = Field(default=None, description="JSON-serialised chart of accounts cache (list of {accountNumber, label, accountType})", json_schema_extra={"label": "Cached Kontoplan"})
|
||||
chartCachedAt: Optional[float] = Field(default=None, description="Timestamp when cachedChartOfAccounts was last refreshed", json_schema_extra={"label": "Kontoplan-Cache-Zeitpunkt"})
|
||||
mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat", "fk_target": {"db": "poweron_app", "table": "Mandate"}})
|
||||
chartCachedAt: Optional[float] = Field(default=None, description="Timestamp when cachedChartOfAccounts was last refreshed", json_schema_extra={"label": "Kontoplan-Cache-Zeitpunkt", "frontend_type": "timestamp"})
|
||||
mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}})
|
||||
|
||||
@i18nModel("Position (Ansicht)")
|
||||
class TrusteePositionView(TrusteePosition):
|
||||
"""``TrusteePosition`` extended with computed display fields for the table.
|
||||
|
||||
The route enriches each row with the latest accounting-sync state so the
|
||||
frontend can render `syncStatus` (select) + `syncErrorMessage` (text) via
|
||||
`resolveColumnTypes` instead of hardcoded label maps in the page.
|
||||
"""
|
||||
|
||||
syncStatus: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Latest accounting-sync status for this position.",
|
||||
json_schema_extra={
|
||||
"label": "Synchronisierungsstatus",
|
||||
"frontend_type": "select",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"frontend_options": [
|
||||
{"value": "pending", "label": "Ausstehend"},
|
||||
{"value": "synced", "label": "Synchronisiert"},
|
||||
{"value": "error", "label": "Fehler"},
|
||||
{"value": "cancelled", "label": "Abgebrochen"},
|
||||
],
|
||||
},
|
||||
)
|
||||
syncErrorMessage: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Latest accounting-sync error message (if syncStatus == 'error').",
|
||||
json_schema_extra={
|
||||
"label": "Fehlermeldung",
|
||||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
@i18nModel("Buchhaltungs-Synchronisation")
|
||||
class TrusteeAccountingSync(PowerOnModel):
|
||||
|
|
@ -846,16 +883,16 @@ class TrusteeAccountingSync(PowerOnModel):
|
|||
id: str = Field(default_factory=lambda: str(uuid.uuid4()), json_schema_extra={"label": "ID"})
|
||||
positionId: str = Field(
|
||||
description="FK -> TrusteePosition.id",
|
||||
json_schema_extra={"label": "Position", "fk_target": {"db": "poweron_trustee", "table": "TrusteePosition"}},
|
||||
json_schema_extra={"label": "Position", "fk_target": {"db": "poweron_trustee", "table": "TrusteePosition", "labelField": None}},
|
||||
)
|
||||
featureInstanceId: str = Field(description="FK -> FeatureInstance.id", json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}})
|
||||
featureInstanceId: str = Field(description="FK -> FeatureInstance.id", json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}})
|
||||
connectorType: str = Field(description="Connector type at time of sync", json_schema_extra={"label": "System"})
|
||||
externalId: Optional[str] = Field(default=None, description="ID assigned by the external system", json_schema_extra={"label": "Externe ID"})
|
||||
externalReference: Optional[str] = Field(default=None, description="Reference in the external system", json_schema_extra={"label": "Externe Referenz"})
|
||||
syncStatus: str = Field(default="pending", description="pending | synced | error | cancelled", json_schema_extra={"label": "Status"})
|
||||
syncDirection: str = Field(default="push", description="push (local->ext) or pull (ext->local)", json_schema_extra={"label": "Richtung"})
|
||||
syncedAt: Optional[float] = Field(default=None, description="Timestamp of successful sync", json_schema_extra={"label": "Synchronisiert am"})
|
||||
syncedAt: Optional[float] = Field(default=None, description="Timestamp of successful sync", json_schema_extra={"label": "Synchronisiert am", "frontend_type": "timestamp"})
|
||||
errorMessage: Optional[str] = Field(default=None, json_schema_extra={"label": "Fehler"})
|
||||
bookingPayload: Optional[dict] = Field(default=None, description="Payload sent to the external system (audit)", json_schema_extra={"label": "Buchungs-Payload"})
|
||||
mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat", "fk_target": {"db": "poweron_app", "table": "Mandate"}})
|
||||
mandateId: Optional[str] = Field(default=None, json_schema_extra={"label": "Mandat", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}})
|
||||
|
||||
|
|
|
|||
|
|
@ -126,13 +126,11 @@ def _sanitisePositionPayload(data: Dict[str, Any]) -> Dict[str, Any]:
|
|||
"""Failsafe normalisation for TrusteePosition payloads before DB writes."""
|
||||
safeData = dict(data or {})
|
||||
|
||||
isoValuta = _normaliseIsoDate(safeData.get("valuta"))
|
||||
safeData["valuta"] = isoValuta
|
||||
valutaTs = _normaliseTimestamp(safeData.get("valuta"))
|
||||
safeData["valuta"] = valutaTs
|
||||
|
||||
safeData["transactionDateTime"] = _normaliseTimestamp(
|
||||
safeData.get("transactionDateTime"),
|
||||
fallbackIsoDate=isoValuta,
|
||||
)
|
||||
txTs = _normaliseTimestamp(safeData.get("transactionDateTime"))
|
||||
safeData["transactionDateTime"] = txTs if txTs is not None else valutaTs
|
||||
|
||||
safeData["bookingAmount"] = _toSafeFloat(safeData.get("bookingAmount"), defaultValue=0.0)
|
||||
safeData["originalAmount"] = _toSafeFloat(
|
||||
|
|
@ -148,7 +146,7 @@ def _sanitisePositionPayload(data: Dict[str, Any]) -> Dict[str, Any]:
|
|||
safeData["originalCurrency"] = str(originalCurrency).upper()
|
||||
|
||||
if "dueDate" in safeData and safeData["dueDate"]:
|
||||
safeData["dueDate"] = _normaliseIsoDate(safeData["dueDate"])
|
||||
safeData["dueDate"] = _normaliseTimestamp(safeData["dueDate"])
|
||||
|
||||
_VALID_DOC_TYPES = {"invoice", "expense_receipt", "bank_document", "contract", "unknown"}
|
||||
docType = safeData.get("documentType")
|
||||
|
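# Plausible behaviour of the (not shown) _normaliseTimestamp helper, written out only to
# illustrate what the sanitiser above now stores: valuta / dueDate become float unix
# timestamps (UTC midnight for date-only input), and transactionDateTime falls back to the
# valuta timestamp when absent. This is an assumption, not the shipped helper.
from datetime import datetime, timezone
from typing import Any, Optional

def normaliseTimestampSketch(value: Any) -> Optional[float]:
    if value in (None, ""):
        return None
    if isinstance(value, (int, float)):
        return float(value)
    try:
        return datetime.strptime(str(value)[:10], "%Y-%m-%d").replace(tzinfo=timezone.utc).timestamp()
    except ValueError:
        return None

# normaliseTimestampSketch("2024-03-31") -> 1711843200.0; normaliseTimestampSketch(1711843200) -> 1711843200.0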
|
@ -1109,10 +1107,15 @@ class TrusteeObjects:
|
|||
)
|
||||
|
||||
def _cleanDocumentRecords(records):
|
||||
return [
|
||||
TrusteeDocument(**{k: v for k, v in r.items() if not k.startswith("_") and k != "documentData"})
|
||||
for r in records
|
||||
]
|
||||
cleaned = []
|
||||
for r in records:
|
||||
labelCols = {k: v for k, v in r.items() if k.endswith("Label")}
|
||||
filteredFields = {k: v for k, v in r.items() if not k.startswith("_") and k != "documentData"}
|
||||
doc = TrusteeDocument(**filteredFields)
|
||||
d = doc.model_dump()
|
||||
d.update(labelCols)
|
||||
cleaned.append(d)
|
||||
return cleaned
|
||||
|
||||
if isinstance(result, PaginatedResult):
|
||||
result.items = _cleanDocumentRecords(result.items)
|
||||
|
|
@ -1133,10 +1136,15 @@ class TrusteeObjects:
|
|||
)
|
||||
|
||||
def _cleanDocumentRecords(records):
|
||||
return [
|
||||
TrusteeDocument(**{k: v for k, v in r.items() if not k.startswith("_") and k != "documentData"})
|
||||
for r in records
|
||||
]
|
||||
cleaned = []
|
||||
for r in records:
|
||||
labelCols = {k: v for k, v in r.items() if k.endswith("Label")}
|
||||
filteredFields = {k: v for k, v in r.items() if not k.startswith("_") and k != "documentData"}
|
||||
doc = TrusteeDocument(**filteredFields)
|
||||
d = doc.model_dump()
|
||||
d.update(labelCols)
|
||||
cleaned.append(d)
|
||||
return cleaned
|
||||
|
||||
if isinstance(result, PaginatedResult):
|
||||
result.items = _cleanDocumentRecords(result.items)
|
||||
|
|
@ -1297,10 +1305,13 @@ class TrusteeObjects:
|
|||
def _cleanAndValidate(records):
|
||||
items = []
|
||||
for record in records:
|
||||
labelCols = {k: v for k, v in record.items() if k.endswith("Label")}
|
||||
cleanedRecord = {k: v for k, v in record.items() if not k.startswith("_") or k in keepFields}
|
||||
position = self._toTrusteePositionOrDelete(cleanedRecord, deleteCorrupt=True)
|
||||
if position is not None:
|
||||
items.append(position)
|
||||
d = position.model_dump()
|
||||
d.update(labelCols)
|
||||
items.append(d)
|
||||
return items
|
||||
|
||||
if isinstance(result, PaginatedResult):
|
||||
|
|
|
|||
|
|
@ -394,9 +394,15 @@ TEMPLATE_WORKFLOWS = [
|
|||
{"id": "extract", "type": "trustee.extractFromFiles", "label": "Dokumente extrahieren", "_method": "trustee", "_action": "extractFromFiles",
|
||||
"parameters": {"featureInstanceId": "{{featureInstanceId}}", "prompt": ""}, "position": {"x": 250, "y": 0}},
|
||||
{"id": "process", "type": "trustee.processDocuments", "label": "Verarbeiten", "_method": "trustee", "_action": "processDocuments",
|
||||
"parameters": {"documentList": [], "featureInstanceId": "{{featureInstanceId}}"}, "position": {"x": 500, "y": 0}},
|
||||
"parameters": {
|
||||
"documentList": {"type": "ref", "nodeId": "extract", "path": ["documents"]},
|
||||
"featureInstanceId": "{{featureInstanceId}}",
|
||||
}, "position": {"x": 500, "y": 0}},
|
||||
{"id": "sync", "type": "trustee.syncToAccounting", "label": "Synchronisieren", "_method": "trustee", "_action": "syncToAccounting",
|
||||
"parameters": {"documentList": [], "featureInstanceId": "{{featureInstanceId}}"}, "position": {"x": 750, "y": 0}},
|
||||
"parameters": {
|
||||
"documentList": {"type": "ref", "nodeId": "process", "path": ["documents"]},
|
||||
"featureInstanceId": "{{featureInstanceId}}",
|
||||
}, "position": {"x": 750, "y": 0}},
|
||||
],
|
||||
"connections": [
|
||||
{"source": "trigger", "sourcePort": 0, "target": "extract", "targetPort": 0},
|
||||
|
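# Minimal sketch (assumption, not the actual workflow engine) of how a
# {"type": "ref", "nodeId": ..., "path": [...]} parameter, as used in the template
# workflows above, could be resolved against the outputs of upstream nodes.
from typing import Any, Dict

def resolveParameterSketch(value: Any, nodeOutputs: Dict[str, Any]) -> Any:
    """Resolve a node parameter; plain values pass through, refs follow nodeId + path."""
    if isinstance(value, dict) and value.get("type") == "ref":
        current: Any = nodeOutputs.get(value["nodeId"], {})
        for key in value.get("path", []):
            current = current.get(key) if isinstance(current, dict) else None
        return current
    return value

# Example: with nodeOutputs = {"extract": {"documents": [{"id": "d1"}]}}, the documentList
# parameter of the "process" node resolves to [{"id": "d1"}].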
|
|
|||
|
|
@ -30,6 +30,7 @@ from .datamodelFeatureTrustee import (
|
|||
TrusteeContract,
|
||||
TrusteeDocument,
|
||||
TrusteePosition,
|
||||
TrusteePositionView,
|
||||
TrusteeDataAccount,
|
||||
TrusteeDataJournalEntry,
|
||||
TrusteeDataJournalLine,
|
||||
|
|
@ -209,6 +210,7 @@ _TRUSTEE_ENTITY_MODELS = {
|
|||
"TrusteeContract": TrusteeContract,
|
||||
"TrusteeDocument": TrusteeDocument,
|
||||
"TrusteePosition": TrusteePosition,
|
||||
"TrusteePositionView": TrusteePositionView,
|
||||
# Read-only sync tables (TrusteeData*) and accounting bookkeeping
|
||||
"TrusteeDataAccount": TrusteeDataAccount,
|
||||
"TrusteeDataJournalEntry": TrusteeDataJournalEntry,
|
||||
|
|
@ -393,9 +395,10 @@ def get_position_options(
|
|||
items = result.items if hasattr(result, 'items') else result
|
||||
|
||||
def _makePositionLabel(p: TrusteePosition) -> str:
|
||||
from datetime import datetime as _dt, timezone as _tz
|
||||
parts = []
|
||||
if p.valuta:
|
||||
parts.append(str(p.valuta)[:10]) # Datum ohne Zeit
|
||||
parts.append(_dt.fromtimestamp(p.valuta, tz=_tz.utc).strftime("%Y-%m-%d"))
|
||||
if p.company:
|
||||
parts.append(p.company[:30])
|
||||
if p.desc:
|
||||
|
|
@ -412,34 +415,41 @@ def get_position_options(
|
|||
|
||||
# ===== Organisation Routes =====
|
||||
|
||||
@router.get("/{instanceId}/organisations", response_model=PaginatedResponse[TrusteeOrganisation])
|
||||
@router.get("/{instanceId}/organisations")
|
||||
@limiter.limit("30/minute")
|
||||
def get_organisations(
|
||||
request: Request,
|
||||
instanceId: str = Path(..., description="Feature Instance ID"),
|
||||
pagination: Optional[str] = Query(None, description="JSON-encoded PaginationParams"),
|
||||
context: RequestContext = Depends(getRequestContext)
|
||||
) -> PaginatedResponse[TrusteeOrganisation]:
|
||||
):
|
||||
"""Get all organisations for a feature instance with optional pagination."""
|
||||
from modules.routes.routeHelpers import enrichRowsWithFkLabels
|
||||
mandateId = _validateInstanceAccess(instanceId, context)
|
||||
|
||||
paginationParams = _parsePagination(pagination)
|
||||
interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
|
||||
result = interface.getAllOrganisations(paginationParams)
|
||||
|
||||
def _toDicts(items):
|
||||
return [r.model_dump() if hasattr(r, "model_dump") else r for r in items]
|
||||
|
||||
if paginationParams and hasattr(result, 'items'):
|
||||
return PaginatedResponse(
|
||||
items=result.items,
|
||||
pagination=PaginationMetadata(
|
||||
enriched = enrichRowsWithFkLabels(_toDicts(result.items), TrusteeOrganisation)
|
||||
return {
|
||||
"items": enriched,
|
||||
"pagination": PaginationMetadata(
|
||||
currentPage=paginationParams.page or 1,
|
||||
pageSize=paginationParams.pageSize or 20,
|
||||
totalItems=result.totalItems,
|
||||
totalPages=result.totalPages,
|
||||
sort=paginationParams.sort if paginationParams else [],
|
||||
filters=paginationParams.filters if paginationParams else None
|
||||
)
|
||||
)
|
||||
return PaginatedResponse(items=result if isinstance(result, list) else result.items, pagination=None)
|
||||
).model_dump(),
|
||||
}
|
||||
items = result if isinstance(result, list) else result.items
|
||||
enriched = enrichRowsWithFkLabels(_toDicts(items), TrusteeOrganisation)
|
||||
return {"items": enriched, "pagination": None}
|
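# Illustrative response shape only (row fields and values are made up): the route now returns
# a plain dict instead of PaginatedResponse, and enrichRowsWithFkLabels adds ``<field>Label``
# columns next to the raw FK ids. Pagination keys follow PaginationMetadata as used above.
exampleOrganisationsResponse = {
    "items": [
        {
            "id": "9b2f-example",
            "label": "Example Org",
            "mandateId": "41c7-example",
            "mandateIdLabel": "Muster AG",  # added by enrichRowsWithFkLabels (naming pattern assumed)
        },
    ],
    "pagination": {
        "currentPage": 1,
        "pageSize": 20,
        "totalItems": 1,
        "totalPages": 1,
        "sort": [],
        "filters": None,
    },
}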
||||
|
||||
|
||||
@router.get("/{instanceId}/organisations/{orgId}", response_model=TrusteeOrganisation)
|
||||
|
|
@ -525,34 +535,41 @@ def delete_organisation(
|
|||
|
||||
# ===== Role Routes =====
|
||||
|
||||
@router.get("/{instanceId}/roles", response_model=PaginatedResponse[TrusteeRole])
|
||||
@router.get("/{instanceId}/roles")
|
||||
@limiter.limit("30/minute")
|
||||
def get_roles(
|
||||
request: Request,
|
||||
instanceId: str = Path(..., description="Feature Instance ID"),
|
||||
pagination: Optional[str] = Query(None),
|
||||
context: RequestContext = Depends(getRequestContext)
|
||||
) -> PaginatedResponse[TrusteeRole]:
|
||||
):
|
||||
"""Get all roles with optional pagination."""
|
||||
from modules.routes.routeHelpers import enrichRowsWithFkLabels
|
||||
mandateId = _validateInstanceAccess(instanceId, context)
|
||||
|
||||
paginationParams = _parsePagination(pagination)
|
||||
interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
|
||||
result = interface.getAllRoles(paginationParams)
|
||||
|
||||
def _toDicts(items):
|
||||
return [r.model_dump() if hasattr(r, "model_dump") else r for r in items]
|
||||
|
||||
if paginationParams and hasattr(result, 'items'):
|
||||
return PaginatedResponse(
|
||||
items=result.items,
|
||||
pagination=PaginationMetadata(
|
||||
enriched = enrichRowsWithFkLabels(_toDicts(result.items), TrusteeRole)
|
||||
return {
|
||||
"items": enriched,
|
||||
"pagination": PaginationMetadata(
|
||||
currentPage=paginationParams.page or 1,
|
||||
pageSize=paginationParams.pageSize or 20,
|
||||
totalItems=result.totalItems,
|
||||
totalPages=result.totalPages,
|
||||
sort=paginationParams.sort if paginationParams else [],
|
||||
filters=paginationParams.filters if paginationParams else None
|
||||
)
|
||||
)
|
||||
return PaginatedResponse(items=result if isinstance(result, list) else result.items, pagination=None)
|
||||
).model_dump(),
|
||||
}
|
||||
items = result if isinstance(result, list) else result.items
|
||||
enriched = enrichRowsWithFkLabels(_toDicts(items), TrusteeRole)
|
||||
return {"items": enriched, "pagination": None}
|
||||
|
||||
|
||||
@router.get("/{instanceId}/roles/{roleId}", response_model=TrusteeRole)
|
||||
|
|
@ -638,34 +655,41 @@ def delete_role(
|
|||
|
||||
# ===== Access Routes =====
|
||||
|
||||
@router.get("/{instanceId}/access", response_model=PaginatedResponse[TrusteeAccess])
|
||||
@router.get("/{instanceId}/access")
|
||||
@limiter.limit("30/minute")
|
||||
def get_all_access(
|
||||
request: Request,
|
||||
instanceId: str = Path(..., description="Feature Instance ID"),
|
||||
pagination: Optional[str] = Query(None),
|
||||
context: RequestContext = Depends(getRequestContext)
|
||||
) -> PaginatedResponse[TrusteeAccess]:
|
||||
):
|
||||
"""Get all access records with optional pagination."""
|
||||
from modules.routes.routeHelpers import enrichRowsWithFkLabels
|
||||
mandateId = _validateInstanceAccess(instanceId, context)
|
||||
|
||||
paginationParams = _parsePagination(pagination)
|
||||
interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
|
||||
result = interface.getAllAccess(paginationParams)
|
||||
|
||||
def _toDicts(items):
|
||||
return [r.model_dump() if hasattr(r, "model_dump") else r for r in items]
|
||||
|
||||
if paginationParams and hasattr(result, 'items'):
|
||||
return PaginatedResponse(
|
||||
items=result.items,
|
||||
pagination=PaginationMetadata(
|
||||
enriched = enrichRowsWithFkLabels(_toDicts(result.items), TrusteeAccess)
|
||||
return {
|
||||
"items": enriched,
|
||||
"pagination": PaginationMetadata(
|
||||
currentPage=paginationParams.page or 1,
|
||||
pageSize=paginationParams.pageSize or 20,
|
||||
totalItems=result.totalItems,
|
||||
totalPages=result.totalPages,
|
||||
sort=paginationParams.sort if paginationParams else [],
|
||||
filters=paginationParams.filters if paginationParams else None
|
||||
)
|
||||
)
|
||||
return PaginatedResponse(items=result if isinstance(result, list) else result.items, pagination=None)
|
||||
).model_dump(),
|
||||
}
|
||||
items = result if isinstance(result, list) else result.items
|
||||
enriched = enrichRowsWithFkLabels(_toDicts(items), TrusteeAccess)
|
||||
return {"items": enriched, "pagination": None}
|
||||
|
||||
|
||||
@router.get("/{instanceId}/access/{accessId}", response_model=TrusteeAccess)
|
||||
|
|
@ -781,34 +805,41 @@ def delete_access(
|
|||
|
||||
# ===== Contract Routes =====
|
||||
|
||||
@router.get("/{instanceId}/contracts", response_model=PaginatedResponse[TrusteeContract])
|
||||
@router.get("/{instanceId}/contracts")
|
||||
@limiter.limit("30/minute")
|
||||
def get_contracts(
|
||||
request: Request,
|
||||
instanceId: str = Path(..., description="Feature Instance ID"),
|
||||
pagination: Optional[str] = Query(None),
|
||||
context: RequestContext = Depends(getRequestContext)
|
||||
) -> PaginatedResponse[TrusteeContract]:
|
||||
):
|
||||
"""Get all contracts with optional pagination."""
|
||||
from modules.routes.routeHelpers import enrichRowsWithFkLabels
|
||||
mandateId = _validateInstanceAccess(instanceId, context)
|
||||
|
||||
paginationParams = _parsePagination(pagination)
|
||||
interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
|
||||
result = interface.getAllContracts(paginationParams)
|
||||
|
||||
def _toDicts(items):
|
||||
return [r.model_dump() if hasattr(r, "model_dump") else r for r in items]
|
||||
|
||||
if paginationParams and hasattr(result, 'items'):
|
||||
return PaginatedResponse(
|
||||
items=result.items,
|
||||
pagination=PaginationMetadata(
|
||||
enriched = enrichRowsWithFkLabels(_toDicts(result.items), TrusteeContract)
|
||||
return {
|
||||
"items": enriched,
|
||||
"pagination": PaginationMetadata(
|
||||
currentPage=paginationParams.page or 1,
|
||||
pageSize=paginationParams.pageSize or 20,
|
||||
totalItems=result.totalItems,
|
||||
totalPages=result.totalPages,
|
||||
sort=paginationParams.sort if paginationParams else [],
|
||||
filters=paginationParams.filters if paginationParams else None
|
||||
)
|
||||
)
|
||||
return PaginatedResponse(items=result if isinstance(result, list) else result.items, pagination=None)
|
||||
).model_dump(),
|
||||
}
|
||||
items = result if isinstance(result, list) else result.items
|
||||
enriched = enrichRowsWithFkLabels(_toDicts(items), TrusteeContract)
|
||||
return {"items": enriched, "pagination": None}
|
||||
|
||||
|
||||
@router.get("/{instanceId}/contracts/{contractId}", response_model=TrusteeContract)
|
||||
|
|
@ -909,7 +940,7 @@ def delete_contract(
|
|||
|
||||
# ===== Document Routes =====
|
||||
|
||||
@router.get("/{instanceId}/documents", response_model=PaginatedResponse[TrusteeDocument])
|
||||
@router.get("/{instanceId}/documents")
|
||||
@limiter.limit("30/minute")
|
||||
def get_documents(
|
||||
request: Request,
|
||||
|
|
@ -918,7 +949,7 @@ def get_documents(
|
|||
mode: Optional[str] = Query(None, description="'filterValues' for distinct column values, 'ids' for all filtered IDs"),
|
||||
column: Optional[str] = Query(None, description="Column key (required when mode=filterValues)"),
|
||||
context: RequestContext = Depends(getRequestContext)
|
||||
) -> PaginatedResponse[TrusteeDocument]:
|
||||
):
|
||||
"""Get all documents (metadata only) with optional pagination."""
|
||||
mandateId = _validateInstanceAccess(instanceId, context)
|
||||
|
||||
|
|
@ -929,52 +960,37 @@ def get_documents(
|
|||
interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
|
||||
result = interface.getAllDocuments(paginationParams)
|
||||
|
||||
def _itemsToDicts(items):
|
||||
return [r.model_dump() if hasattr(r, 'model_dump') else r for r in items]
|
||||
|
||||
if paginationParams and hasattr(result, 'items'):
|
||||
return PaginatedResponse(
|
||||
items=result.items,
|
||||
pagination=PaginationMetadata(
|
||||
return {
|
||||
"items": _itemsToDicts(result.items),
|
||||
"pagination": PaginationMetadata(
|
||||
currentPage=paginationParams.page or 1,
|
||||
pageSize=paginationParams.pageSize or 20,
|
||||
totalItems=result.totalItems,
|
||||
totalPages=result.totalPages,
|
||||
sort=paginationParams.sort if paginationParams else [],
|
||||
filters=paginationParams.filters if paginationParams else None
|
||||
)
|
||||
)
|
||||
return PaginatedResponse(items=result if isinstance(result, list) else result.items, pagination=None)
|
||||
).model_dump(),
|
||||
}
|
||||
items = result if isinstance(result, list) else result.items
|
||||
return {"items": _itemsToDicts(items), "pagination": None}
|
||||
|
||||
|
||||
def _handleDocumentMode(instanceId, mandateId, mode, column, pagination, context):
|
||||
"""Handle mode=filterValues and mode=ids for trustee documents."""
|
||||
from modules.routes.routeHelpers import handleFilterValuesInMemory, handleIdsInMemory
|
||||
from modules.routes.routeHelpers import handleIdsInMemory, handleFilterValuesInMemory, enrichRowsWithFkLabels
|
||||
interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
|
||||
if mode == "filterValues":
|
||||
if not column:
|
||||
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
|
||||
try:
|
||||
from modules.interfaces.interfaceRbac import getDistinctColumnValuesWithRBAC
|
||||
interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
|
||||
from modules.routes.routeHelpers import parseCrossFilterPagination
|
||||
crossFilterPagination = parseCrossFilterPagination(column, pagination)
|
||||
from fastapi.responses import JSONResponse
|
||||
values = getDistinctColumnValuesWithRBAC(
|
||||
connector=interface.db,
|
||||
modelClass=TrusteeDocument,
|
||||
column=column,
|
||||
currentUser=interface.currentUser,
|
||||
pagination=crossFilterPagination,
|
||||
recordFilter=None,
|
||||
mandateId=interface.mandateId,
|
||||
featureInstanceId=interface.featureInstanceId,
|
||||
featureCode=interface.FEATURE_CODE
|
||||
)
|
||||
return JSONResponse(content=sorted(values, key=lambda v: str(v).lower()))
|
||||
except Exception:
|
||||
interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
|
||||
result = interface.getAllDocuments(None)
|
||||
items = [r.model_dump() if hasattr(r, 'model_dump') else r for r in (result.items if hasattr(result, 'items') else result)]
|
||||
enrichRowsWithFkLabels(items, TrusteeDocument)
|
||||
return handleFilterValuesInMemory(items, column, pagination)
|
||||
if mode == "ids":
|
||||
interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
|
||||
result = interface.getAllDocuments(None)
|
||||
items = [r.model_dump() if hasattr(r, 'model_dump') else r for r in (result.items if hasattr(result, 'items') else result)]
|
||||
return handleIdsInMemory(items, pagination)
|
||||
|
|
@ -1154,7 +1170,52 @@ def delete_document(
|
|||
|
||||
# ===== Position Routes =====
|
||||
|
||||
@router.get("/{instanceId}/positions", response_model=PaginatedResponse[TrusteePosition])
|
||||
def _buildSyncStatusByPosition(interface, instanceId: str) -> Dict[str, Dict[str, Optional[str]]]:
|
||||
"""Build a positionId -> {syncStatus, syncErrorMessage} map from
|
||||
`TrusteeAccountingSync` records for the given feature instance.
|
||||
|
||||
Preference order matches the historic UI logic: ``synced`` overrides
|
||||
``error``, so a successful retry hides an old failure. Any other status
|
||||
(`pending`, `cancelled`, ...) is kept verbatim.
|
||||
"""
|
||||
from .datamodelFeatureTrustee import TrusteeAccountingSync
|
||||
|
||||
syncRecords = interface.db.getRecordset(
|
||||
TrusteeAccountingSync, recordFilter={"featureInstanceId": instanceId}
|
||||
) or []
|
||||
|
||||
syncMap: Dict[str, Dict[str, Optional[str]]] = {}
|
||||
for rec in syncRecords:
|
||||
positionId = rec.get("positionId")
|
||||
if not positionId:
|
||||
continue
|
||||
status = rec.get("syncStatus")
|
||||
errorMessage = rec.get("errorMessage")
|
||||
current = syncMap.get(positionId)
|
||||
prefer = (
|
||||
current is None
|
||||
or status == "synced"
|
||||
or (current.get("syncStatus") != "synced" and status == "error")
|
||||
)
|
||||
if prefer:
|
||||
syncMap[positionId] = {
|
||||
"syncStatus": status,
|
||||
"syncErrorMessage": errorMessage,
|
||||
}
|
||||
return syncMap
|
||||
|
||||
|
||||
def _enrichPositionsWithSyncStatus(items: List[Dict[str, Any]], interface, instanceId: str) -> List[Dict[str, Any]]:
|
||||
"""In-place enrich each position dict with `syncStatus` + `syncErrorMessage`."""
|
||||
syncMap = _buildSyncStatusByPosition(interface, instanceId)
|
||||
for row in items:
|
||||
info = syncMap.get(row.get("id")) or {}
|
||||
row["syncStatus"] = info.get("syncStatus")
|
||||
row["syncErrorMessage"] = info.get("syncErrorMessage")
|
||||
return items
|
||||
|
||||
|
||||
@router.get("/{instanceId}/positions")
|
||||
@limiter.limit("30/minute")
|
||||
def get_positions(
|
||||
request: Request,
|
||||
|
|
@ -1163,7 +1224,7 @@ def get_positions(
|
|||
mode: Optional[str] = Query(None, description="'filterValues' for distinct column values, 'ids' for all filtered IDs"),
|
||||
column: Optional[str] = Query(None, description="Column key (required when mode=filterValues)"),
|
||||
context: RequestContext = Depends(getRequestContext)
|
||||
) -> PaginatedResponse[TrusteePosition]:
|
||||
):
|
||||
"""Get all positions with optional pagination."""
|
||||
mandateId = _validateInstanceAccess(instanceId, context)
|
||||
|
||||
|
|
@ -1174,54 +1235,47 @@ def get_positions(
|
|||
interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
|
||||
result = interface.getAllPositions(paginationParams)
|
||||
|
||||
def _itemsToDicts(items):
|
||||
return [r.model_dump() if hasattr(r, 'model_dump') else r for r in items]
|
||||
|
||||
if paginationParams and hasattr(result, 'items'):
|
||||
return PaginatedResponse(
|
||||
items=result.items,
|
||||
pagination=PaginationMetadata(
|
||||
items = _itemsToDicts(result.items)
|
||||
_enrichPositionsWithSyncStatus(items, interface, instanceId)
|
||||
return {
|
||||
"items": items,
|
||||
"pagination": PaginationMetadata(
|
||||
currentPage=paginationParams.page or 1,
|
||||
pageSize=paginationParams.pageSize or 20,
|
||||
totalItems=result.totalItems,
|
||||
totalPages=result.totalPages,
|
||||
sort=paginationParams.sort if paginationParams else [],
|
||||
filters=paginationParams.filters if paginationParams else None
|
||||
)
|
||||
)
|
||||
return PaginatedResponse(items=result if isinstance(result, list) else result.items, pagination=None)
|
||||
).model_dump(),
|
||||
}
|
||||
rawItems = result if isinstance(result, list) else result.items
|
||||
items = _itemsToDicts(rawItems)
|
||||
_enrichPositionsWithSyncStatus(items, interface, instanceId)
|
||||
return {"items": items, "pagination": None}
|
||||
|
||||
|
||||
def _handlePositionMode(instanceId, mandateId, mode, column, pagination, context):
|
||||
"""Handle mode=filterValues and mode=ids for trustee positions."""
|
||||
from modules.routes.routeHelpers import handleFilterValuesInMemory, handleIdsInMemory
|
||||
from modules.routes.routeHelpers import handleIdsInMemory, handleFilterValuesInMemory, enrichRowsWithFkLabels
|
||||
from .datamodelFeatureTrustee import TrusteePositionView
|
||||
interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
|
||||
if mode == "filterValues":
|
||||
if not column:
|
||||
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
|
||||
try:
|
||||
from modules.interfaces.interfaceRbac import getDistinctColumnValuesWithRBAC
|
||||
interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
|
||||
from modules.routes.routeHelpers import parseCrossFilterPagination
|
||||
crossFilterPagination = parseCrossFilterPagination(column, pagination)
|
||||
from fastapi.responses import JSONResponse
|
||||
values = getDistinctColumnValuesWithRBAC(
|
||||
connector=interface.db,
|
||||
modelClass=TrusteePosition,
|
||||
column=column,
|
||||
currentUser=interface.currentUser,
|
||||
pagination=crossFilterPagination,
|
||||
recordFilter=None,
|
||||
mandateId=interface.mandateId,
|
||||
featureInstanceId=interface.featureInstanceId,
|
||||
featureCode=interface.FEATURE_CODE
|
||||
)
|
||||
return JSONResponse(content=sorted(values, key=lambda v: str(v).lower()))
|
||||
except Exception:
|
||||
interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
|
||||
result = interface.getAllPositions(None)
|
||||
items = [r.model_dump() if hasattr(r, 'model_dump') else r for r in (result.items if hasattr(result, 'items') else result)]
|
||||
_enrichPositionsWithSyncStatus(items, interface, instanceId)
|
||||
# Use the view model so FK labels for the synthetic columns also resolve.
|
||||
enrichRowsWithFkLabels(items, TrusteePositionView)
|
||||
return handleFilterValuesInMemory(items, column, pagination)
|
||||
if mode == "ids":
|
||||
interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
|
||||
result = interface.getAllPositions(None)
|
||||
items = [r.model_dump() if hasattr(r, 'model_dump') else r for r in (result.items if hasattr(result, 'items') else result)]
|
||||
_enrichPositionsWithSyncStatus(items, interface, instanceId)
|
||||
return handleIdsInMemory(items, pagination)
|
||||
|
||||
|
||||
|
|
@ -1347,8 +1401,8 @@ def get_available_accounting_connectors(
|
|||
) -> List[Dict[str, Any]]:
|
||||
"""List all available accounting system connectors with their config fields."""
|
||||
_validateInstanceAccess(instanceId, context)
|
||||
from .accounting.accountingRegistry import _getAccountingRegistry
|
||||
return _getAccountingRegistry().getAvailableConnectors()
|
||||
from .accounting.accountingRegistry import getAccountingRegistry
|
||||
return getAccountingRegistry().getAvailableConnectors()
|
||||
|
||||
|
||||
# Placeholder returned for secret config fields so frontend can prefill form without sending real secrets.
|
||||
|
|
@ -1357,8 +1411,8 @@ _CONFIG_PLACEHOLDER = "***"
|
|||
|
||||
def _getConfigMasked(connectorType: str, plainConfig: Dict[str, Any]) -> Dict[str, str]:
|
||||
"""Build config with secret values replaced by placeholder for GET response."""
|
||||
from .accounting.accountingRegistry import _getAccountingRegistry
|
||||
connector = _getAccountingRegistry().getConnector(connectorType)
|
||||
from .accounting.accountingRegistry import getAccountingRegistry
|
||||
connector = getAccountingRegistry().getConnector(connectorType)
|
||||
if not connector:
|
||||
return {k: (v if isinstance(v, str) else str(v)) for k, v in (plainConfig or {}).items()}
|
||||
secretKeys = {f.key for f in connector.getRequiredConfigFields() if f.secret}
|
||||
|
|
@ -1562,38 +1616,84 @@ async def refresh_chart_of_accounts(
|
|||
return {"message": f"Chart of accounts refreshed: {len(charts)} entries", "count": len(charts)}
|
||||
|
||||
|
||||
@router.post("/{instanceId}/accounting/sync")
|
||||
@limiter.limit("5/minute")
|
||||
async def sync_positions_to_accounting(
|
||||
request: Request,
|
||||
instanceId: str = Path(..., description="Feature Instance ID"),
|
||||
data: Dict[str, Any] = Body(...),
|
||||
context: RequestContext = Depends(getRequestContext)
|
||||
) -> Dict[str, Any]:
|
||||
"""Sync positions to the accounting system. Body: { positionIds: [...] }"""
|
||||
mandateId = _validateInstanceAccess(instanceId, context)
|
||||
interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
|
||||
TRUSTEE_ACCOUNTING_PUSH_JOB_TYPE = "trusteeAccountingPush"
|
||||
|
||||
|
||||
async def _trusteeAccountingPushJobHandler(job: Dict[str, Any], progressCb) -> Dict[str, Any]:
|
||||
"""BackgroundJob handler: pushes a batch of positions to the external
|
||||
accounting system. Runs in the worker without blocking the original HTTP
|
||||
request, so the user can continue navigating while the sync runs.
|
||||
|
||||
Reads inputs from `job["payload"]` (`positionIds`) and reports incremental
|
||||
progress via `progressCb(percent, message)`. The job result has the same
|
||||
shape that the legacy synchronous endpoint used to return.
|
||||
"""
|
||||
from modules.security.rootAccess import getRootUser
|
||||
from .accounting.accountingBridge import AccountingBridge
|
||||
|
||||
instanceId = job["featureInstanceId"]
|
||||
mandateId = job["mandateId"]
|
||||
payload = job.get("payload") or {}
|
||||
positionIds: List[str] = list(payload.get("positionIds") or [])
|
||||
if not positionIds:
|
||||
return {"total": 0, "success": 0, "skipped": 0, "errors": 0, "results": []}
|
||||
|
||||
rootUser = getRootUser()
|
||||
interface = getInterface(rootUser, mandateId=mandateId, featureInstanceId=instanceId)
|
||||
bridge = AccountingBridge(interface)
|
||||
|
||||
positionIds = data.get("positionIds", [])
|
||||
if not positionIds:
|
||||
raise HTTPException(status_code=400, detail=routeApiMsg("positionIds required"))
|
||||
results = []
|
||||
total = len(positionIds)
|
||||
progressCb(2, f"Sync wird vorbereitet ({total} Position(en))...")
|
||||
|
||||
# Resolve connector + plain config once to avoid decryption rate-limits
|
||||
# (mirrors the optimisation in pushBatchToAccounting). We push positions
|
||||
# one-by-one inside the job so we can emit incremental progress and so
|
||||
# one bad row never aborts the rest.
|
||||
from .accounting.accountingBridge import SyncResult
|
||||
try:
|
||||
connector, plainConfig, configRecord = await bridge._resolveConnectorAndConfig(instanceId)
|
||||
except Exception as resolveErr:
|
||||
logger.exception("Accounting push: failed to resolve connector/config")
|
||||
progressCb(100, "Verbindungsaufbau fehlgeschlagen.")
|
||||
raise resolveErr
|
||||
|
||||
if not connector or not plainConfig:
|
||||
results = [SyncResult(success=False, errorMessage="No active accounting configuration found") for _ in positionIds]
|
||||
progressCb(100, "Keine aktive Buchhaltungs-Konfiguration gefunden.")
|
||||
return {
|
||||
"total": len(results),
|
||||
"success": 0,
|
||||
"skipped": 0,
|
||||
"errors": len(results),
|
||||
"results": [r.model_dump() for r in results],
|
||||
}
|
||||
|
||||
for index, positionId in enumerate(positionIds, start=1):
|
||||
result = await bridge.pushPositionToAccounting(
|
||||
instanceId,
|
||||
positionId,
|
||||
_resolvedConnector=connector,
|
||||
_resolvedPlainConfig=plainConfig,
|
||||
_resolvedConfigRecord=configRecord,
|
||||
)
|
||||
results.append(result)
|
||||
# Reserve 5..95% for the push loop, keep the tail for summary.
|
||||
pct = 5 + int(90 * index / total)
|
||||
progressCb(pct, f"Position {index}/{total} verarbeitet")
|
||||
|
||||
results = await bridge.pushBatchToAccounting(instanceId, positionIds)
|
||||
skipped = [r for r in results if not r.success and r.errorMessage and "already synced" in r.errorMessage]
|
||||
failed = [r for r in results if not r.success and r not in skipped]
|
||||
if skipped:
|
||||
logger.info(
|
||||
"Accounting sync: %s position(s) already synced, skipped",
|
||||
len(skipped),
|
||||
)
|
||||
logger.info("Accounting sync: %s position(s) already synced, skipped", len(skipped))
|
||||
if failed:
|
||||
logger.warning(
|
||||
"Accounting sync had %s failure(s): %s",
|
||||
len(failed),
|
||||
"; ".join(r.errorMessage or "unknown" for r in failed[:3]),
|
||||
)
|
||||
|
||||
progressCb(100, "Sync abgeschlossen.")
|
||||
return {
|
||||
"total": len(results),
|
||||
"success": sum(1 for r in results if r.success),
|
||||
|
|
@ -1603,6 +1703,50 @@ async def sync_positions_to_accounting(
|
|||
}
|
||||
|
||||
|
||||
try:
|
||||
from modules.serviceCenter.services.serviceBackgroundJobs import registerJobHandler as _registerPushJobHandler
|
||||
_registerPushJobHandler(TRUSTEE_ACCOUNTING_PUSH_JOB_TYPE, _trusteeAccountingPushJobHandler)
|
||||
except Exception as _pushRegErr:
|
||||
logger.warning("Failed to register trusteeAccountingPush job handler: %s", _pushRegErr)
|
||||
|
||||
|
||||
@router.post("/{instanceId}/accounting/sync", status_code=status.HTTP_202_ACCEPTED)
|
||||
@limiter.limit("5/minute")
|
||||
async def sync_positions_to_accounting(
|
||||
request: Request,
|
||||
instanceId: str = Path(..., description="Feature Instance ID"),
|
||||
data: Dict[str, Any] = Body(...),
|
||||
context: RequestContext = Depends(getRequestContext)
|
||||
) -> Dict[str, Any]:
|
||||
"""Submit a background job that pushes positions to the accounting system.
|
||||
|
||||
Body: ``{ positionIds: [...] }``
|
||||
|
||||
Returns ``{ jobId, status: "pending" }`` immediately so the user is not
|
||||
blocked while the (potentially long) external accounting calls run.
|
||||
Clients poll ``GET /api/jobs/{jobId}`` until status is ``SUCCESS`` /
|
||||
``ERROR`` and then read the same ``{ total, success, skipped, errors,
|
||||
results }`` payload from ``job.result`` that the legacy synchronous
|
||||
endpoint returned.
|
||||
"""
|
||||
from modules.serviceCenter.services.serviceBackgroundJobs import startJob
|
||||
|
||||
mandateId = _validateInstanceAccess(instanceId, context)
|
||||
|
||||
positionIds = data.get("positionIds", [])
|
||||
if not positionIds:
|
||||
raise HTTPException(status_code=400, detail=routeApiMsg("positionIds required"))
|
||||
|
||||
jobId = await startJob(
|
||||
TRUSTEE_ACCOUNTING_PUSH_JOB_TYPE,
|
||||
{"positionIds": list(positionIds)},
|
||||
mandateId=mandateId,
|
||||
featureInstanceId=instanceId,
|
||||
triggeredBy=context.user.id if context.user else None,
|
||||
)
|
||||
return {"jobId": jobId, "status": "pending"}
|
||||
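# Client-side sketch of the polling flow described in the docstring above. The 202 response,
# the jobs endpoint and the SUCCESS / ERROR status values come from this diff; base URL, auth
# header, route prefix and the exact job JSON field names are assumptions for illustration.
import time
import requests

def pushPositionsAndWait(baseUrl: str, token: str, instanceId: str, positionIds: list) -> dict:
    headers = {"Authorization": f"Bearer {token}"}
    resp = requests.post(
        f"{baseUrl}/api/features/trustee/{instanceId}/accounting/sync",  # prefix assumed
        json={"positionIds": positionIds},
        headers=headers,
        timeout=30,
    )
    resp.raise_for_status()
    jobId = resp.json()["jobId"]
    while True:
        job = requests.get(f"{baseUrl}/api/jobs/{jobId}", headers=headers, timeout=30).json()
        if job.get("status") in ("SUCCESS", "ERROR"):
            return job.get("result") or {}
        time.sleep(2)  # progress percent/message could be surfaced to the user here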
|
||||
|
||||
@router.post("/{instanceId}/accounting/sync/{positionId}")
|
||||
@limiter.limit("10/minute")
|
||||
async def sync_single_position_to_accounting(
|
||||
|
|
@ -1774,13 +1918,87 @@ def clear_ai_data_cache(
|
|||
instanceId: str = Path(..., description="Feature Instance ID"),
|
||||
context: RequestContext = Depends(getRequestContext),
|
||||
) -> Dict[str, Any]:
|
||||
"""Clear the AI feature-data query cache for this instance so the next AI query reads fresh DB data."""
|
||||
"""Clear ONLY the AI feature-data query result cache (in-memory, ~5 min TTL).
|
||||
|
||||
Important: this does NOT touch the synchronised ``TrusteeData*`` tables.
|
||||
The synced rows (chart of accounts, journal entries/lines, contacts, balances)
|
||||
stay exactly as imported. To wipe those rows, use POST .../wipe-imported-data.
|
||||
"""
|
||||
_validateInstanceAccess(instanceId, context)
|
||||
from modules.serviceCenter.services.serviceAgent.coreTools._featureSubAgentTools import clearFeatureQueryCache
|
||||
removed = clearFeatureQueryCache(instanceId)
|
||||
return {"cleared": removed, "featureInstanceId": instanceId}
|
||||
|
||||
|
||||
@router.post("/{instanceId}/accounting/wipe-imported-data")
|
||||
@limiter.limit("3/minute")
|
||||
def wipe_imported_accounting_data(
|
||||
request: Request,
|
||||
instanceId: str = Path(..., description="Feature Instance ID"),
|
||||
context: RequestContext = Depends(getRequestContext),
|
||||
) -> Dict[str, Any]:
|
||||
"""Delete every ``TrusteeData*`` row imported for this feature instance.
|
||||
|
||||
Use when the source system was changed, test data needs to be cleared, or
|
||||
the user suspects stale rows from earlier connector versions. Also resets
|
||||
the ``lastSync*`` markers on the active config so the UI no longer reports
|
||||
a stale "letzter Import" status. The connector configuration / credentials
|
||||
remain untouched -- only synchronised payload data is removed.
|
||||
"""
|
||||
mandateId = _validateInstanceAccess(instanceId, context)
|
||||
interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
|
||||
from .datamodelFeatureTrustee import (
|
||||
TrusteeDataAccount, TrusteeDataJournalEntry, TrusteeDataJournalLine,
|
||||
TrusteeDataContact, TrusteeDataAccountBalance, TrusteeAccountingConfig,
|
||||
)
|
||||
from modules.serviceCenter.services.serviceAgent.coreTools._featureSubAgentTools import clearFeatureQueryCache
|
||||
|
||||
removed: Dict[str, int] = {}
|
||||
for tableName, model in [
|
||||
("accounts", TrusteeDataAccount),
|
||||
("journalEntries", TrusteeDataJournalEntry),
|
||||
("journalLines", TrusteeDataJournalLine),
|
||||
("contacts", TrusteeDataContact),
|
||||
("accountBalances", TrusteeDataAccountBalance),
|
||||
]:
|
||||
try:
|
||||
removed[tableName] = int(interface.db.recordDeleteWhere(model, {"featureInstanceId": instanceId}) or 0)
|
||||
except Exception as ex:
|
||||
logger.warning("wipeImportedData: failed for %s: %s", tableName, ex)
|
||||
removed[tableName] = 0
|
||||
|
||||
cfgRecords = interface.db.getRecordset(
|
||||
TrusteeAccountingConfig,
|
||||
recordFilter={"featureInstanceId": instanceId, "isActive": True},
|
||||
)
|
||||
if cfgRecords:
|
||||
cfgId = cfgRecords[0].get("id")
|
||||
if cfgId:
|
||||
try:
|
||||
interface.db.recordModify(TrusteeAccountingConfig, cfgId, {
|
||||
"lastSyncAt": None,
|
||||
"lastSyncStatus": None,
|
||||
"lastSyncErrorMessage": None,
|
||||
"lastSyncDateFrom": None,
|
||||
"lastSyncDateTo": None,
|
||||
"lastSyncCounts": None,
|
||||
})
|
||||
except Exception as ex:
|
||||
logger.warning("wipeImportedData: failed to reset lastSync* on cfg %s: %s", cfgId, ex)
|
||||
|
||||
cacheCleared = clearFeatureQueryCache(instanceId)
|
||||
logger.info(
|
||||
"wipeImportedData instance=%s removed=%s cacheCleared=%s",
|
||||
instanceId, removed, cacheCleared,
|
||||
)
|
||||
return {
|
||||
"removed": removed,
|
||||
"totalRemoved": sum(removed.values()),
|
||||
"cacheCleared": cacheCleared,
|
||||
"featureInstanceId": instanceId,
|
||||
}
|
||||
|
||||
|
||||
# ===== Data Export =====
|
||||
|
||||
@router.get("/{instanceId}/accounting/export-data")
|
||||
|
|
@ -1917,13 +2135,13 @@ def _serializeRoleForApi(role) -> Dict[str, Any]:
|
|||
return payload
|
||||
|
||||
|
||||
@router.get("/{instanceId}/instance-roles", response_model=PaginatedResponse)
|
||||
@router.get("/{instanceId}/instance-roles")
|
||||
@limiter.limit("30/minute")
|
||||
def get_instance_roles(
|
||||
request: Request,
|
||||
instanceId: str = Path(..., description="Feature Instance ID"),
|
||||
context: RequestContext = Depends(getRequestContext)
|
||||
) -> PaginatedResponse:
|
||||
):
|
||||
"""
|
||||
Get all roles for this feature instance.
|
||||
Requires feature admin permission.
|
||||
|
|
@ -1931,14 +2149,9 @@ def get_instance_roles(
|
|||
mandateId = _validateInstanceAdmin(instanceId, context)
|
||||
|
||||
rootInterface = getRootInterface()
|
||||
|
||||
# Get instance-specific roles (Pydantic models)
|
||||
roles = rootInterface.getRolesByFeatureCode("trustee", featureInstanceId=instanceId)
|
||||
|
||||
return PaginatedResponse(
|
||||
items=[_serializeRoleForApi(r) for r in roles],
|
||||
pagination=None
|
||||
)
|
||||
return {"items": [_serializeRoleForApi(r) for r in roles], "pagination": None}
|
||||
|
||||
|
||||
@router.get("/{instanceId}/instance-roles/{roleId}", response_model=Dict[str, Any])
|
||||
|
|
@ -1965,14 +2178,14 @@ def get_instance_role(
|
|||
return _serializeRoleForApi(role)
|
||||
|
||||
|
||||
@router.get("/{instanceId}/instance-roles/{roleId}/rules", response_model=PaginatedResponse)
|
||||
@router.get("/{instanceId}/instance-roles/{roleId}/rules")
|
||||
@limiter.limit("30/minute")
|
||||
def get_instance_role_rules(
|
||||
request: Request,
|
||||
instanceId: str = Path(..., description="Feature Instance ID"),
|
||||
roleId: str = Path(..., description="Role ID"),
|
||||
context: RequestContext = Depends(getRequestContext)
|
||||
) -> PaginatedResponse:
|
||||
):
|
||||
"""
|
||||
Get all AccessRules for a specific instance role.
|
||||
Requires feature admin permission.
|
||||
|
|
@ -1981,18 +2194,13 @@ def get_instance_role_rules(
|
|||
|
||||
rootInterface = getRootInterface()
|
||||
|
||||
# Verify role belongs to this instance (Pydantic model)
|
||||
role = rootInterface.getRole(roleId)
|
||||
if not role or str(role.featureInstanceId) != instanceId:
|
||||
raise HTTPException(status_code=404, detail=f"Role {roleId} not found in this instance")
|
||||
|
||||
# Get AccessRules for this role (Pydantic models)
|
||||
rules = rootInterface.getAccessRulesByRole(roleId)
|
||||
|
||||
return PaginatedResponse(
|
||||
items=[r.model_dump() for r in rules],
|
||||
pagination=None
|
||||
)
|
||||
return {"items": [r.model_dump() for r in rules], "pagination": None}
|
||||
|
||||
|
||||
@router.post("/{instanceId}/instance-roles/{roleId}/rules", response_model=Dict[str, Any], status_code=201)
|
||||
|
|
@ -2148,6 +2356,63 @@ def delete_instance_role_rule(
|
|||
# (Unified Filter API: mode=filterValues / mode=ids).
|
||||
|
||||
|
||||
def _buildFeatureInternalResolvers(modelClass, db) -> Dict[str, Any]:
|
||||
"""Build ``extraResolvers`` for FK fields that point to other Trustee models.
|
||||
|
||||
The built-in ``enrichRowsWithFkLabels`` only covers Mandate / FeatureInstance /
|
||||
User / Role. Feature-internal FKs (e.g. ``journalEntryId`` -> ``TrusteeDataJournalEntry``)
|
||||
need a resolver that queries the Trustee DB. This function discovers such fields
|
||||
from the Pydantic model's ``fk_target`` annotations and creates a resolver per field.
|
||||
|
||||
Label strategy per target model:
|
||||
- ``TrusteeDataJournalEntry``: ``"<externalId> | <bookingDate>"``
|
||||
- Generic fallback: ``"<externalId>"`` or ``"<id[:8]>"``
|
||||
"""
|
||||
resolvers: Dict[str, Any] = {}
|
||||
for name, fieldInfo in modelClass.model_fields.items():
|
||||
extra = fieldInfo.json_schema_extra
|
||||
if not extra or not isinstance(extra, dict):
|
||||
continue
|
||||
tgt = extra.get("fk_target")
|
||||
if not isinstance(tgt, dict):
|
||||
continue
|
||||
tableName = tgt.get("table", "")
|
||||
if tableName not in _TRUSTEE_ENTITY_MODELS:
|
||||
continue
|
||||
targetModel = _TRUSTEE_ENTITY_MODELS[tableName]
|
||||
|
||||
def _makeResolver(model, field=name):
|
||||
def _resolve(ids: List[str]) -> Dict[str, Optional[str]]:
|
||||
result: Dict[str, Optional[str]] = {i: None for i in ids}
|
||||
try:
|
||||
recs = db.getRecordset(model, recordFilter={"id": list(set(ids))}) or []
|
||||
except Exception:
|
||||
return result
|
||||
for r in recs:
|
||||
row = r if isinstance(r, dict) else r.model_dump() if hasattr(r, "model_dump") else {}
|
||||
rid = row.get("id", "")
|
||||
parts = []
|
||||
for col in ("externalId", "reference", "bookingDate", "label", "name", "accountNumber"):
|
||||
val = row.get(col)
|
||||
if val is not None and val != "":
|
||||
if col == "bookingDate" and isinstance(val, (int, float)):
|
||||
from datetime import datetime, timezone
|
||||
try:
|
||||
parts.append(datetime.fromtimestamp(val, tz=timezone.utc).strftime("%Y-%m-%d"))
|
||||
except Exception:
|
||||
parts.append(str(val))
|
||||
else:
|
||||
parts.append(str(val))
|
||||
if len(parts) >= 2:
|
||||
break
|
||||
result[rid] = " | ".join(parts) if parts else rid[:8]
|
||||
return result
|
||||
return _resolve
|
||||
|
||||
resolvers[name] = _makeResolver(targetModel)
|
||||
return resolvers
|
||||
|
||||
|
||||
def _paginatedReadEndpoint(
|
||||
*,
|
||||
instanceId: str,
|
||||
|
|
@ -2166,14 +2431,12 @@ def _paginatedReadEndpoint(
|
|||
"""
|
||||
from modules.interfaces.interfaceRbac import (
|
||||
getRecordsetPaginatedWithRBAC,
|
||||
getDistinctColumnValuesWithRBAC,
|
||||
)
|
||||
from modules.routes.routeHelpers import (
|
||||
handleFilterValuesInMemory,
|
||||
handleIdsInMemory,
|
||||
parseCrossFilterPagination,
|
||||
handleFilterValuesInMemory,
|
||||
enrichRowsWithFkLabels,
|
||||
)
|
||||
from fastapi.responses import JSONResponse
|
||||
|
||||
mandateId = _validateInstanceAccess(instanceId, context)
|
||||
interface = getInterface(context.user, mandateId=mandateId, featureInstanceId=instanceId)
|
||||
|
|
@ -2181,21 +2444,6 @@ def _paginatedReadEndpoint(
|
|||
if mode == "filterValues":
|
||||
if not column:
|
||||
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
|
||||
try:
|
||||
crossFilterPagination = parseCrossFilterPagination(column, pagination)
|
||||
values = getDistinctColumnValuesWithRBAC(
|
||||
connector=interface.db,
|
||||
modelClass=modelClass,
|
||||
column=column,
|
||||
currentUser=interface.currentUser,
|
||||
pagination=crossFilterPagination,
|
||||
recordFilter=None,
|
||||
mandateId=interface.mandateId,
|
||||
featureInstanceId=interface.featureInstanceId,
|
||||
featureCode=interface.FEATURE_CODE,
|
||||
)
|
||||
return JSONResponse(content=sorted(values, key=lambda v: str(v).lower()))
|
||||
except Exception:
|
||||
result = getRecordsetPaginatedWithRBAC(
|
||||
connector=interface.db,
|
||||
modelClass=modelClass,
|
||||
|
|
@ -2206,8 +2454,10 @@ def _paginatedReadEndpoint(
|
|||
featureInstanceId=interface.featureInstanceId,
|
||||
featureCode=interface.FEATURE_CODE,
|
||||
)
|
||||
items = result.items if hasattr(result, "items") else result
|
||||
items = [r.model_dump() if hasattr(r, "model_dump") else r for r in items]
|
||||
rawItems = result.items if hasattr(result, "items") else result
|
||||
items = [r.model_dump() if hasattr(r, "model_dump") else r for r in rawItems]
|
||||
featureResolvers = _buildFeatureInternalResolvers(modelClass, interface.db)
|
||||
enrichRowsWithFkLabels(items, modelClass, extraResolvers=featureResolvers or None)
|
||||
return handleFilterValuesInMemory(items, column, pagination)
|
||||
|
||||
if mode == "ids":
|
||||
|
|
@ -2237,23 +2487,36 @@ def _paginatedReadEndpoint(
|
|||
featureCode=interface.FEATURE_CODE,
|
||||
)
|
||||
|
||||
def _itemsToDicts(rawItems):
|
||||
return [r.model_dump() if hasattr(r, "model_dump") else r for r in rawItems]
|
||||
|
||||
featureResolvers = _buildFeatureInternalResolvers(modelClass, interface.db)
|
||||
|
||||
if paginationParams and hasattr(result, "items"):
|
||||
return PaginatedResponse(
|
||||
items=result.items,
|
||||
pagination=PaginationMetadata(
|
||||
enriched = enrichRowsWithFkLabels(
|
||||
_itemsToDicts(result.items), modelClass,
|
||||
extraResolvers=featureResolvers or None,
|
||||
)
|
||||
return {
|
||||
"items": enriched,
|
||||
"pagination": PaginationMetadata(
|
||||
currentPage=paginationParams.page or 1,
|
||||
pageSize=paginationParams.pageSize or 20,
|
||||
totalItems=result.totalItems,
|
||||
totalPages=result.totalPages,
|
||||
sort=paginationParams.sort if paginationParams else [],
|
||||
filters=paginationParams.filters if paginationParams else None,
|
||||
),
|
||||
)
|
||||
).model_dump(),
|
||||
}
|
||||
items = result.items if hasattr(result, "items") else result
|
||||
return PaginatedResponse(items=items, pagination=None)
|
||||
enriched = enrichRowsWithFkLabels(
|
||||
_itemsToDicts(items), modelClass,
|
||||
extraResolvers=featureResolvers or None,
|
||||
)
|
||||
return {"items": enriched, "pagination": None}
|
||||
|
||||
|
||||
@router.get("/{instanceId}/data/accounts", response_model=PaginatedResponse[TrusteeDataAccount])
|
||||
@router.get("/{instanceId}/data/accounts")
|
||||
@limiter.limit("30/minute")
|
||||
def get_data_accounts(
|
||||
request: Request,
|
||||
|
|
@ -2274,7 +2537,7 @@ def get_data_accounts(
|
|||
)
|
||||
|
||||
|
||||
@router.get("/{instanceId}/data/journal-entries", response_model=PaginatedResponse[TrusteeDataJournalEntry])
|
||||
@router.get("/{instanceId}/data/journal-entries")
|
||||
@limiter.limit("30/minute")
|
||||
def get_data_journal_entries(
|
||||
request: Request,
|
||||
|
|
@ -2295,7 +2558,7 @@ def get_data_journal_entries(
|
|||
)
|
||||
|
||||
|
||||
@router.get("/{instanceId}/data/journal-lines", response_model=PaginatedResponse[TrusteeDataJournalLine])
|
||||
@router.get("/{instanceId}/data/journal-lines")
|
||||
@limiter.limit("30/minute")
|
||||
def get_data_journal_lines(
|
||||
request: Request,
|
||||
|
|
@ -2316,7 +2579,7 @@ def get_data_journal_lines(
|
|||
)
|
||||
|
||||
|
||||
@router.get("/{instanceId}/data/contacts", response_model=PaginatedResponse[TrusteeDataContact])
|
||||
@router.get("/{instanceId}/data/contacts")
|
||||
@limiter.limit("30/minute")
|
||||
def get_data_contacts(
|
||||
request: Request,
|
||||
|
|
@ -2337,7 +2600,7 @@ def get_data_contacts(
|
|||
)
|
||||
|
||||
|
||||
@router.get("/{instanceId}/data/account-balances", response_model=PaginatedResponse[TrusteeDataAccountBalance])
|
||||
@router.get("/{instanceId}/data/account-balances")
|
||||
@limiter.limit("30/minute")
|
||||
def get_data_account_balances(
|
||||
request: Request,
|
||||
|
|
@ -2358,7 +2621,7 @@ def get_data_account_balances(
|
|||
)
|
||||
|
||||
|
||||
@router.get("/{instanceId}/accounting/configs", response_model=PaginatedResponse[TrusteeAccountingConfig])
|
||||
@router.get("/{instanceId}/accounting/configs")
|
||||
@limiter.limit("30/minute")
|
||||
def get_accounting_configs(
|
||||
request: Request,
|
||||
|
|
@ -2384,7 +2647,7 @@ def get_accounting_configs(
|
|||
)
|
||||
|
||||
|
||||
@router.get("/{instanceId}/accounting/syncs", response_model=PaginatedResponse[TrusteeAccountingSync])
|
||||
@router.get("/{instanceId}/accounting/syncs")
|
||||
@limiter.limit("30/minute")
|
||||
def get_accounting_syncs(
|
||||
request: Request,
|
||||
|
|
|
|||
|
|
@ -24,7 +24,7 @@ class WorkspaceUserSettings(PowerOnModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": True,
|
||||
"fk_target": {"db": "poweron_app", "table": "User"},
|
||||
"fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
|
||||
},
|
||||
)
|
||||
mandateId: str = Field(
|
||||
|
|
@ -34,7 +34,7 @@ class WorkspaceUserSettings(PowerOnModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": True,
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
featureInstanceId: str = Field(
|
||||
|
|
@ -44,7 +44,7 @@ class WorkspaceUserSettings(PowerOnModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": True,
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
maxAgentRounds: Optional[int] = Field(
|
||||
|
|
|
|||
|
|
@ -191,7 +191,7 @@ _SOURCE_TYPE_TO_SERVICE = {
|
|||
}
|
||||
|
||||
|
||||
def _buildDataSourceContext(chatService, dataSourceIds: List[str]) -> str:
|
||||
def buildDataSourceContext(chatService, dataSourceIds: List[str]) -> str:
|
||||
"""Build a description of active data sources for the agent prompt."""
|
||||
parts = [
|
||||
"The user has attached the following external data sources to this prompt.",
|
||||
|
|
@ -229,7 +229,7 @@ def _buildDataSourceContext(chatService, dataSourceIds: List[str]) -> str:
|
|||
return "\n".join(parts) if found else ""
|
||||
|
||||
|
||||
def _buildFeatureDataSourceContext(featureDataSourceIds: List[str]) -> str:
|
||||
def buildFeatureDataSourceContext(featureDataSourceIds: List[str]) -> str:
|
||||
"""Build a description of attached feature data sources for the agent prompt."""
|
||||
from modules.datamodels.datamodelFeatureDataSource import FeatureDataSource
|
||||
from modules.security.rbacCatalog import getCatalogService
|
||||
|
|
@ -735,12 +735,12 @@ async def _runWorkspaceAgent(
|
|||
|
||||
enrichedPrompt = prompt
|
||||
if dataSourceIds:
|
||||
dsInfo = _buildDataSourceContext(chatService, dataSourceIds)
|
||||
dsInfo = buildDataSourceContext(chatService, dataSourceIds)
|
||||
if dsInfo:
|
||||
enrichedPrompt = f"{prompt}\n\n[Active Data Sources]\n{dsInfo}"
|
||||
|
||||
if featureDataSourceIds:
|
||||
fdsInfo = _buildFeatureDataSourceContext(featureDataSourceIds)
|
||||
fdsInfo = buildFeatureDataSourceContext(featureDataSourceIds)
|
||||
if fdsInfo:
|
||||
enrichedPrompt = f"{enrichedPrompt}\n\n[Attached Feature Data Sources]\n{fdsInfo}"
|
||||
|
||||
|
|
|
|||
|
|
@ -139,7 +139,7 @@ def _bootstrapBilling() -> None:
|
|||
Idempotent: only creates missing settings/accounts.
|
||||
"""
|
||||
try:
|
||||
from modules.interfaces.interfaceDbBilling import _getRootInterface as getBillingRootInterface
|
||||
from modules.interfaces.interfaceDbBilling import getRootInterface as getBillingRootInterface
|
||||
|
||||
billingInterface = getBillingRootInterface()
|
||||
|
||||
|
|
@ -210,7 +210,18 @@ def _buildSystemTemplates():
        "nodes": [
            {"id": "n1", "type": "trigger.schedule", "x": 50, "y": 200, "title": "Täglicher Check", "parameters": {}},
            {"id": "n2", "type": "email.checkEmail", "x": 300, "y": 200, "title": "Mailbox prüfen", "parameters": {}},
            {"id": "n3", "type": "flow.loop", "x": 550, "y": 200, "title": "Pro E-Mail", "parameters": {}},
            {
                "id": "n3",
                "type": "flow.loop",
                "x": 550,
                "y": 200,
                "title": "Pro E-Mail",
                "parameters": {
                    "items": {"type": "ref", "nodeId": "n2", "path": ["emails"]},
                    "level": "auto",
                    "concurrency": 1,
                },
            },
            {"id": "n4", "type": "ai.prompt", "x": 800, "y": 200, "title": "Analyse: Antwort nötig?", "parameters": {}},
            {"id": "n5", "type": "flow.ifElse", "x": 1050, "y": 200, "title": "Antwort nötig?", "parameters": {}},
            {"id": "n6", "type": "ai.prompt", "x": 1300, "y": 100, "title": "Kontext abrufen & Antwort formulieren", "parameters": {}},

@ -239,7 +250,18 @@ def _buildSystemTemplates():
        "nodes": [
            {"id": "n1", "type": "trigger.schedule", "x": 50, "y": 200, "title": "Geplanter Import", "parameters": {}},
            {"id": "n2", "type": "sharepoint.listFiles", "x": 300, "y": 200, "title": "SharePoint Ordner lesen", "parameters": {}},
            {"id": "n3", "type": "flow.loop", "x": 550, "y": 200, "title": "Pro Dokument", "parameters": {}},
            {
                "id": "n3",
                "type": "flow.loop",
                "x": 550,
                "y": 200,
                "title": "Pro Dokument",
                "parameters": {
                    "items": {"type": "ref", "nodeId": "n2", "path": ["files"]},
                    "level": "auto",
                    "concurrency": 1,
                },
            },
            {"id": "n4", "type": "sharepoint.readFile", "x": 800, "y": 200, "title": "PDF-Inhalt lesen", "parameters": {}},
            {"id": "n5", "type": "ai.prompt", "x": 1050, "y": 200, "title": "Typ klassifizieren (Rechnung, Beleg, Bankauszug, Vertrag, etc.)", "parameters": {}},
            {"id": "n6", "type": "trustee.extractFromFiles", "x": 1300, "y": 200, "title": "Dokument extrahieren", "parameters": {}},
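Note: the loop nodes now declare their input list explicitly as an items ref pointing at a prior node's output. A minimal sketch of how such a ref could be resolved at execution time (resolveRef and the nodeOutputs shape are assumptions for illustration, not the engine's real API):

from typing import Any, Dict

def resolveRef(ref: Dict[str, Any], nodeOutputs: Dict[str, Dict[str, Any]]) -> Any:
    # Follow {"type": "ref", "nodeId": ..., "path": [...]} into an earlier node's output.
    if not (isinstance(ref, dict) and ref.get("type") == "ref"):
        return ref  # literal value, pass through unchanged
    value: Any = nodeOutputs.get(ref["nodeId"], {})
    for key in ref.get("path", []):
        value = value.get(key) if isinstance(value, dict) else None
    return value

nodeOutputs = {"n2": {"emails": [{"subject": "A"}, {"subject": "B"}]}}
items = resolveRef({"type": "ref", "nodeId": "n2", "path": ["emails"]}, nodeOutputs)
print(len(items))  # 2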
@ -1946,11 +1968,11 @@ def initRootMandateBilling(mandateId: str) -> None:
    Creates mandate pool account and user audit accounts.
    """
    try:
        from modules.interfaces.interfaceDbBilling import _getRootInterface
        from modules.interfaces.interfaceDbBilling import getRootInterface as getBillingRootInterface
        from modules.interfaces.interfaceDbApp import getRootInterface as getAppRootInterface
        from modules.datamodels.datamodelBilling import BillingSettings

        billingInterface = _getRootInterface()
        billingInterface = getBillingRootInterface()
        appInterface = getAppRootInterface()

        existingSettings = billingInterface.getSettings(mandateId)

@ -1990,7 +2012,7 @@ def _initRootMandateSubscription(mandateId: str) -> None:
    Called during bootstrap after billing init.
    """
    try:
        from modules.interfaces.interfaceDbSubscription import _getRootInterface as getSubRootInterface
        from modules.interfaces.interfaceDbSubscription import getRootInterface as getSubRootInterface
        from modules.datamodels.datamodelSubscription import (
            MandateSubscription,
            SubscriptionStatusEnum,
@ -15,7 +15,7 @@ from typing import Dict, Any, List, Optional, Union
from passlib.context import CryptContext
import uuid

from modules.connectors.connectorDbPostgre import DatabaseConnector, _get_cached_connector
from modules.connectors.connectorDbPostgre import DatabaseConnector, getCachedConnector
from modules.shared.configuration import APP_CONFIG
from modules.shared.dbRegistry import registerDatabase
from modules.shared.timeUtils import getUtcTimestamp, parseTimestamp

@ -143,7 +143,7 @@ class AppObjects:
        dbPassword = APP_CONFIG.get("DB_PASSWORD_SECRET")
        dbPort = int(APP_CONFIG.get("DB_PORT", 5432))

        self.db = _get_cached_connector(
        self.db = getCachedConnector(
            dbHost=dbHost,
            dbDatabase=dbDatabase,
            dbUser=dbUser,
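Note: _get_cached_connector becomes the public getCachedConnector; the point of the helper is to reuse one DatabaseConnector per distinct connection target instead of opening a new pool per interface. A minimal sketch of such a cache keyed by connection parameters (the stubbed class and constructor signature are assumptions for illustration):

from typing import Dict, Tuple

class DatabaseConnector:
    def __init__(self, dbHost: str, dbDatabase: str, dbUser: str, dbPassword: str, dbPort: int):
        self.target = (dbHost, dbDatabase, dbUser, dbPort)  # connection details; password not part of the key

_connectorCache: Dict[Tuple[str, str, str, int], DatabaseConnector] = {}

def getCachedConnector(dbHost: str, dbDatabase: str, dbUser: str, dbPassword: str, dbPort: int = 5432) -> DatabaseConnector:
    # Return one shared connector per (host, database, user, port).
    key = (dbHost, dbDatabase, dbUser, dbPort)
    if key not in _connectorCache:
        _connectorCache[key] = DatabaseConnector(dbHost, dbDatabase, dbUser, dbPassword, dbPort)
    return _connectorCache[key]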
@ -1594,23 +1594,24 @@ class AppObjects:
        if not adminRoleId:
            raise ValueError(f"No admin role found for mandate {mandateId} — cannot assign user without role")

        from modules.interfaces.interfaceDbSubscription import _getRootInterface as _getSubRoot
        from modules.interfaces.interfaceDbBilling import _getRootInterface as _getBillingRoot
        from modules.interfaces.interfaceDbSubscription import getRootInterface as _getSubRoot
        from modules.interfaces.interfaceDbBilling import getRootInterface as _getBillingRoot
        from datetime import datetime, timezone, timedelta

        now = datetime.now(timezone.utc)
        nowTs = now.timestamp()
        targetStatus = SubscriptionStatusEnum.TRIALING if plan.trialDays else SubscriptionStatusEnum.ACTIVE
        subscription = MandateSubscription(
            mandateId=mandateId,
            planKey=planKey,
            status=targetStatus,
            startedAt=now.isoformat(),
            currentPeriodStart=now.isoformat(),
            startedAt=nowTs,
            currentPeriodStart=nowTs,
        )
        if plan.trialDays:
            trialEnd = now + timedelta(days=plan.trialDays)
            subscription.trialEndsAt = trialEnd.isoformat()
            subscription.currentPeriodEnd = trialEnd.isoformat()
            subscription.trialEndsAt = trialEnd.timestamp()
            subscription.currentPeriodEnd = trialEnd.timestamp()

        subInterface = _getSubRoot()
        subInterface.createSubscription(subscription)
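Note: subscription period fields switch here from ISO-8601 strings to epoch seconds, which turns period arithmetic into plain float math. A small illustration of the two representations (values are examples only):

from datetime import datetime, timezone, timedelta

now = datetime.now(timezone.utc)
trialEnd = now + timedelta(days=14)

# Before: string timestamps that have to be re-parsed on every comparison
startedAtIso = now.isoformat()          # e.g. '2025-06-01T12:00:00+00:00'

# After: epoch seconds, directly comparable and subtractable
startedAtTs = now.timestamp()
trialEndsAtTs = trialEnd.timestamp()
remainingDays = (trialEndsAtTs - startedAtTs) / 86400
print(startedAtIso, round(remainingDays))  # ..., 14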
@ -1693,7 +1694,7 @@ class AppObjects:
        from modules.datamodels.datamodelSubscription import (
            SubscriptionStatusEnum, BUILTIN_PLANS,
        )
        from modules.interfaces.interfaceDbSubscription import _getRootInterface as _getSubRoot
        from modules.interfaces.interfaceDbSubscription import getRootInterface as _getSubRoot
        from datetime import datetime, timezone, timedelta

        activated = 0

@ -1716,19 +1717,19 @@ class AppObjects:

            targetStatus = SubscriptionStatusEnum.TRIALING if plan and plan.trialDays else SubscriptionStatusEnum.ACTIVE
            additionalData = {
                "currentPeriodStart": now.isoformat(),
                "currentPeriodStart": now.timestamp(),
            }

            if plan and plan.trialDays:
                trialEnd = now + timedelta(days=plan.trialDays)
                additionalData["trialEndsAt"] = trialEnd.isoformat()
                additionalData["currentPeriodEnd"] = trialEnd.isoformat()
                additionalData["trialEndsAt"] = trialEnd.timestamp()
                additionalData["currentPeriodEnd"] = trialEnd.timestamp()
            elif plan and plan.billingPeriod:
                from modules.datamodels.datamodelSubscription import BillingPeriodEnum
                if plan.billingPeriod == BillingPeriodEnum.MONTHLY:
                    additionalData["currentPeriodEnd"] = (now + timedelta(days=30)).isoformat()
                    additionalData["currentPeriodEnd"] = (now + timedelta(days=30)).timestamp()
                elif plan.billingPeriod == BillingPeriodEnum.YEARLY:
                    additionalData["currentPeriodEnd"] = (now + timedelta(days=365)).isoformat()
                    additionalData["currentPeriodEnd"] = (now + timedelta(days=365)).timestamp()

            try:
                subInterface.transitionStatus(
@ -1936,7 +1937,7 @@ class AppObjects:
        logger.info(f"Cascade: deleted {len(memberships)} UserMandates for mandate {mandateId}")

        # 3. Cancel Stripe subscriptions + delete MandateSubscription records (poweron_billing)
        from modules.interfaces.interfaceDbSubscription import _getRootInterface as _getSubRoot
        from modules.interfaces.interfaceDbSubscription import getRootInterface as _getSubRoot
        subInterface = _getSubRoot()
        subs = subInterface.listForMandate(mandateId)
        for sub in subs:

@ -1954,7 +1955,7 @@ class AppObjects:
        logger.info(f"Cascade: deleted {len(subs)} subscriptions for mandate {mandateId}")

        # 3b. Delete Billing data (poweron_billing)
        from modules.interfaces.interfaceDbBilling import _getRootInterface as _getBillingRoot
        from modules.interfaces.interfaceDbBilling import getRootInterface as _getBillingRoot
        billingDb = _getBillingRoot().db
        billingAccounts = billingDb.getRecordset(BillingAccount, recordFilter={"mandateId": mandateId})
        for acc in billingAccounts:

@ -2202,7 +2203,7 @@ class AppObjects:
        Balance is always on the mandate pool (PREPAY_MANDATE). User accounts are for audit trail only.
        """
        try:
            from modules.interfaces.interfaceDbBilling import _getRootInterface as getBillingRootInterface
            from modules.interfaces.interfaceDbBilling import getRootInterface as getBillingRootInterface

            billingInterface = getBillingRootInterface()
            settings = billingInterface.getSettings(mandateId)
@ -134,7 +134,7 @@ def getInterface(currentUser: User, mandateId: str = None) -> "BillingObjects":
    return _billingInterfaces[cacheKey]


def _getRootInterface() -> "BillingObjects":
def getRootInterface() -> "BillingObjects":
    """Get interface with system access for bootstrap operations."""
    from modules.security.rootAccess import getRootUser
    rootUser = getRootUser()

@ -884,11 +884,12 @@ class BillingObjects:
            periodStartAt = periodStartAt.replace(tzinfo=timezone.utc)
        else:
            periodStartAt = periodStartAt.astimezone(timezone.utc)
        periodStartTs = periodStartAt.timestamp()
        settings = self.getOrCreateSettings(mandateId)
        prev = self._parseSettingsDateTime(settings.get("storagePeriodStartAt"))
        if prev is not None and abs((prev - periodStartAt).total_seconds()) < 2:
        prev = settings.get("storagePeriodStartAt")
        if prev is not None and abs(prev - periodStartTs) < 2:
            return
        from modules.interfaces.interfaceDbSubscription import _getRootInterface as _getSubRoot
        from modules.interfaces.interfaceDbSubscription import getRootInterface as _getSubRoot

        usedMB = float(_getSubRoot().getMandateDataVolumeMB(mandateId))
        self.updateSettings(

@ -896,7 +897,7 @@ class BillingObjects:
            {
                "storageHighWatermarkMB": usedMB,
                "storageBilledUpToMB": 0.0,
                "storagePeriodStartAt": periodStartAt,
                "storagePeriodStartAt": periodStartTs,
            },
        )
        logger.info(
@ -911,13 +912,13 @@ class BillingObjects:
        settings = self.getSettings(mandateId)
        if not settings:
            return None
        from modules.interfaces.interfaceDbSubscription import _getRootInterface as _getSubRoot
        from modules.datamodels.datamodelSubscription import _getPlan
        from modules.interfaces.interfaceDbSubscription import getRootInterface as _getSubRoot
        from modules.datamodels.datamodelSubscription import getPlan

        subIface = _getSubRoot()
        usedMB = float(subIface.getMandateDataVolumeMB(mandateId))
        sub = subIface.getOperativeForMandate(mandateId)
        plan = _getPlan(sub.get("planKey", "")) if sub else None
        plan = getPlan(sub.get("planKey", "")) if sub else None
        includedMB = plan.maxDataVolumeMB if plan and plan.maxDataVolumeMB is not None else None
        if includedMB is None:
            return None

@ -971,13 +972,13 @@ class BillingObjects:
        Amount = budgetAiPerUserCHF * activeUsers (dynamic, not the static plan.budgetAiCHF).
        Should be called once per billing period (initial activation + each invoice.paid).
        Returns the created CREDIT transaction or None if budget is 0."""
        from modules.datamodels.datamodelSubscription import _getPlan
        from modules.datamodels.datamodelSubscription import getPlan

        plan = _getPlan(planKey)
        plan = getPlan(planKey)
        if not plan or not plan.budgetAiPerUserCHF or plan.budgetAiPerUserCHF <= 0:
            return None

        from modules.interfaces.interfaceDbSubscription import _getRootInterface as _getSubRoot
        from modules.interfaces.interfaceDbSubscription import getRootInterface as _getSubRoot
        subRoot = _getSubRoot()
        activeUsers = max(subRoot.countActiveUsers(mandateId), 1)
        amount = plan.budgetAiPerUserCHF * activeUsers
@ -1027,13 +1028,13 @@ class BillingObjects:
        delta > 0: user added -> CREDIT pro-rata portion
        delta < 0: user removed -> DEBIT pro-rata portion
        """
        from modules.datamodels.datamodelSubscription import _getPlan
        from modules.datamodels.datamodelSubscription import getPlan

        plan = _getPlan(planKey)
        plan = getPlan(planKey)
        if not plan or not plan.budgetAiPerUserCHF or plan.budgetAiPerUserCHF <= 0:
            return None

        from modules.interfaces.interfaceDbSubscription import _getRootInterface as _getSubRoot
        from modules.interfaces.interfaceDbSubscription import getRootInterface as _getSubRoot
        subRoot = _getSubRoot()
        operative = subRoot.getOperativeForMandate(mandateId)
        if not operative:

@ -1044,18 +1045,9 @@ class BillingObjects:
        if not periodStart or not periodEnd:
            return None

        if isinstance(periodStart, str):
            periodStart = datetime.fromisoformat(periodStart)
        if isinstance(periodEnd, str):
            periodEnd = datetime.fromisoformat(periodEnd)
        if periodStart.tzinfo is None:
            periodStart = periodStart.replace(tzinfo=timezone.utc)
        if periodEnd.tzinfo is None:
            periodEnd = periodEnd.replace(tzinfo=timezone.utc)

        now = datetime.now(timezone.utc)
        totalSeconds = (periodEnd - periodStart).total_seconds()
        remainingSeconds = max((periodEnd - now).total_seconds(), 0)
        nowTs = datetime.now(timezone.utc).timestamp()
        totalSeconds = periodEnd - periodStart
        remainingSeconds = max(periodEnd - nowTs, 0)
        proRataFraction = remainingSeconds / totalSeconds if totalSeconds > 0 else 0

        amount = round(abs(delta) * plan.budgetAiPerUserCHF * proRataFraction, 2)
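Note: with epoch-second period bounds, the pro-rata credit reduces to plain subtraction. A compact worked example of the fraction used above (all values invented for illustration):

from datetime import datetime, timezone, timedelta

periodStart = datetime(2025, 1, 1, tzinfo=timezone.utc).timestamp()
periodEnd = (datetime(2025, 1, 1, tzinfo=timezone.utc) + timedelta(days=30)).timestamp()
nowTs = (datetime(2025, 1, 1, tzinfo=timezone.utc) + timedelta(days=10)).timestamp()

totalSeconds = periodEnd - periodStart
remainingSeconds = max(periodEnd - nowTs, 0)
proRataFraction = remainingSeconds / totalSeconds if totalSeconds > 0 else 0

budgetAiPerUserCHF = 9.0   # assumed plan value, for illustration only
delta = 2                  # two users added mid-period
amount = round(abs(delta) * budgetAiPerUserCHF * proRataFraction, 2)
print(round(proRataFraction, 3), amount)  # 0.667 12.0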
@ -1221,7 +1213,7 @@ class BillingObjects:
            if not mandate or not getattr(mandate, "enabled", True):
                continue

            mandateName = getattr(mandate, 'label', None) or getattr(mandate, 'name', None) or (mandate.get("label") or mandate.get("name", "") if isinstance(mandate, dict) else "")
            mandateName = getattr(mandate, 'label', None) or getattr(mandate, 'name', None) or (mandate.get("label") or mandate.get("name", None) if isinstance(mandate, dict) else None) or f"NA({mandateId})"

            settings = self.getSettings(mandateId)
            if not settings:

@ -1280,13 +1272,12 @@ class BillingObjects:
                if not userAccount:
                    continue

                # Get transactions for user's account (all transactions are on user accounts now)
                transactions = self.getTransactions(userAccount["id"], limit=limit)

                mandate = appInterface.getMandate(mandateId)
                mandateName = ""
                mandateName = f"NA({mandateId})"
                if mandate:
                    mandateName = getattr(mandate, 'label', None) or getattr(mandate, 'name', None) or (mandate.get("label") or mandate.get("name", "") if isinstance(mandate, dict) else "")
                    mandateName = getattr(mandate, 'label', None) or getattr(mandate, 'name', None) or (mandate.get("label") or mandate.get("name", None) if isinstance(mandate, dict) else None) or f"NA({mandateId})"

                for t in transactions:
                    t["mandateId"] = mandateId

@ -1333,9 +1324,9 @@ class BillingObjects:
                continue

            mandate = appInterface.getMandate(mandateId)
            mandateName = ""
            mandateName = f"NA({mandateId})"
            if mandate:
                mandateName = getattr(mandate, 'label', None) or getattr(mandate, 'name', None) or (mandate.get("label") or mandate.get("name", "") if isinstance(mandate, dict) else "")
                mandateName = getattr(mandate, 'label', None) or getattr(mandate, 'name', None) or (mandate.get("label") or mandate.get("name", None) if isinstance(mandate, dict) else None) or f"NA({mandateId})"

            allMandateAccounts = self.db.getRecordset(
                BillingAccount,

@ -1387,11 +1378,10 @@ class BillingObjects:
        for mandateId in targetMandateIds:
            transactions = self.getTransactionsByMandate(mandateId, limit=limit)

            # Get mandate name
            mandate = appInterface.getMandate(mandateId)
            mandateName = ""
            mandateName = f"NA({mandateId})"
            if mandate:
                mandateName = getattr(mandate, 'label', None) or getattr(mandate, 'name', None) or (mandate.get("label") or mandate.get("name", "") if isinstance(mandate, dict) else "")
                mandateName = getattr(mandate, 'label', None) or getattr(mandate, 'name', None) or (mandate.get("label") or mandate.get("name", None) if isinstance(mandate, dict) else None) or f"NA({mandateId})"

            for t in transactions:
                t["mandateId"] = mandateId

@ -1439,7 +1429,6 @@ class BillingObjects:
        for s in allSettings:
            settingsMap[s.get("mandateId")] = s

        # Get user info efficiently
        userIds = list(set(acc.get("userId") for acc in allAccounts if acc.get("userId")))
        userMap = {}
        for userId in userIds:

@ -1447,16 +1436,15 @@ class BillingObjects:
            if user:
                displayName = getattr(user, 'displayName', None) or (user.get("displayName") if isinstance(user, dict) else None)
                username = getattr(user, 'username', None) or (user.get("username") if isinstance(user, dict) else None)
                userMap[userId] = displayName or username or userId
                userMap[userId] = displayName or username or f"NA({userId})"

        # Get mandate info efficiently
        mandateMap = {}
        mandateIdList = list(set(acc.get("mandateId") for acc in allAccounts if acc.get("mandateId")))
        for mandateId in mandateIdList:
            mandate = appInterface.getMandate(mandateId)
            if mandate:
                mandateName = getattr(mandate, 'label', None) or getattr(mandate, 'name', None) or (mandate.get("label") or mandate.get("name", "") if isinstance(mandate, dict) else "")
                mandateMap[mandateId] = mandateName
                mandateName = getattr(mandate, 'label', None) or getattr(mandate, 'name', None) or (mandate.get("label") or mandate.get("name", None) if isinstance(mandate, dict) else None)
                mandateMap[mandateId] = mandateName or f"NA({mandateId})"

        for account in allAccounts:
            mandateId = account.get("mandateId")

@ -1475,9 +1463,9 @@ class BillingObjects:
            balances.append({
                "accountId": account.get("id"),
                "mandateId": mandateId,
                "mandateName": mandateMap.get(mandateId, ""),
                "mandateName": mandateMap.get(mandateId) or (f"NA({mandateId})" if mandateId else None),
                "userId": userId,
                "userName": userMap.get(userId, userId),
                "userName": userMap.get(userId) or (f"NA({userId})" if userId else None),
                "balance": balance,
                "warningThreshold": warningThreshold,
                "isWarning": balance <= warningThreshold,
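Note: the repeated getattr/get chains above now fall back to a visible NA(<id>) marker instead of an empty string, so missing mandates or users stay traceable in reports and exports. A tiny helper in the same spirit (the helper itself is an illustration, not code from this PR):

from typing import Any, Optional

def displayNameOrNa(obj: Any, entityId: Optional[str], *attrs: str) -> Optional[str]:
    # First non-empty attribute/key from obj, else 'NA(<id>)', else None.
    for attr in attrs:
        value = getattr(obj, attr, None)
        if value is None and isinstance(obj, dict):
            value = obj.get(attr)
        if value:
            return value
    return f"NA({entityId})" if entityId else None

print(displayNameOrNa(None, "m-42", "label", "name"))               # NA(m-42)
print(displayNameOrNa({"name": "Acme"}, "m-42", "label", "name"))   # Acme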
@ -1492,7 +1480,7 @@ class BillingObjects:
    @staticmethod
    def _mapPaginationColumns(pagination: PaginationParams) -> PaginationParams:
        """Remap frontend column names to DB column names in filters and sort."""
        _COL_MAP = {"createdAt": "sysCreatedAt"}
        _COL_MAP: dict = {}
        _ENRICHED_COLS = {"mandateName", "userName", "mandateId", "userId"}
        import copy
        p = copy.deepcopy(pagination)

@ -1596,14 +1584,14 @@ class BillingObjects:
        if pageUserIds:
            users = appInterface.getUsersByIds(list(pageUserIds))
            for uid, u in users.items():
                dn = getattr(u, "displayName", None) or getattr(u, "username", None) or uid
                dn = getattr(u, "displayName", None) or getattr(u, "username", None) or f"NA({uid})"
                userMap[uid] = dn

        mandateMap: Dict[str, str] = {}
        if pageMandateIds:
            mandates = appInterface.getMandatesByIds(list(pageMandateIds))
            for mid, m in mandates.items():
                mandateMap[mid] = getattr(m, "label", None) or getattr(m, "name", None) or mid
                mandateMap[mid] = getattr(m, "label", None) or getattr(m, "name", None) or f"NA({mid})"

        enriched = []
        for t in pageItems:

@ -1613,9 +1601,9 @@ class BillingObjects:
            mid = acc.get("mandateId")
            txUserId = row.get("createdByUserId") or acc.get("userId")
            row["mandateId"] = mid
            row["mandateName"] = mandateMap.get(mid, "")
            row["mandateName"] = mandateMap.get(mid) or (f"NA({mid})" if mid else None)
            row["userId"] = txUserId
            row["userName"] = userMap.get(txUserId, txUserId) if txUserId else None
            row["userName"] = userMap.get(txUserId) or (f"NA({txUserId})" if txUserId else None)
            enriched.append(row)

        return PaginatedResult(items=enriched, totalItems=totalItems, totalPages=totalPages)
@ -1639,12 +1627,12 @@ class BillingObjects:
        first, then builds a single SQL query with OR-combined conditions.
        """
        import math
        from modules.connectors.connectorDbPostgre import _get_model_fields, _parseRecordFields
        from modules.connectors.connectorDbPostgre import getModelFields, parseRecordFields
        from modules.datamodels.datamodelUam import UserInDB
        from modules.interfaces.interfaceDbApp import getInterface as getAppInterface

        table = BillingTransaction.__name__
        fields = _get_model_fields(BillingTransaction)
        fields = getModelFields(BillingTransaction)
        pattern = f"%{searchTerm}%"

        # Resolve matching user / mandate IDs via the app DB (which is separate

@ -1785,7 +1773,7 @@ class BillingObjects:
            records = [dict(row) for row in cur.fetchall()]

            for rec in records:
                _parseRecordFields(rec, fields, f"search table {table}")
                parseRecordFields(rec, fields, f"search table {table}")

        totalPages = math.ceil(totalItems / pageSize) if totalItems > 0 else 0
        return {"items": records, "totalItems": totalItems, "totalPages": totalPages}
@ -1978,7 +1966,6 @@ class BillingObjects:
    ) -> List[str]:
        """SQL DISTINCT for filter-values on BillingTransaction, scoped by mandates."""
        _COLUMN_MAP = {
            "createdAt": "sysCreatedAt",
            "mandateId": "accountId",
            "mandateName": "accountId",
        }

@ -2023,7 +2010,7 @@ class BillingObjects:
            appInterface = getAppInterface(self.currentUser)
            mandates = appInterface.getMandatesByIds(mandateIds)
            return sorted(
                {getattr(m, "label", None) or getattr(m, "name", "") or mid for mid, m in mandates.items()},
                {getattr(m, "label", None) or getattr(m, "name", None) or f"NA({mid})" for mid, m in mandates.items()},
                key=lambda v: v.lower(),
            )

@ -2035,7 +2022,7 @@ class BillingObjects:
            appInterface = getAppInterface(self.currentUser)
            users = appInterface.getUsersByIds(values)
            return sorted(
                {getattr(u, "displayName", None) or getattr(u, "username", None) or uid for uid, u in users.items()},
                {getattr(u, "displayName", None) or getattr(u, "username", None) or f"NA({uid})" for uid, u in users.items()},
                key=lambda v: v.lower(),
            )
@ -2075,7 +2062,6 @@ class BillingObjects:
                "userId": acc.get("userId")
            }

        # Get user info efficiently
        userIds = list(set(acc.get("userId") for acc in allAccounts if acc.get("userId")))
        userMap = {}
        for userId in userIds:

@ -2083,16 +2069,15 @@ class BillingObjects:
            if user:
                displayName = getattr(user, 'displayName', None) or (user.get("displayName") if isinstance(user, dict) else None)
                username = getattr(user, 'username', None) or (user.get("username") if isinstance(user, dict) else None)
                userMap[userId] = displayName or username or userId
                userMap[userId] = displayName or username or f"NA({userId})"

        # Get mandate info efficiently
        mandateMap = {}
        mandateIdList = list(set(acc.get("mandateId") for acc in allAccounts if acc.get("mandateId")))
        for mandateId in mandateIdList:
            mandate = appInterface.getMandate(mandateId)
            if mandate:
                mandateName = getattr(mandate, 'label', None) or getattr(mandate, 'name', None) or (mandate.get("label") or mandate.get("name", "") if isinstance(mandate, dict) else "")
                mandateMap[mandateId] = mandateName
                mandateName = getattr(mandate, 'label', None) or getattr(mandate, 'name', None) or (mandate.get("label") or mandate.get("name", None) if isinstance(mandate, dict) else None)
                mandateMap[mandateId] = mandateName or f"NA({mandateId})"

        # Get transactions for all accounts and collect createdByUserIds
        rawTransactions = []

@ -2123,18 +2108,16 @@ class BillingObjects:
            if user:
                displayName = getattr(user, 'displayName', None) or (user.get("displayName") if isinstance(user, dict) else None)
                username = getattr(user, 'username', None) or (user.get("username") if isinstance(user, dict) else None)
                userMap[uid] = displayName or username or uid
                userMap[uid] = displayName or username or f"NA({uid})"

        # Enrich transactions
        for t in rawTransactions:
            mandateId = t.pop("_accountMandateId", None)
            accountUserId = t.pop("_accountUserId", None)
            t["mandateId"] = mandateId
            t["mandateName"] = mandateMap.get(mandateId, "")
            # Prefer createdByUserId (per-transaction) over account-derived userId
            t["mandateName"] = mandateMap.get(mandateId) or (f"NA({mandateId})" if mandateId else None)
            txUserId = t.get("createdByUserId") or accountUserId
            t["userId"] = txUserId
            t["userName"] = userMap.get(txUserId, txUserId) if txUserId else None
            t["userName"] = userMap.get(txUserId) or (f"NA({txUserId})" if txUserId else None)
            allTransactions.append(t)

    except Exception as e:
@ -62,13 +62,13 @@ def storeDebugMessageAndDocuments(message, currentUser, mandateId=None, featureI
    try:
        import os
        from datetime import datetime, UTC
        from modules.shared.debugLogger import _getBaseDebugDir, _ensureDir
        from modules.shared.debugLogger import getBaseDebugDir, ensureDir
        from modules.interfaces.interfaceDbManagement import getInterface

        # Create base debug directory (use base debug dir, not prompts subdirectory)
        baseDebugDir = _getBaseDebugDir()
        baseDebugDir = getBaseDebugDir()
        debug_root = os.path.join(baseDebugDir, 'messages')
        _ensureDir(debug_root)
        ensureDir(debug_root)

        # Generate timestamp
        timestamp = datetime.now(UTC).strftime('%Y%m%d-%H%M%S-%f')[:-3]

@ -133,7 +133,7 @@ def storeDebugMessageAndDocuments(message, currentUser, mandateId=None, featureI
            safe_label = "default"

        label_folder = os.path.join(message_path, safe_label)
        _ensureDir(label_folder)
        ensureDir(label_folder)

        # Store each document
        for i, doc in enumerate(docs):
@ -11,7 +11,7 @@ from collections import defaultdict
from datetime import datetime, timezone, timedelta
from typing import Dict, Any, List, Optional

from modules.connectors.connectorDbPostgre import _get_cached_connector
from modules.connectors.connectorDbPostgre import getCachedConnector
from modules.shared.dbRegistry import registerDatabase
from modules.datamodels.datamodelKnowledge import FileContentIndex, ContentChunk, RoundMemory, WorkflowMemory
from modules.datamodels.datamodelUam import User

@ -43,7 +43,7 @@ class KnowledgeObjects:
        dbPassword = APP_CONFIG.get("DB_PASSWORD_SECRET")
        dbPort = int(APP_CONFIG.get("DB_PORT", 5432))

        self.db = _get_cached_connector(
        self.db = getCachedConnector(
            dbHost=dbHost,
            dbDatabase=dbDatabase,
            dbUser=dbUser,

@ -103,9 +103,9 @@ class KnowledgeObjects:
        ok = self.db.recordDelete(FileContentIndex, fileId)
        if ok and mandateId:
            try:
                from modules.interfaces.interfaceDbBilling import _getRootInterface
                from modules.interfaces.interfaceDbBilling import getRootInterface

                _getRootInterface().reconcileMandateStorageBilling(str(mandateId))
                getRootInterface().reconcileMandateStorageBilling(str(mandateId))
            except Exception as ex:
                logger.warning("reconcileMandateStorageBilling after delete failed: %s", ex)
        return ok
@ -13,7 +13,7 @@ import math
import mimetypes
from typing import Dict, Any, List, Optional, Union

from modules.connectors.connectorDbPostgre import DatabaseConnector, _get_cached_connector
from modules.connectors.connectorDbPostgre import DatabaseConnector, getCachedConnector
from modules.shared.dbRegistry import registerDatabase
from modules.interfaces.interfaceRbac import getRecordsetWithRBAC, getRecordsetPaginatedWithRBAC
from modules.security.rbac import RbacClass

@ -136,7 +136,7 @@ class ComponentObjects:
        dbPassword = APP_CONFIG.get("DB_PASSWORD_SECRET")
        dbPort = int(APP_CONFIG.get("DB_PORT", 5432))

        self.db = _get_cached_connector(
        self.db = getCachedConnector(
            dbHost=dbHost,
            dbDatabase=dbDatabase,
            dbUser=dbUser,

@ -992,8 +992,11 @@ class ComponentObjects:
                if file.get("neutralize") is None:
                    file["neutralize"] = False

                labelCols = {k: v for k, v in file.items() if k.endswith("Label")}
                fileItem = FileItem(**file)
                fileItems.append(fileItem)
                itemDict = fileItem.model_dump()
                itemDict.update(labelCols)
                fileItems.append(itemDict)
            except Exception as e:
                logger.warning(f"Skipping invalid file record: {str(e)}")
                continue

@ -1347,8 +1350,8 @@ class ComponentObjects:
        folderIds = [f["id"] for f in folders if f.get("id")]
        fileCounts: Dict[str, int] = {}
        try:
            from modules.interfaces.interfaceRbac import _buildFilesScopeWhereClause
            scopeClause = _buildFilesScopeWhereClause(
            from modules.interfaces.interfaceRbac import buildFilesScopeWhereClause
            scopeClause = buildFilesScopeWhereClause(
                self.currentUser, "FileItem", self.db,
                self.mandateId, self.featureInstanceId,
                [], [],
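Note: the file listing above now keeps any enriched *Label columns next to the validated FileItem fields instead of returning the bare model. A minimal sketch of that merge, with FileItem stubbed purely for illustration:

from pydantic import BaseModel

class FileItem(BaseModel):
    id: str
    folderId: str

row = {"id": "f1", "folderId": "d1", "folderIdLabel": "Contracts"}

labelCols = {k: v for k, v in row.items() if k.endswith("Label")}
item = FileItem(**{k: v for k, v in row.items() if k in FileItem.model_fields})
itemDict = item.model_dump()
itemDict.update(labelCols)   # keep folderIdLabel for the frontend
print(itemDict)  # {'id': 'f1', 'folderId': 'd1', 'folderIdLabel': 'Contracts'}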
@ -25,7 +25,7 @@ from modules.datamodels.datamodelSubscription import (
    TERMINAL_STATUSES,
    OPERATIVE_STATUSES,
    BUILTIN_PLANS,
    _getPlan,
    getPlan as getPlanFromCatalog,
    _getSelectablePlans,
)

@ -55,7 +55,7 @@ def getInterface(currentUser: User, mandateId: str = None) -> "SubscriptionObjec
    return _subscriptionInterfaces[cacheKey]


def _getRootInterface() -> "SubscriptionObjects":
def getRootInterface() -> "SubscriptionObjects":
    from modules.security.rootAccess import getRootUser
    return SubscriptionObjects(getRootUser(), mandateId=None)

@ -96,7 +96,7 @@ class SubscriptionObjects:
    # =========================================================================

    def getPlan(self, planKey: str) -> Optional[SubscriptionPlan]:
        return _getPlan(planKey)
        return getPlanFromCatalog(planKey)

    def getSelectablePlans(self) -> List[SubscriptionPlan]:
        return _getSelectablePlans()

@ -224,7 +224,7 @@ class SubscriptionObjects:

        updateData = {"status": toStatus.value}
        if toStatus in TERMINAL_STATUSES and not (additionalData or {}).get("endedAt"):
            updateData["endedAt"] = datetime.now(timezone.utc).isoformat()
            updateData["endedAt"] = datetime.now(timezone.utc).timestamp()
        if additionalData:
            updateData.update(additionalData)

@ -244,7 +244,7 @@ class SubscriptionObjects:

        result = self.db.recordModify(MandateSubscription, subscriptionId, {
            "status": SubscriptionStatusEnum.EXPIRED.value,
            "endedAt": datetime.now(timezone.utc).isoformat(),
            "endedAt": datetime.now(timezone.utc).timestamp(),
        })
        logger.info("Force-expired subscription %s (was %s)", subscriptionId, currentStatus)
        return result
@ -25,6 +25,7 @@ GROUP-Berechtigung:
import logging
import json
import math
import re
from typing import List, Dict, Any, Optional, Type, Union
from pydantic import BaseModel
from modules.datamodels.datamodelRbac import AccessRuleContext
@ -35,6 +36,138 @@ from modules.security.rootAccess import getRootDbAppConnector

logger = logging.getLogger(__name__)

_ISO_DATE_RE = re.compile(r"^\d{4}-\d{2}-\d{2}$")


def _rbacAppendPaginationDictFilter(
    key: str,
    val: Dict[str, Any],
    colType: str,
    whereConditions: List[str],
    whereValues: List[Any],
) -> None:
    """Append SQL for one pagination ``filters`` dict entry (operator + value).

    Mirrors ``connectorDbPostgre._buildPaginationClauses`` semantics so numeric
    comparisons use ``::double precision`` instead of lexicographic ``::TEXT``.
    """
    op = val.get("operator", "equals")
    v = val.get("value", "")
    isNumericCol = colType in ("INTEGER", "DOUBLE PRECISION")

    if op in ("equals", "eq"):
        if colType == "BOOLEAN":
            whereConditions.append(f'COALESCE("{key}", FALSE) = %s')
            whereValues.append(str(v).lower() == "true")
        elif isNumericCol:
            try:
                whereConditions.append(f'"{key}"::double precision = %s')
                whereValues.append(float(v))
            except (ValueError, TypeError):
                whereConditions.append(f'"{key}"::TEXT = %s')
                whereValues.append(str(v))
        else:
            whereConditions.append(f'"{key}"::TEXT = %s')
            whereValues.append(str(v))
        return

    if op == "contains":
        whereConditions.append(f'"{key}"::TEXT ILIKE %s')
        whereValues.append(f"%{v}%")
        return
    if op == "startsWith":
        whereConditions.append(f'"{key}"::TEXT ILIKE %s')
        whereValues.append(f"{v}%")
        return
    if op == "endsWith":
        whereConditions.append(f'"{key}"::TEXT ILIKE %s')
        whereValues.append(f"%{v}")
        return

    if op in ("gt", "gte", "lt", "lte"):
        sqlOp = {"gt": ">", "gte": ">=", "lt": "<", "lte": "<="}[op]
        if isNumericCol:
            try:
                whereConditions.append(f'"{key}"::double precision {sqlOp} %s')
                whereValues.append(float(v))
            except (ValueError, TypeError):
                whereConditions.append(f'"{key}"::TEXT {sqlOp} %s')
                whereValues.append(str(v))
        else:
            whereConditions.append(f'"{key}"::TEXT {sqlOp} %s')
            whereValues.append(str(v))
        return

    if op == "between" and isinstance(v, dict):
        fromVal = v.get("from", "")
        toVal = v.get("to", "")
        if not fromVal and not toVal:
            return
        isDateVal = bool(fromVal and _ISO_DATE_RE.match(str(fromVal))) or bool(
            toVal and _ISO_DATE_RE.match(str(toVal))
        )
        if isNumericCol and isDateVal:
            from datetime import datetime as _dt, timezone as _tz
            if fromVal and toVal:
                fromTs = _dt.strptime(str(fromVal), "%Y-%m-%d").replace(tzinfo=_tz.utc).timestamp()
                toTs = _dt.strptime(str(toVal), "%Y-%m-%d").replace(
                    hour=23, minute=59, second=59, tzinfo=_tz.utc
                ).timestamp()
                whereConditions.append(f'"{key}" >= %s AND "{key}" <= %s')
                whereValues.extend([fromTs, toTs])
            elif fromVal:
                fromTs = _dt.strptime(str(fromVal), "%Y-%m-%d").replace(tzinfo=_tz.utc).timestamp()
                whereConditions.append(f'"{key}" >= %s')
                whereValues.append(fromTs)
            else:
                toTs = _dt.strptime(str(toVal), "%Y-%m-%d").replace(
                    hour=23, minute=59, second=59, tzinfo=_tz.utc
                ).timestamp()
                whereConditions.append(f'"{key}" <= %s')
                whereValues.append(toTs)
        elif isNumericCol:
            try:
                if fromVal and toVal:
                    whereConditions.append(
                        f'"{key}"::double precision >= %s AND "{key}"::double precision <= %s'
                    )
                    whereValues.extend([float(fromVal), float(toVal)])
                elif fromVal:
                    whereConditions.append(f'"{key}"::double precision >= %s')
                    whereValues.append(float(fromVal))
                elif toVal:
                    whereConditions.append(f'"{key}"::double precision <= %s')
                    whereValues.append(float(toVal))
            except (ValueError, TypeError):
                pass
        else:
            if fromVal and toVal:
                whereConditions.append(f'"{key}"::TEXT >= %s AND "{key}"::TEXT <= %s')
                whereValues.extend([str(fromVal), str(toVal)])
            elif fromVal:
                whereConditions.append(f'"{key}"::TEXT >= %s')
                whereValues.append(str(fromVal))
            elif toVal:
                whereConditions.append(f'"{key}"::TEXT <= %s')
                whereValues.append(str(toVal))
        return

    if op == "in" and isinstance(v, list):
        if not v:
            whereConditions.append("1 = 0")
        else:
            whereConditions.append(f'"{key}"::TEXT = ANY(%s)')
            whereValues.append([str(x) for x in v])
        return
    if op == "notIn" and isinstance(v, list):
        if v:
            whereConditions.append(f'NOT ("{key}"::TEXT = ANY(%s))')
            whereValues.append([str(x) for x in v])
        return

    whereConditions.append(f'"{key}"::TEXT ILIKE %s')
    whereValues.append(str(v))


# =============================================================================
# Namespace-Mapping für statische Tabellen
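Note: the helper above only appends SQL fragments and bind values; the caller still joins them into a WHERE clause. A minimal usage sketch under that assumption (table and column names are made up):

whereConditions: list = []
whereValues: list = []

# Numeric column: compared as double precision, not lexicographically as text
_rbacAppendPaginationDictFilter(
    "amount", {"operator": "gte", "value": "10"}, "DOUBLE PRECISION", whereConditions, whereValues
)
# Date range on an epoch-seconds column: ISO dates are converted to timestamps
_rbacAppendPaginationDictFilter(
    "sysCreatedAt", {"operator": "between", "value": {"from": "2025-01-01", "to": "2025-01-31"}},
    "DOUBLE PRECISION", whereConditions, whereValues,
)

sql = 'SELECT * FROM "BillingTransaction" WHERE ' + " AND ".join(whereConditions)
# cursor.execute(sql, whereValues)  # values stay parameterized, never interpolated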
@ -247,8 +380,8 @@ def getRecordsetWithRBAC(

    # Handle JSONB fields and ensure numeric types are correct
    # Import the helper function from connector module
    from modules.connectors.connectorDbPostgre import _get_model_fields
    fields = _get_model_fields(modelClass)
    from modules.connectors.connectorDbPostgre import getModelFields
    fields = getModelFields(modelClass)
    for record in records:
        for fieldName, fieldType in fields.items():
            # Ensure numeric fields are properly typed
@ -379,8 +512,8 @@ def getRecordsetPaginatedWithRBAC(
            whereValues.append(value)

    if pagination and pagination.filters:
        from modules.connectors.connectorDbPostgre import _get_model_fields
        fields = _get_model_fields(modelClass)
        from modules.connectors.connectorDbPostgre import getModelFields
        fields = getModelFields(modelClass)
        validColumns = set(fields.keys())
        for key, val in pagination.filters.items():
            if key == "search" and isinstance(val, str) and val.strip():
@ -401,36 +534,10 @@ def getRecordsetPaginatedWithRBAC(
                whereConditions.append(f'("{key}" IS NULL OR "{key}"::TEXT = \'\')')
                continue
            if isinstance(val, dict):
                op = val.get("operator", "equals")
                v = val.get("value", "")
                if op in ("equals", "eq"):
                    whereConditions.append(f'"{key}"::TEXT = %s')
                    whereValues.append(str(v))
                elif op == "contains":
                    whereConditions.append(f'"{key}"::TEXT ILIKE %s')
                    whereValues.append(f"%{v}%")
                elif op == "startsWith":
                    whereConditions.append(f'"{key}"::TEXT ILIKE %s')
                    whereValues.append(f"{v}%")
                elif op == "endsWith":
                    whereConditions.append(f'"{key}"::TEXT ILIKE %s')
                    whereValues.append(f"%{v}")
                elif op in ("gt", "gte", "lt", "lte"):
                    sqlOp = {"gt": ">", "gte": ">=", "lt": "<", "lte": "<="}[op]
                    whereConditions.append(f'"{key}"::TEXT {sqlOp} %s')
                    whereValues.append(str(v))
                elif op == "between":
                    fromVal = v.get("from", "") if isinstance(v, dict) else ""
                    toVal = v.get("to", "") if isinstance(v, dict) else ""
                    if fromVal and toVal:
                        whereConditions.append(f'"{key}"::TEXT >= %s AND "{key}"::TEXT <= %s')
                        whereValues.extend([str(fromVal), str(toVal)])
                    elif fromVal:
                        whereConditions.append(f'"{key}"::TEXT >= %s')
                        whereValues.append(str(fromVal))
                    elif toVal:
                        whereConditions.append(f'"{key}"::TEXT <= %s')
                        whereValues.append(str(toVal))
                colType = fields.get(key, "TEXT")
                _rbacAppendPaginationDictFilter(
                    key, val, colType, whereConditions, whereValues
                )
            else:
                whereConditions.append(f'"{key}"::TEXT ILIKE %s')
                whereValues.append(str(val))
@ -440,8 +547,8 @@ def getRecordsetPaginatedWithRBAC(

    orderParts: List[str] = []
    if pagination and pagination.sort:
        from modules.connectors.connectorDbPostgre import _get_model_fields
        validColumns = set(_get_model_fields(modelClass).keys())
        from modules.connectors.connectorDbPostgre import getModelFields
        validColumns = set(getModelFields(modelClass).keys())
        for sf in pagination.sort:
            if sf.field in validColumns:
                direction = "DESC" if sf.direction.lower() == "desc" else "ASC"
@ -464,10 +571,10 @@ def getRecordsetPaginatedWithRBAC(
        cursor.execute(dataSql, whereValues)
        records = [dict(row) for row in cursor.fetchall()]

    from modules.connectors.connectorDbPostgre import _get_model_fields, _parseRecordFields
    fields = _get_model_fields(modelClass)
    from modules.connectors.connectorDbPostgre import getModelFields, parseRecordFields
    fields = getModelFields(modelClass)
    for record in records:
        _parseRecordFields(record, fields, f"table {table}")
        parseRecordFields(record, fields, f"table {table}")
        for fieldName, fieldType in fields.items():
            if fieldType == "JSONB" and fieldName in record and record[fieldName] is None:
                modelFields = modelClass.model_fields
@ -484,6 +591,9 @@ def getRecordsetPaginatedWithRBAC(
    if enrichPermissions:
        records = _enrichRecordsWithPermissions(records, permissions, currentUser)

    from modules.routes.routeHelpers import enrichRowsWithFkLabels
    enrichRowsWithFkLabels(records, modelClass)

    if pagination:
        pageSize = pagination.pageSize
        totalPages = math.ceil(totalItems / pageSize) if totalItems > 0 else 0
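Note: enrichRowsWithFkLabels resolves the fk_target metadata on the datamodel into additional <field>Label columns on each returned row. A rough sketch of that idea; the lookup mechanics and function internals here are assumptions, not the actual routeHelpers implementation:

from typing import Dict, List

def enrichRowsWithFkLabelsSketch(rows: List[dict], fkTargets: Dict[str, dict], lookupLabel) -> None:
    # For every FK field, add '<field>Label' using the configured labelField.
    for row in rows:
        for field, target in fkTargets.items():
            fkValue = row.get(field)
            if fkValue:
                row[f"{field}Label"] = lookupLabel(target["table"], fkValue, target.get("labelField", "id"))

rows = [{"mandateId": "m-1"}]
enrichRowsWithFkLabelsSketch(
    rows,
    {"mandateId": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}},
    lambda table, value, labelField: {"m-1": "Demo Mandate"}.get(value, f"NA({value})"),
)
print(rows)  # [{'mandateId': 'm-1', 'mandateIdLabel': 'Demo Mandate'}]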
@ -518,8 +628,8 @@ def getDistinctColumnValuesWithRBAC(
        if not connector._ensureTableExists(modelClass):
            return []

        from modules.connectors.connectorDbPostgre import _get_model_fields
        fields = _get_model_fields(modelClass)
        from modules.connectors.connectorDbPostgre import getModelFields
        fields = getModelFields(modelClass)
        if column not in fields:
            return []
@ -584,29 +694,10 @@ def getDistinctColumnValuesWithRBAC(
                whereConditions.append(f'("{key}" IS NULL OR "{key}"::TEXT = \'\')')
                continue
            if isinstance(val, dict):
                op = val.get("operator", "equals")
                v = val.get("value", "")
                if op in ("equals", "eq"):
                    whereConditions.append(f'"{key}"::TEXT = %s')
                    whereValues.append(str(v))
                elif op == "contains":
                    whereConditions.append(f'"{key}"::TEXT ILIKE %s')
                    whereValues.append(f"%{v}%")
                elif op == "between":
                    fromVal = v.get("from", "") if isinstance(v, dict) else ""
                    toVal = v.get("to", "") if isinstance(v, dict) else ""
                    if fromVal and toVal:
                        whereConditions.append(f'"{key}"::TEXT >= %s AND "{key}"::TEXT <= %s')
                        whereValues.extend([str(fromVal), str(toVal)])
                    elif fromVal:
                        whereConditions.append(f'"{key}"::TEXT >= %s')
                        whereValues.append(str(fromVal))
                    elif toVal:
                        whereConditions.append(f'"{key}"::TEXT <= %s')
                        whereValues.append(str(toVal))
                else:
                    whereConditions.append(f'"{key}"::TEXT ILIKE %s')
                    whereValues.append(str(v) if isinstance(v, str) else str(val))
                colType = fields.get(key, "TEXT")
                _rbacAppendPaginationDictFilter(
                    key, val, colType, whereConditions, whereValues
                )
            else:
                whereConditions.append(f'"{key}"::TEXT ILIKE %s')
                whereValues.append(str(val))
@ -614,21 +705,34 @@ def getDistinctColumnValuesWithRBAC(
        whereClause = " WHERE " + " AND ".join(whereConditions) if whereConditions else ""
        notNullCond = f'"{column}" IS NOT NULL AND "{column}"::TEXT != \'\''
        if whereClause:
            whereClause += f" AND {notNullCond}"
            nonNullWhere = whereClause + f" AND {notNullCond}"
        else:
            whereClause = f" WHERE {notNullCond}"
            nonNullWhere = f" WHERE {notNullCond}"

        sql = f'SELECT DISTINCT "{column}"::TEXT AS val FROM "{table}"{whereClause} ORDER BY val'
        sql = f'SELECT DISTINCT "{column}"::TEXT AS val FROM "{table}"{nonNullWhere} ORDER BY val'

        with connector.connection.cursor() as cursor:
            cursor.execute(sql, whereValues)
            return [row["val"] for row in cursor.fetchall()]
            result = [row["val"] for row in cursor.fetchall()]

        # Include a None entry when NULL/empty rows exist (enables "(Leer)" filter)
        emptyCond = f'("{column}" IS NULL OR "{column}"::TEXT = \'\')'
        if whereClause:
            emptySql = f'SELECT 1 FROM "{table}"{whereClause} AND {emptyCond} LIMIT 1'
        else:
            emptySql = f'SELECT 1 FROM "{table}" WHERE {emptyCond} LIMIT 1'
        with connector.connection.cursor() as cursor:
            cursor.execute(emptySql, whereValues)
            if cursor.fetchone():
                result.append(None)

        return result
    except Exception as e:
        logger.error(f"Error in getDistinctColumnValuesWithRBAC for {table}.{column}: {e}")
        return []


def _buildFilesScopeWhereClause(
def buildFilesScopeWhereClause(
    currentUser: User,
    table: str,
    connector,
@ -673,7 +777,7 @@ def _buildFilesScopeWhereClause(
            if instances:
                effectiveMandateId = instances[0].get("mandateId") or ""
        except Exception as e:
            logger.warning(f"_buildFilesScopeWhereClause: could not resolve mandate for instance {featureInstanceId}: {e}")
            logger.warning(f"buildFilesScopeWhereClause: could not resolve mandate for instance {featureInstanceId}: {e}")

    scopeParts: List[str] = []
    scopeValues: List = []

@ -757,7 +861,7 @@ def buildRbacWhereClause(
    namespaceAll = TABLE_NAMESPACE.get(table, "system")
    # Files: scope-based context filtering applies even with ALL access
    if namespaceAll == "files":
        return _buildFilesScopeWhereClause(
        return buildFilesScopeWhereClause(
            currentUser, table, connector, mandateId, featureInstanceId,
            baseConditions, baseValues,
        )

@ -811,7 +915,7 @@ def buildRbacWhereClause(
    # - scope='featureInstance' → visible to users with access to that instance
    # - scope='personal' → only visible to owner (sysCreatedBy)
    if namespace == "files":
        return _buildFilesScopeWhereClause(
        return buildFilesScopeWhereClause(
            currentUser, table, connector, mandateId, featureInstanceId,
            baseConditions, baseValues,
        )