Compare commits
54 commits
main
...
feat/grafi
| Author | SHA1 | Date | |
|---|---|---|---|
| 00dc04cdae | |||
| 93aff13d26 | |||
| 6e3da0d0d8 | |||
| eeb9a4a161 | |||
| dac9911f8b | |||
| 5455e09367 | |||
| 9ae2ffc415 | |||
| f184da9898 | |||
| e6ca6a9d8e | |||
| f96325f804 | |||
| 60b2fcf56b | |||
|
|
64ee5200af | ||
|
|
e93ce71174 | ||
|
|
3da6e24bec | ||
|
|
d3d682fe4d | ||
|
|
7942766931 | ||
|
|
c140bd14d4 | ||
| 06d9910ecd | |||
|
|
b500bfa6c1 | ||
|
|
afd7e9d941 | ||
|
|
b12671bbb5 | ||
|
|
880fa4d787 | ||
| 72d3175f49 | |||
| ce671f61b6 | |||
| 4a840e9e6e | |||
| 93cb6939dc | |||
| 3add5c9a80 | |||
| 6a5ff1ff7c | |||
| dff3d41845 | |||
| a7f4055130 | |||
| 078b4eaaaf | |||
| 9d82d3d353 | |||
|
|
ba21005401 | ||
|
|
052647a52b | ||
|
|
49f3660d89 | ||
|
|
9816f13ae9 | ||
|
|
b405cebdec | ||
|
|
fb3a1f0a51 | ||
|
|
4d7ccb0418 | ||
|
|
d9fcea54ff | ||
|
|
e8abd553d0 | ||
|
|
30ea8bbefe | ||
|
|
96e2356ddd | ||
|
|
3507c16055 | ||
|
|
f8853d23ca | ||
|
|
d505ffd9cd | ||
|
|
60d5062204 | ||
|
|
564a1200c6 | ||
|
|
8221a0da3e | ||
|
|
24f0c3e2eb | ||
|
|
794ba36f27 | ||
|
|
b6be8f391e | ||
| dd2c771cb8 | |||
| e8adf18b0f |
349 changed files with 36799 additions and 7141 deletions
41
app.py
41
app.py
|
|
@ -294,6 +294,14 @@ except Exception as e:
|
||||||
async def lifespan(app: FastAPI):
|
async def lifespan(app: FastAPI):
|
||||||
logger.info("Application is starting up")
|
logger.info("Application is starting up")
|
||||||
|
|
||||||
|
# Validate FK metadata on all Pydantic models (fail-fast, no silent fallbacks)
|
||||||
|
from modules.shared.fkRegistry import validateFkTargets
|
||||||
|
fkErrors = validateFkTargets()
|
||||||
|
if fkErrors:
|
||||||
|
for err in fkErrors:
|
||||||
|
logger.error("FK metadata validation: %s", err)
|
||||||
|
raise SystemExit(f"FK metadata validation failed ({len(fkErrors)} error(s)) — fix datamodels before starting")
|
||||||
|
|
||||||
# AI connectors already pre-warmed at module-load via _eager_prewarm() in aicoreModelRegistry.
|
# AI connectors already pre-warmed at module-load via _eager_prewarm() in aicoreModelRegistry.
|
||||||
|
|
||||||
# Bootstrap database if needed (creates initial users, mandates, roles, etc.)
|
# Bootstrap database if needed (creates initial users, mandates, roles, etc.)
|
||||||
|
|
@ -327,9 +335,9 @@ async def lifespan(app: FastAPI):
|
||||||
|
|
||||||
# Sync gateway i18n registry to DB and load translation cache
|
# Sync gateway i18n registry to DB and load translation cache
|
||||||
try:
|
try:
|
||||||
from modules.shared.i18nRegistry import _syncRegistryToDb, _loadCache
|
from modules.shared.i18nRegistry import syncRegistryToDb, loadCache
|
||||||
await _syncRegistryToDb()
|
await syncRegistryToDb()
|
||||||
await _loadCache()
|
await loadCache()
|
||||||
logger.info("i18n registry sync + cache load completed")
|
logger.info("i18n registry sync + cache load completed")
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.warning(f"i18n registry sync failed (non-critical): {e}")
|
logger.warning(f"i18n registry sync failed (non-critical): {e}")
|
||||||
|
|
@ -397,6 +405,16 @@ async def lifespan(app: FastAPI):
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.warning(f"BackgroundJob recovery failed (non-critical): {e}")
|
logger.warning(f"BackgroundJob recovery failed (non-critical): {e}")
|
||||||
|
|
||||||
|
# Subscribe knowledge ingestion to connection lifecycle events so OAuth
|
||||||
|
# connect/disconnect reliably trigger bootstrap/purge.
|
||||||
|
try:
|
||||||
|
from modules.serviceCenter.services.serviceKnowledge.subConnectorIngestConsumer import (
|
||||||
|
registerKnowledgeIngestionConsumer,
|
||||||
|
)
|
||||||
|
registerKnowledgeIngestionConsumer()
|
||||||
|
except Exception as e:
|
||||||
|
logger.warning(f"KnowledgeIngestionConsumer registration failed (non-critical): {e}")
|
||||||
|
|
||||||
yield
|
yield
|
||||||
|
|
||||||
# --- Stop Managers ---
|
# --- Stop Managers ---
|
||||||
|
|
@ -522,15 +540,15 @@ from modules.auth import (
|
||||||
# Per-request context middleware: language (Accept-Language) + user timezone (X-User-Timezone).
|
# Per-request context middleware: language (Accept-Language) + user timezone (X-User-Timezone).
|
||||||
# Both are written into ContextVars and consumed by t() / resolveText() and getRequestNow()
|
# Both are written into ContextVars and consumed by t() / resolveText() and getRequestNow()
|
||||||
# without having to thread them through every call site.
|
# without having to thread them through every call site.
|
||||||
from modules.shared.i18nRegistry import _setLanguage, normalizePrimaryLanguageTag
|
from modules.shared.i18nRegistry import setLanguage, normalizePrimaryLanguageTag
|
||||||
from modules.shared.timeUtils import _setRequestTimezone
|
from modules.shared.timeUtils import setRequestTimezone
|
||||||
|
|
||||||
@app.middleware("http")
|
@app.middleware("http")
|
||||||
async def _requestContextMiddleware(request: Request, call_next):
|
async def _requestContextMiddleware(request: Request, call_next):
|
||||||
acceptLang = request.headers.get("Accept-Language", "")
|
acceptLang = request.headers.get("Accept-Language", "")
|
||||||
lang = normalizePrimaryLanguageTag(acceptLang, "de")
|
lang = normalizePrimaryLanguageTag(acceptLang, "de")
|
||||||
_setLanguage(lang)
|
setLanguage(lang)
|
||||||
_setRequestTimezone(request.headers.get("X-User-Timezone", ""))
|
setRequestTimezone(request.headers.get("X-User-Timezone", ""))
|
||||||
return await call_next(request)
|
return await call_next(request)
|
||||||
|
|
||||||
app.add_middleware(CSRFMiddleware)
|
app.add_middleware(CSRFMiddleware)
|
||||||
|
|
@ -582,6 +600,9 @@ app.include_router(promptRouter)
|
||||||
from modules.routes.routeDataConnections import router as connectionsRouter
|
from modules.routes.routeDataConnections import router as connectionsRouter
|
||||||
app.include_router(connectionsRouter)
|
app.include_router(connectionsRouter)
|
||||||
|
|
||||||
|
from modules.routes.routeTableViews import router as tableViewsRouter
|
||||||
|
app.include_router(tableViewsRouter)
|
||||||
|
|
||||||
from modules.routes.routeSecurityLocal import router as localRouter
|
from modules.routes.routeSecurityLocal import router as localRouter
|
||||||
app.include_router(localRouter)
|
app.include_router(localRouter)
|
||||||
|
|
||||||
|
|
@ -594,6 +615,9 @@ app.include_router(googleRouter)
|
||||||
from modules.routes.routeSecurityClickup import router as clickupRouter
|
from modules.routes.routeSecurityClickup import router as clickupRouter
|
||||||
app.include_router(clickupRouter)
|
app.include_router(clickupRouter)
|
||||||
|
|
||||||
|
from modules.routes.routeSecurityInfomaniak import router as infomaniakRouter
|
||||||
|
app.include_router(infomaniakRouter)
|
||||||
|
|
||||||
from modules.routes.routeClickup import router as clickupApiRouter
|
from modules.routes.routeClickup import router as clickupApiRouter
|
||||||
app.include_router(clickupApiRouter)
|
app.include_router(clickupApiRouter)
|
||||||
|
|
||||||
|
|
@ -661,6 +685,9 @@ app.include_router(navigationRouter)
|
||||||
from modules.routes.routeWorkflowDashboard import router as workflowDashboardRouter
|
from modules.routes.routeWorkflowDashboard import router as workflowDashboardRouter
|
||||||
app.include_router(workflowDashboardRouter)
|
app.include_router(workflowDashboardRouter)
|
||||||
|
|
||||||
|
from modules.routes.routeAutomationWorkspace import router as automationWorkspaceRouter
|
||||||
|
app.include_router(automationWorkspaceRouter)
|
||||||
|
|
||||||
# ============================================================================
|
# ============================================================================
|
||||||
# PLUG&PLAY FEATURE ROUTERS
|
# PLUG&PLAY FEATURE ROUTERS
|
||||||
# Dynamically load routers from feature containers in modules/features/
|
# Dynamically load routers from feature containers in modules/features/
|
||||||
|
|
|
||||||
BIN
assets/fonts/NotoEmoji-Regular.ttf
Normal file
BIN
assets/fonts/NotoEmoji-Regular.ttf
Normal file
Binary file not shown.
|
|
@ -37,7 +37,8 @@
|
||||||
"y": 200,
|
"y": 200,
|
||||||
"title": "Pro Scan-Dokument",
|
"title": "Pro Scan-Dokument",
|
||||||
"parameters": {
|
"parameters": {
|
||||||
"level": 1,
|
"items": {"type": "ref", "nodeId": "n2", "path": ["files"]},
|
||||||
|
"level": "auto",
|
||||||
"concurrency": 1
|
"concurrency": 1
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
|
|
||||||
107
env_dev.20260428_213450.backup
Normal file
107
env_dev.20260428_213450.backup
Normal file
|
|
@ -0,0 +1,107 @@
|
||||||
|
# Development Environment Configuration
|
||||||
|
|
||||||
|
# System Configuration
|
||||||
|
APP_ENV_TYPE = dev
|
||||||
|
APP_ENV_LABEL = Development Instance Patrick
|
||||||
|
APP_API_URL = http://localhost:8000
|
||||||
|
APP_KEY_SYSVAR = D:/Athi/Local/Web/poweron/local/notes/key.txt
|
||||||
|
APP_INIT_PASS_ADMIN_SECRET = DEV_ENC:Z0FBQUFBQm8xSUpEeFFtRGtQeVUtcjlrU3dab1ZxUm9WSks0MlJVYUtERFlqUElHemZrOGNENk1tcmJNX3Vxc01UMDhlNU40VzZZRVBpUGNmT3podzZrOGhOeEJIUEt4eVlSWG5UYXA3d09DVXlLT21Kb1JYSUU9
|
||||||
|
APP_INIT_PASS_EVENT_SECRET = DEV_ENC:Z0FBQUFBQm8xSUpERzZjNm56WGVBdjJTeG5Udjd6OGQwUVotYXUzQjJ1YVNyVXVBa3NZVml3ODU0MVNkZjhWWmJwNUFkc19BcHlHMTU1Q3BRcHU0cDBoZkFlR2l6UEZQU3d2U3MtMDh5UDZteGFoQ0EyMUE1ckE9
|
||||||
|
|
||||||
|
# PostgreSQL DB Host
|
||||||
|
DB_HOST=localhost
|
||||||
|
DB_USER=poweron_dev
|
||||||
|
DB_PASSWORD_SECRET = DEV_ENC:Z0FBQUFBQm8xSUpEcUIxNEFfQ2xnS0RrSC1KNnUxTlVvTGZoMHgzaEI4Z3NlVzVROTVLak5Ubi1vaEZubFZaMTFKMGd6MXAxekN2d2NvMy1hRjg2UVhybktlcFA5anZ1WjFlQmZhcXdwaGhWdzRDc3ExeUhzWTg9
|
||||||
|
DB_PORT=5432
|
||||||
|
|
||||||
|
# Security Configuration
|
||||||
|
APP_JWT_KEY_SECRET = DEV_ENC:Z0FBQUFBQm8xSUpERjlrSktmZHVuQnJ1VVJDdndLaUcxZGJsT2ZlUFRlcFdOZ001RnlzM2FhLWhRV2tjWWFhaWQwQ3hkcUFvbThMcndxSjFpYTdfRV9OZGhTcksxbXFTZWg5MDZvOHpCVXBHcDJYaHlJM0tyNWRZckZsVHpQcmxTZHJoZUs1M3lfU2ljRnJaTmNSQ0w0X085OXI0QW80M2xfQnJqZmZ6VEh3TUltX0xzeE42SGtZPQ==
|
||||||
|
APP_TOKEN_EXPIRY=300
|
||||||
|
|
||||||
|
# CORS Configuration
|
||||||
|
APP_ALLOWED_ORIGINS=http://localhost:8080,http://localhost:5176,https://playground.poweron-center.net
|
||||||
|
|
||||||
|
# Logging configuration
|
||||||
|
APP_LOGGING_LOG_LEVEL = DEBUG
|
||||||
|
APP_LOGGING_LOG_DIR = D:/Athi/Local/Web/poweron/local/logs
|
||||||
|
APP_LOGGING_FORMAT = %(asctime)s - %(levelname)s - %(name)s - %(message)s
|
||||||
|
APP_LOGGING_DATE_FORMAT = %Y-%m-%d %H:%M:%S
|
||||||
|
APP_LOGGING_CONSOLE_ENABLED = True
|
||||||
|
APP_LOGGING_FILE_ENABLED = True
|
||||||
|
APP_LOGGING_ROTATION_SIZE = 10485760
|
||||||
|
APP_LOGGING_BACKUP_COUNT = 5
|
||||||
|
|
||||||
|
# OAuth: Auth app (login/JWT) vs Data app (Microsoft Graph / Google APIs). Same IDs until you split apps in Azure / GCP.
|
||||||
|
Service_MSFT_AUTH_CLIENT_ID = c7e7112d-61dc-4f3a-8cd3-08cc4cd7504c
|
||||||
|
Service_MSFT_AUTH_CLIENT_SECRET = DEV_ENC:Z0FBQUFBQm83T29rV1pQelMtc1p1MXR4NTFpa19CTEhHQ0xfNmdPUmZqcWp5UHBMS0hYTGl4c1pPdmhTNTJVWUl5WnlnUUZhV0VTRzVCb0d5YjR1NnZPZk5CZ0dGazNGdUJVbjkxeVdrYlNiVjJUYzF2aVFtQnVxTHFqTTJqZlF0RTFGNmE1OGN1TEk=
|
||||||
|
Service_MSFT_AUTH_REDIRECT_URI = http://localhost:8000/api/msft/auth/login/callback
|
||||||
|
Service_MSFT_DATA_CLIENT_ID = c7e7112d-61dc-4f3a-8cd3-08cc4cd7504c
|
||||||
|
Service_MSFT_DATA_CLIENT_SECRET = DEV_ENC:Z0FBQUFBQm83T29rV1pQelMtc1p1MXR4NTFpa19CTEhHQ0xfNmdPUmZqcWp5UHBMS0hYTGl4c1pPdmhTNTJVWUl5WnlnUUZhV0VTRzVCb0d5YjR1NnZPZk5CZ0dGazNGdUJVbjkxeVdrYlNiVjJUYzF2aVFtQnVxTHFqTTJqZlF0RTFGNmE1OGN1TEk=
|
||||||
|
Service_MSFT_DATA_REDIRECT_URI = http://localhost:8000/api/msft/auth/connect/callback
|
||||||
|
|
||||||
|
Service_GOOGLE_AUTH_CLIENT_ID = 354925410565-aqs2b2qaiqmm73qpjnel6al8eid78uvg.apps.googleusercontent.com
|
||||||
|
Service_GOOGLE_AUTH_CLIENT_SECRET = DEV_ENC:Z0FBQUFBQm8xSUpETDJhbGVQMHlFQzNPVFI1ZzBMa3pNMGlQUHhaQm10eVl1bFlSeTBybzlTOWE2MURXQ0hkRlo0NlNGbHQxWEl1OVkxQnVKYlhhOXR1cUF4T3k0WDdscktkY1oyYllRTmdDTWpfbUdwWGtSd1JvNlYxeTBJdEtaaS1vYnItcW0yaFM=
|
||||||
|
Service_GOOGLE_AUTH_REDIRECT_URI = http://localhost:8000/api/google/auth/login/callback
|
||||||
|
Service_GOOGLE_DATA_CLIENT_ID = 354925410565-aqs2b2qaiqmm73qpjnel6al8eid78uvg.apps.googleusercontent.com
|
||||||
|
Service_GOOGLE_DATA_CLIENT_SECRET = DEV_ENC:Z0FBQUFBQm8xSUpETDJhbGVQMHlFQzNPVFI1ZzBMa3pNMGlQUHhaQm10eVl1bFlSeTBybzlTOWE2MURXQ0hkRlo0NlNGbHQxWEl1OVkxQnVKYlhhOXR1cUF4T3k0WDdscktkY1oyYllRTmdDTWpfbUdwWGtSd1JvNlYxeTBJdEtaaS1vYnItcW0yaFM=
|
||||||
|
Service_GOOGLE_DATA_REDIRECT_URI = http://localhost:8000/api/google/auth/connect/callback
|
||||||
|
|
||||||
|
# ClickUp OAuth (Verbindungen / automation). Create an app in ClickUp: Settings → Apps → API; set redirect URL to Service_CLICKUP_OAUTH_REDIRECT_URI exactly.
|
||||||
|
Service_CLICKUP_CLIENT_ID = O3FX3H602A30MQN4I4SBNGJLIDBD5SL4
|
||||||
|
Service_CLICKUP_CLIENT_SECRET = DEV_ENC:Z0FBQUFBQnB5dkd4ZWVBeHVtRnpIT0VBN0tSZDhLRmFmN05DOVBOelJtLWhkVnJDRVBqUkh3bDFTZFRWaWQ1cWowdGNLUk5IQzlGN1J6RFVCaW8zRnBwLVBnclJfdWgxV3pVRzFEV2lwcW5Rc19Xa1ROWXNJcUF0ajZaYUxOUXk0WHRsRmJLM25FaHV5T2IxdV92ZW1nRjhzaGpwU0l2Wm9FTkRnY2lJVjhuNHUwT29salAxYV8wPQ==
|
||||||
|
Service_CLICKUP_OAUTH_REDIRECT_URI = http://localhost:8000/api/clickup/auth/connect/callback
|
||||||
|
|
||||||
|
# Infomaniak OAuth -- Data App (kDrive + Mail)
|
||||||
|
Service_INFOMANIAK_DATA_CLIENT_ID = abd71a95-7c67-465a-b7ab-963cc5eccb4b
|
||||||
|
Service_INFOMANIAK_DATA_CLIENT_SECRET = jwaEZza0VnmAHA1vIQJcpaCC1O4ND6IS0mkQ0GGiVlmof7XHxUcl9YMl7TbtEINz
|
||||||
|
Service_INFOMANIAK_OAUTH_REDIRECT_URI = http://localhost:8000/api/infomaniak/auth/connect/callback
|
||||||
|
|
||||||
|
# Stripe Billing (both end with _SECRET for encryption script)
|
||||||
|
STRIPE_SECRET_KEY_SECRET = DEV_ENC:Z0FBQUFBQnB5dkd5aHNGejgzQmpTdmprdzQxR19KZkh3MlhYUTNseFN3WnlaWjh2SDZyalN6aU9xSktkbUQwUnZrVnlvbGVRQm4yZFdiRU5aSEk5WVJuUnR4VUwtTm9OVk1WWmJQeU5QaDdib0hfVWV5U1BfYTFXRmdoOWdnOWxkb3JFQmF3bm45UjFUVUxmWGtGRkFKUGd6bmhpQlFnaVI3Q2lLdDlsY1VESk1vOEM0ZFBJNW1qcVZ0N2tPYmRLNmVKajZ2M3o3S05lWnRRVG5LdkRseW4wQ3VjNHNQZTZUdz09
|
||||||
|
STRIPE_WEBHOOK_SECRET = DEV_ENC:Z0FBQUFBQnB5dkd5dDJMSHBrVk8wTzJhU2xzTTZCZWdvWmU2NGI2WklfRXRJZVUzaVYyOU9GLUZsalUwa2lPdEgtUHo0dVVvRDU1cy1saHJyU0Rxa2xQZjBuakExQzk3bmxBcU9WbEIxUEtpR1JoUFMxZG9ISGRZUXFhdFpSMGxvQUV3a0VLQllfUUtCOHZwTGdteV9rYTFOazBfSlN3ekNWblFpakJlZVlCTmNkWWQ4Sm01a1RCWTlnTlFHWVA0MkZYMlprUExrWFN2V0NVU1BTd1NKczFJbVo3VHpLdlc4UT09
|
||||||
|
STRIPE_API_VERSION = 2026-01-28.clover
|
||||||
|
STRIPE_AUTOMATIC_TAX_ENABLED = false
|
||||||
|
STRIPE_TAX_RATE_ID_CH_VAT = txr_1TOQd14OUoIL0Osj7A0ZQlr0
|
||||||
|
|
||||||
|
# AI configuration
|
||||||
|
Connector_AiOpenai_API_SECRET = DEV_ENC:Z0FBQUFBQnBaSnM4TWFRRmxVQmNQblVIYmc1Y0Q3aW9zZUtDWlNWdGZjbFpncGp2NHN2QjkxMWxibUJnZDBId252MWk5TXN3Yk14ajFIdi1CTkx2ZWx2QzF5OFR6LUx5azQ3dnNLaXJBOHNxc0tlWmtZcTFVelF4eXBSM2JkbHd2eTM0VHNXdHNtVUprZWtPVzctNlJsZHNmM20tU1N6Q1Q2cHFYSi1tNlhZNDNabTVuaEVGWmIydEhadTcyMlBURmw2aUJxOF9GTzR0dTZiNGZfOFlHaVpPZ1A1LXhhOEFtN1J5TEVNNWtMcGpyNkMzSl8xRnZsaTF1WTZrOUZmb0cxVURjSGFLS2dIYTQyZEJtTm90bEYxVWxNNXVPdTVjaVhYbXhxT3JsVDM5VjZMVFZKSE1tZnM9
|
||||||
|
Connector_AiAnthropic_API_SECRET = DEV_ENC:Z0FBQUFBQm8xSUpENmFBWG16STFQUVZxNzZZRzRLYTA4X3lRanF1VkF4cU45OExNMzlsQmdISGFxTUxud1dXODBKcFhMVG9KNjdWVnlTTFFROVc3NDlsdlNHLUJXeG41NDBHaXhHR0VHVWl5UW9RNkVWbmlhakRKVW5pM0R4VHk0LUw0TV9LdkljNHdBLXJua21NQkl2b3l4UkVkMGN1YjBrMmJEeWtMay1jbmxrYWJNbUV0aktCXzU1djR2d2RSQXZORTNwcG92ZUVvVGMtQzQzTTVncEZTRGRtZUFIZWQ0dz09
|
||||||
|
Connector_AiPerplexity_API_SECRET = DEV_ENC:Z0FBQUFBQnB5dkd5ZmdDZ3hrSElrMnQzNFAtel9wX191VjVzN2g1LWZoa0V1YklubEdmMEJDdEZiR1RWeVZrM3V3enBHX3p6WUtTS0kwYkFyVEF0Nm8zX05CelVQcFJUc0lwVW5iNFczc1p1WWJ2WFBmd0lpLUxxWndEeUh0b2hGUHVpN19vb19nMTBnV1A1VmNpWERVX05lQ29VS20wTjZ3PT0=
|
||||||
|
Connector_AiTavily_API_SECRET = DEV_ENC:Z0FBQUFBQm8xSUpEQTdnUHMwd2pIaXNtMmtCTFREd0pyQXRKb1F5eGtHSnkyOGZiUnlBOFc0b3Vzcndrc3ViRm1nMDJIOEZKYWxqdWNkZGh5N0Z4R0JlQmxXSG5pVnJUR2VYckZhMWNMZ1FNeXJ3enJLVlpiblhOZTNleUg3ZzZyUzRZanFSeDlVMkI=
|
||||||
|
Connector_AiPrivateLlm_API_SECRET = DEV_ENC:Z0FBQUFBQnBudkpGRHM5eFdUVmVZU1R1cHBwN1RlMUx4T0NlLTJLUFFVX3J2OElDWFpuZmJHVmp4Z3BNNWMwZUVVZUd2TFhRSjVmVkVlcFlVRWtybXh0ZHloZ01ZcnVvX195YjdlWVdEcjZSWFFTTlNBWUlaTlNoLWhqVFBIb0thVlBiaWhjYjFQOFY=
|
||||||
|
Connector_AiMistral_API_SECRET = DEV_ENC:Z0FBQUFBQnBudkpGeEQxYUIxOHhia0JlQWpWQ2dWQWZzY3l6SWwyUnJoR1hRQWloX2lxb2lGNkc4UnA4U2tWNjJaYzB1d1hvNG9fWUp1N3V4OW9FMGhaWVhjSlVwWEc1X2loVDBSZDEtdHdfcTA5QkcxQTR4OHc4RkRzclJrU2d1RFZpNDJkRDRURlE=
|
||||||
|
|
||||||
|
Service_MSFT_TENANT_ID = common
|
||||||
|
|
||||||
|
# Google Cloud Speech Services configuration
|
||||||
|
Connector_GoogleSpeech_API_KEY_SECRET = DEV_ENC:Z0FBQUFBQm8xSUpETk5FWWM3Q0JKMzhIYTlyMkhuNjA4NlF4dk82U2NScHhTVGY3UG83NkhfX3RrcWVtWWcyLXRjU1dTT21zWEl6YWRMMUFndXpsUnJOeHh3QThsNDZKRXROTzdXRUdsT0JZajZJNVlfb0gtMXkwWm9DOERPVnpjU0pyUEZfOGJsUnprT3ltMVVhalUyUm9hMUFtZEtHUnJqOGZ4dEZjZm5SWVVTckVCWnY1UkdVSHVmUlgwbnAyc0xDQW84R3ViSko5OHVCVWZRUVNiaG1pVFB6X3EwS0FPd2dUYjhiSmRjcXh2WEZiXzI4SFZqT21tbDduUWRyVWdFZXpmcVM5ZDR0VWtzZnF5UER6cGwwS2JlLV9CSTZ0Z0IyQ1h0YW9TcmhRTXZEckp4bWhmTkt6UTNYMk4zVkpnbUJmaDIxZnoyR2dWTEYwTUFEV0w2eUdUUGpoZk9XRkt4RVF1Z1NPdUpBeTcyWV9PY1Ffd2s0ZEdVekxGekhoeEl4TmNqaXYtbUJuSVdycFducERWdWtZajZnX011Q2w4eE9VMTBqQ1ZxRmdScWhXY1E3WWhzX1JZcHhxam9FbDVPN3Q1MWtrMUZuTUg3LVFQVHp1T1hpQWNDMzEzekVJWk9ybl91YUVjSkFob1VaMi1ONEtuMnRSOEg1S3QybUMwbVZDejItajBLTjM2Zy1hNzZQMW5LLVVDVGdFWm5BZUxNeEFnUkZzU3dxV0lCUlc0LWo4b05GczVpOGZSV2ZxbFBwUml6OU5tYjdnTks3Y3hrVEZVTHlmc1NPdFh4WE5pWldEZklOQUxBbjBpMTlkX3FFQVJ6c2NSZGdzTThycE92VW82enZKamhiRGFnU25aZGlHZHhZd2lUUmhuTVptNjhoWVlJQkxIOEkzbzJNMjZCZFJyM25tdXBnQ2ZWaHV3b2p6UWJpdk9xUEhBc1dyTlNmeF9wbm5yYUhHV01UZnVXWDFlNzBkdXlWUWhvcmJpSmljbmE3LUpUZEg4VzRwZ2JVSjdYUm1sODViQXVxUzdGTmZFbVpiN2V1YW5XV3U4b2VRWmxldGVGVHZsSldoekhVLU9wZ2V0cGZIYkNqM2pXVGctQVAyUm4xTHhpd1VVLXFhcnVEV21Rby1hbTlqTl84TjVveHdYTExUVkhHQ0ltaTB2WXJnY1NQVE5PbWg3ejgySElYc1JSTlQ3NDlFUWR6STZVUjVqaXFRN200NF9LY1ljQ0R2UldlWUtKY1NQVnJ4QXRyYTBGSWVuenhyM0Z0cWtndTd1eG8xRzY5a2dNZ1hkQm5MV3BHVzA2N1QwUkd6WlRGYTZQOUhnVWQ2S0Y5U0s1dXFNVXh5Q2pLWVUxSUQ2MlR1ak52NmRIZ2hlYTk1SGZGWS1RV3hWVU9rR3d1Rk9MLS11REZXbzhqMHpsSm1HYW1jMUNLT29YOHZsRWNaLTVvOFpmT3l3MHVwaERTT0dNLWFjcGRYZ25qT2szTkVFUnRFR3JWYS1aNXFIRnMyalozTlQzNFF2NXJLVHVPVF9zdTF6ZjlkbzJ4RFc2ZENmNFFxZDZzTzhfMUl0bW96V0lPZkh1dXFYZlEteFBlSG84Si1FNS1TTi1OMkFnX2pOYW8xY3MxMVJnVC02MDUyaXZfMEVHWDQtVlRpcENmV0h3V0dCWEFRS2prQXdNRlQ5dnRFVHU0Q1dNTmh0SlBCaU55bFMydWM1TTFFLW96ODBnV3dNZHFZTWZhRURYSHlrdzF3RlRuWDBoQUhSOUJWemtRM3pxcDJFbGJoaTJ3ZktRTlJxbXltaHBoZXVJVDlxS3cxNWo2c0ZBV0NzaUstRWdsMW1xLXFkanZGYUFiU0tSLXFQa0tkcDFoMV9kak41ZjQ0R214UmtOR1ZBanRuemY3Mmw1SkZ5aDZodGIzT3N2aV85MW9kcld6c0g0ZDgtTWo3
b3Y3VjJCRnR2U2tMVm9rUXNVRnVHbzZXVTZ6RmI2RkNmajBfMWVnODVFbnpkT0oyci15czJHU0p1cUowTGZJMzVnd3hIRjQyTVhKOGRkcFRKdVpyQ3Yzd01Jb1lSajFmV0paeEV0cjk1SmpmdWpDVFJMUmMtUFctOGhaTmlKQXNRVlVUNlhJemxudHZCR056SVlBb3NOTEYxRTRLaFlVd2d3TWtxVlB6ZEtQLTkxOGMyY3N0a2pYRFUweDBNaGhja2xSSklPOUZla1dKTWRNbG8tUGdSNEV5cW90OWlOZFlIUExBd3U2b2hyS1owbXVMM3p0Qm41cUtzWUxYNzB1N3JpUTNBSGdsT0NuamNTb1lIbXR4MG1sakNPVkxBUXRLVE1xX0YxWDhOcERIY1lTQVFqS01CaXZKNllFaXlIR0JsM1pKMmV1OUo3TGI1WkRaVnYxUTl1LTM0SU1qN1V1b0RCT0x0VHNLTmNLZnk1S0MxYnBBcm03WnVua0xqaEhGUzhOU253ZkppRzdudXBSVlMxeFVOSWxtZ1o2RVBSQUhEUEFuQ1hxSVZMME4yWUtaU3VyRGo3RkUyRUNjT0pNcE1BdE1ZRzdXVl8ydUtXZjdMdHdEVW4teHUtTi1HSGliLUxud21TX0NtcGVkRFBHNkZ1WTlNczR4OUJfUVluc1BoV09oWS1scUdsNnB5d1U5M1huX3k4QzAyNldtb2hybktYN2xKZ1NTNWFsaWwzV3pCRVhkaGR5eTNlV1d6ZzFfaFZTT0E4UjRpQ3pKdEZxUlJ6UFZXM3laUndyWEk2NlBXLUpoajVhZzVwQXpWVzUtVjVNZFBwdWdQa3AxZC1KdGdqNnhibjN4dmFYb2cxcEVwc1g5R09zRUdINUZtOE5QRjVUU0dpZy1QVl9odnFtVDNuWFZLSURtMXlSMlhRNTBWSVFJbEdOOWpfVWV0SmdRWDdlUXZZWE8xRUxDN1I0aEN6MHYwNzM1cmpJS0ZpMnBYWkxfb3FsbEV1VnlqWGxqdVJ6SHlwSjAzRlMycTBaQ295NXNnZERpUnJQcjhrUUd3bkI4bDVzRmxQblhkaFJPTTdISnVUQmhET3BOMTM4bjVvUEc2VmZhb2lrR1FyTUl2RWNEeGg0U0dsNnV6eU5zOUxiNDY5SXBxR0hBS00wOTgyWTFnWkQyaEtLVUloT3ZxZGh0RWVGRmJzenFsaUtfZENQM0JzdkVVeTdXR3hUSmJST1NBMUI1NkVFWncwNW5JZVVLX1p1RXdqVnFfQWpvQ08yQjZhN1NkTkpTSnUxOVRXZXE0WFEtZWxhZW1NNXYtQ2sya0VGLURmS01lMkctNVY3c2ZhN0ZGRFgwWHlabTFkeS1hcUZ1dDZ3cnpPQ3hha2IzVE11M0pqbklmU0diczBqTFBNZC1QZGp6VzNTSnJVSjJoWkJUQjVORG4tYUJmMEJtSUNUdVpEaGt6OTM3TjFOdVhXUHItZjRtZ25nU3NhZC1sVTVXNTRDTmxZbnlfeHNsdkpuMXhUYnE1MnpVQ0ZOclRWM1M4eHdXTzRXbFRZZVQtTS1iRVdXVWZMSGotcWg3MUxUYTFnSEEtanBCRHlZRUNIdGdpUFhsYjdYUndCZnRITzhMZVJ1dHFoVlVNb0duVjlxd0U4OGRuQVV3MG90R0hiYW5MWkxWVklzbWFRNzBfSUNrdzc5bVdtTXg0dExEYnRCaDI3c1I4TWFwLXZKR0wxSjRZYjZIV3ZqZjNqTWhFT0RGSDVMc1A1UzY2bDBiMGFSUy1fNVRQRzRJWDVydUpqb1ZfSHNVbldVeUN2YlAxSW5WVDdxVzJ1WHpLeUdmb0xWMDNHN05oQzY3YnhvUUdhS2xaOHNidkVvbTZtSHFlblhOYmwyR3NQdVJDRUdxREhWdF9ZcXhwUWxHc2hyLW5vUGhIUVhJNUNhY0hFU0ptVnI0TFVhZDE1TFBBUEstSkRoZWJ5MHJhUmZrR1ZrRlFtRGpxS1pO
MmFMQjBsdjluY3FiYUU4eGJVVXlZVEpuNWdHVVhJMGtwaTdZR2NDbXd2eHpOQ09SeTV6N1BaVUpsR1pQVDBZcElJUUt6VnVpQmxSYnE4Y1BCWV9IRWdVV0p3enBGVHItdnBGN3NyNWFBWmkySnByWThsbDliSlExQmp3LVlBaDIyZXp6UnR6cU9rTzJmTDBlSVpON0tiWllMdm1oME1zTFl2S2ZYYllhQlY2VHNZRGtHUDY4U1lIVExLZTU4VzZxSTZrZHl1ZTBDc0g4SjI4WGYyZHV1bm9wQ3R2Z09ld1ZmUkN5alJGeHZKSHl1bWhQVXpNMzdjblpLcUhfSm02Qlh5S1FVN3lIcHl0NnlRPT0=
|
||||||
|
|
||||||
|
# Feature SyncDelta JIRA configuration
|
||||||
|
Feature_SyncDelta_JIRA_DELTA_TOKEN_SECRET = DEV_ENC:Z0FBQUFBQm8xSUpEbm0yRUJ6VUJKbUwyRW5kMnRaNW4wM2YxMkJUTXVXZUdmdVRCaUZIVHU2TTV2RWZLRmUtZkcwZE4yRUNlNDQ0aUJWYjNfdVg5YjV5c2JwMHhoUUYxZWdkeS11bXR0eGxRLWRVaVU3cUVQZWJlNDRtY1lWUDdqeDVFSlpXS0VFX21WajlRS3lHQjc0bS11akkybWV3QUFlR2hNWUNYLUdiRjZuN2dQODdDSExXWG1Dd2ZGclI2aUhlSWhETVZuY3hYdnhkb2c2LU1JTFBvWFpTNmZtMkNVOTZTejJwbDI2eGE0OS1xUlIwQnlCSmFxRFNCeVJNVzlOMDhTR1VUamx4RDRyV3p6Tk9qVHBrWWdySUM3TVRaYjd3N0JHMFhpdzFhZTNDLTFkRVQ2RVE4U19COXRhRWtNc0NVOHRqUS1CRDFpZ19xQmtFLU9YSDU3TXBZQXpVcld3PT0=
|
||||||
|
|
||||||
|
# Teamsbot Browser Bot Service
|
||||||
|
# For local testing: run the bot locally with `npm run dev` in service-teams-browser-bot
|
||||||
|
# The bot will connect back to localhost:8000 via WebSocket
|
||||||
|
TEAMSBOT_BROWSER_BOT_URL = http://localhost:4100
|
||||||
|
|
||||||
|
# Debug Configuration
|
||||||
|
APP_DEBUG_CHAT_WORKFLOW_ENABLED = True
|
||||||
|
APP_DEBUG_CHAT_WORKFLOW_DIR = D:/Athi/Local/Web/poweron/local/debug
|
||||||
|
APP_DEBUG_ACCOUNTING_SYNC_ENABLED = True
|
||||||
|
APP_DEBUG_ACCOUNTING_SYNC_DIR = D:/Athi/Local/Web/poweron/local/debug/sync
|
||||||
|
|
||||||
|
# Manadate Pre-Processing Servers
|
||||||
|
PREPROCESS_ALTHAUS_CHAT_SECRET = DEV_ENC:Z0FBQUFBQnBudkpGbEphQ3ZUMlFMQ2EwSGpoSE9NNzRJNTJtaGk1N0RGakdIYnVVeVFHZmF5OXB3QTVWLVNaZk9wNkhfQkZWRnVwRGRxem9iRzJIWXdpX1NIN2FwSExfT3c9PQ==
|
||||||
|
|
||||||
|
# Preprocessor API Configuration
|
||||||
|
PP_QUERY_API_KEY=ouho02j0rj2oijroi3rj2oijro23jr0990
|
||||||
|
PP_QUERY_BASE_URL=https://poweron-althaus-preprocess-prod-e3fegaatc7faency.switzerlandnorth-01.azurewebsites.net/api/v1/dataquery/query
|
||||||
|
|
||||||
|
# Azure Communication Services Email Configuration
|
||||||
|
MESSAGING_ACS_CONNECTION_STRING = endpoint=https://mailing-poweron-prod.switzerland.communication.azure.com/;accesskey=4UizRfBKBgMhDgQ92IYINM6dJsO1HIeL6W1DvIX9S0GtaS1PjIXqJQQJ99CAACULyCpHwxUcAAAAAZCSuSCt
|
||||||
|
MESSAGING_ACS_SENDER_EMAIL = DoNotReply@poweron.swiss
|
||||||
|
|
||||||
|
# Zurich WFS Parcels (dynamic map layer). Default: Stadt Zürich OGD. Override for full canton if wfs.zh.ch resolves.
|
||||||
|
# Connector_ZhWfsParcels_WFS_URL = https://wfs.zh.ch/av
|
||||||
|
# Connector_ZhWfsParcels_TYPENAMES = av_li_liegenschaften_a
|
||||||
|
|
||||||
|
|
@ -51,6 +51,8 @@ Service_CLICKUP_CLIENT_ID = O3FX3H602A30MQN4I4SBNGJLIDBD5SL4
|
||||||
Service_CLICKUP_CLIENT_SECRET = DEV_ENC:Z0FBQUFBQnB5dkd4ZWVBeHVtRnpIT0VBN0tSZDhLRmFmN05DOVBOelJtLWhkVnJDRVBqUkh3bDFTZFRWaWQ1cWowdGNLUk5IQzlGN1J6RFVCaW8zRnBwLVBnclJfdWgxV3pVRzFEV2lwcW5Rc19Xa1ROWXNJcUF0ajZaYUxOUXk0WHRsRmJLM25FaHV5T2IxdV92ZW1nRjhzaGpwU0l2Wm9FTkRnY2lJVjhuNHUwT29salAxYV8wPQ==
|
Service_CLICKUP_CLIENT_SECRET = DEV_ENC:Z0FBQUFBQnB5dkd4ZWVBeHVtRnpIT0VBN0tSZDhLRmFmN05DOVBOelJtLWhkVnJDRVBqUkh3bDFTZFRWaWQ1cWowdGNLUk5IQzlGN1J6RFVCaW8zRnBwLVBnclJfdWgxV3pVRzFEV2lwcW5Rc19Xa1ROWXNJcUF0ajZaYUxOUXk0WHRsRmJLM25FaHV5T2IxdV92ZW1nRjhzaGpwU0l2Wm9FTkRnY2lJVjhuNHUwT29salAxYV8wPQ==
|
||||||
Service_CLICKUP_OAUTH_REDIRECT_URI = http://localhost:8000/api/clickup/auth/connect/callback
|
Service_CLICKUP_OAUTH_REDIRECT_URI = http://localhost:8000/api/clickup/auth/connect/callback
|
||||||
|
|
||||||
|
# Infomaniak: no OAuth client. Users paste a Personal Access Token (kdrive + mail) per UI.
|
||||||
|
|
||||||
# Stripe Billing (both end with _SECRET for encryption script)
|
# Stripe Billing (both end with _SECRET for encryption script)
|
||||||
STRIPE_SECRET_KEY_SECRET = DEV_ENC:Z0FBQUFBQnB5dkd5aHNGejgzQmpTdmprdzQxR19KZkh3MlhYUTNseFN3WnlaWjh2SDZyalN6aU9xSktkbUQwUnZrVnlvbGVRQm4yZFdiRU5aSEk5WVJuUnR4VUwtTm9OVk1WWmJQeU5QaDdib0hfVWV5U1BfYTFXRmdoOWdnOWxkb3JFQmF3bm45UjFUVUxmWGtGRkFKUGd6bmhpQlFnaVI3Q2lLdDlsY1VESk1vOEM0ZFBJNW1qcVZ0N2tPYmRLNmVKajZ2M3o3S05lWnRRVG5LdkRseW4wQ3VjNHNQZTZUdz09
|
STRIPE_SECRET_KEY_SECRET = DEV_ENC:Z0FBQUFBQnB5dkd5aHNGejgzQmpTdmprdzQxR19KZkh3MlhYUTNseFN3WnlaWjh2SDZyalN6aU9xSktkbUQwUnZrVnlvbGVRQm4yZFdiRU5aSEk5WVJuUnR4VUwtTm9OVk1WWmJQeU5QaDdib0hfVWV5U1BfYTFXRmdoOWdnOWxkb3JFQmF3bm45UjFUVUxmWGtGRkFKUGd6bmhpQlFnaVI3Q2lLdDlsY1VESk1vOEM0ZFBJNW1qcVZ0N2tPYmRLNmVKajZ2M3o3S05lWnRRVG5LdkRseW4wQ3VjNHNQZTZUdz09
|
||||||
STRIPE_WEBHOOK_SECRET = DEV_ENC:Z0FBQUFBQnB5dkd5dDJMSHBrVk8wTzJhU2xzTTZCZWdvWmU2NGI2WklfRXRJZVUzaVYyOU9GLUZsalUwa2lPdEgtUHo0dVVvRDU1cy1saHJyU0Rxa2xQZjBuakExQzk3bmxBcU9WbEIxUEtpR1JoUFMxZG9ISGRZUXFhdFpSMGxvQUV3a0VLQllfUUtCOHZwTGdteV9rYTFOazBfSlN3ekNWblFpakJlZVlCTmNkWWQ4Sm01a1RCWTlnTlFHWVA0MkZYMlprUExrWFN2V0NVU1BTd1NKczFJbVo3VHpLdlc4UT09
|
STRIPE_WEBHOOK_SECRET = DEV_ENC:Z0FBQUFBQnB5dkd5dDJMSHBrVk8wTzJhU2xzTTZCZWdvWmU2NGI2WklfRXRJZVUzaVYyOU9GLUZsalUwa2lPdEgtUHo0dVVvRDU1cy1saHJyU0Rxa2xQZjBuakExQzk3bmxBcU9WbEIxUEtpR1JoUFMxZG9ISGRZUXFhdFpSMGxvQUV3a0VLQllfUUtCOHZwTGdteV9rYTFOazBfSlN3ekNWblFpakJlZVlCTmNkWWQ4Sm01a1RCWTlnTlFHWVA0MkZYMlprUExrWFN2V0NVU1BTd1NKczFJbVo3VHpLdlc4UT09
|
||||||
|
|
@ -77,7 +79,7 @@ Feature_SyncDelta_JIRA_DELTA_TOKEN_SECRET = DEV_ENC:Z0FBQUFBQm8xSUpEbm0yRUJ6VUJK
|
||||||
# Teamsbot Browser Bot Service
|
# Teamsbot Browser Bot Service
|
||||||
# For local testing: run the bot locally with `npm run dev` in service-teams-browser-bot
|
# For local testing: run the bot locally with `npm run dev` in service-teams-browser-bot
|
||||||
# The bot will connect back to localhost:8000 via WebSocket
|
# The bot will connect back to localhost:8000 via WebSocket
|
||||||
TEAMSBOT_BROWSER_BOT_URL = https://cae-poweron-shared.redwater-53d21339.switzerlandnorth.azurecontainerapps.io
|
TEAMSBOT_BROWSER_BOT_URL = http://localhost:4100
|
||||||
|
|
||||||
# Debug Configuration
|
# Debug Configuration
|
||||||
APP_DEBUG_CHAT_WORKFLOW_ENABLED = True
|
APP_DEBUG_CHAT_WORKFLOW_ENABLED = True
|
||||||
|
|
|
||||||
100
env_int.20260428_213451.backup
Normal file
100
env_int.20260428_213451.backup
Normal file
|
|
@ -0,0 +1,100 @@
|
||||||
|
# Integration Environment Configuration
|
||||||
|
|
||||||
|
# System Configuration
|
||||||
|
APP_ENV_TYPE = int
|
||||||
|
APP_ENV_LABEL = Integration Instance
|
||||||
|
APP_API_URL = https://gateway-int.poweron-center.net
|
||||||
|
APP_KEY_SYSVAR = CONFIG_KEY
|
||||||
|
APP_INIT_PASS_ADMIN_SECRET = INT_ENC:Z0FBQUFBQm8xSVRjWm41MWZ4TUZGaVlrX3pWZWNwakJsY3Facm0wLVZDd1VKeTFoZEVZQnItcEdUUnVJS1NXeDBpM2xKbGRsYmxOSmRhc29PZjJSU2txQjdLbUVrTTE1NEJjUXBHbV9NOVJWZUR3QlJkQnJvTEU9
|
||||||
|
APP_INIT_PASS_EVENT_SECRET = INT_ENC:Z0FBQUFBQm8xSVRjdmtrakgxa0djekZVNGtTZV8wM2I5UUpCZllveVBMWXROYk5yS3BiV3JEelJSM09VYTRONHpnY3VtMGxDRk5JTEZSRFhtcDZ0RVRmZ1RicTFhb3c5dVZRQ1o4SmlkLVpPTW5MMTU2eTQ0Vkk9
|
||||||
|
|
||||||
|
# PostgreSQL DB Host
|
||||||
|
DB_HOST=gateway-int-server.postgres.database.azure.com
|
||||||
|
DB_USER=heeshkdlby
|
||||||
|
DB_PASSWORD_SECRET = INT_ENC:Z0FBQUFBQm8xSVRjczYzOUtTa21MMGJVTUQ5UmFfdWc3YlhCbWZOeXFaNEE1QzdJV3BLVjhnalBkLVVCMm5BZzdxdlFXQXc2RHYzLWtPSFZkZE1iWG9rQ1NkVWlpRnF5TURVbnl1cm9iYXlSMGYxd1BGYVc0VDA9
|
||||||
|
DB_PORT=5432
|
||||||
|
|
||||||
|
# Security Configuration
|
||||||
|
APP_JWT_KEY_SECRET = INT_ENC:Z0FBQUFBQm8xSVRjNUctb2RwU25iR3ZnanBOdHZhWUtIajZ1RnZzTEp4aDR0MktWRjNoeVBrY1Npd1R0VE9YVHp3M2w1cXRzbUxNaU82QUJvaDNFeVQyN05KblRWblBvbWtoT0VXbkNBbDQ5OHhwSUFnaDZGRG10Vmgtdm1YUkRsYUhFMzRVZURmSFlDTFIzVWg4MXNueDZyMGc5aVpFdWRxY3dkTExGM093ZTVUZVl5LUhGWnlRPQ==
|
||||||
|
APP_TOKEN_EXPIRY=300
|
||||||
|
|
||||||
|
# CORS Configuration
|
||||||
|
APP_ALLOWED_ORIGINS=http://localhost:8080,https://playground.poweron-center.net,https://playground-int.poweron-center.net,http://localhost:5176,https://nyla.poweron-center.net, https://nyla-int.poweron-center.net
|
||||||
|
|
||||||
|
# Logging configuration
|
||||||
|
APP_LOGGING_LOG_LEVEL = DEBUG
|
||||||
|
APP_LOGGING_LOG_DIR = /home/site/wwwroot/
|
||||||
|
APP_LOGGING_FORMAT = %(asctime)s - %(levelname)s - %(name)s - %(message)s
|
||||||
|
APP_LOGGING_DATE_FORMAT = %Y-%m-%d %H:%M:%S
|
||||||
|
APP_LOGGING_CONSOLE_ENABLED = True
|
||||||
|
APP_LOGGING_FILE_ENABLED = True
|
||||||
|
APP_LOGGING_ROTATION_SIZE = 10485760
|
||||||
|
APP_LOGGING_BACKUP_COUNT = 5
|
||||||
|
|
||||||
|
# OAuth: Auth app (login/JWT) vs Data app (Graph / Google APIs)
|
||||||
|
Service_MSFT_AUTH_CLIENT_ID = c7e7112d-61dc-4f3a-8cd3-08cc4cd7504c
|
||||||
|
Service_MSFT_AUTH_CLIENT_SECRET = INT_ENC:Z0FBQUFBQm83T29rMDZvcV9qTG5xb1FzUkdqS1llbzRxSEJXbmpONFFtcUtfZXdtZjQybmJSMjBjMEpnRVhiOGRuczZvVFBFdVVTQV80SG9PSnRQTEpLdVViNm5wc2E5aGRLWjZ4TGF1QjVkNmdRSzBpNWNkYXVublFYclVEdEM5TVBBZWVVMW5RVWk=
|
||||||
|
Service_MSFT_AUTH_REDIRECT_URI = https://gateway-int.poweron-center.net/api/msft/auth/login/callback
|
||||||
|
Service_MSFT_DATA_CLIENT_ID = c7e7112d-61dc-4f3a-8cd3-08cc4cd7504c
|
||||||
|
Service_MSFT_DATA_CLIENT_SECRET = INT_ENC:Z0FBQUFBQm83T29rMDZvcV9qTG5xb1FzUkdqS1llbzRxSEJXbmpONFFtcUtfZXdtZjQybmJSMjBjMEpnRVhiOGRuczZvVFBFdVVTQV80SG9PSnRQTEpLdVViNm5wc2E5aGRLWjZ4TGF1QjVkNmdRSzBpNWNkYXVublFYclVEdEM5TVBBZWVVMW5RVWk=
|
||||||
|
Service_MSFT_DATA_REDIRECT_URI = https://gateway-int.poweron-center.net/api/msft/auth/connect/callback
|
||||||
|
|
||||||
|
Service_GOOGLE_AUTH_CLIENT_ID = 354925410565-aqs2b2qaiqmm73qpjnel6al8eid78uvg.apps.googleusercontent.com
|
||||||
|
Service_GOOGLE_AUTH_CLIENT_SECRET = INT_ENC:Z0FBQUFBQm8xSVRjNThGeVRNd3hacThtRnE0bzlDa0JPUWQyaEd6QjlFckdsMGZjRlRfUks2bXV3aDdVRTF3LVRlZVY5WjVzSXV4ZGNnX002RDl3dkNYdGFzZkxVUW01My1wTHRCanVCLUozZEx4TlduQlB5MnpvNTR2SGlvbFl1YkhzTEtsSi1SOEo=
|
||||||
|
Service_GOOGLE_AUTH_REDIRECT_URI = https://gateway-int.poweron-center.net/api/google/auth/login/callback
|
||||||
|
Service_GOOGLE_DATA_CLIENT_ID = 354925410565-aqs2b2qaiqmm73qpjnel6al8eid78uvg.apps.googleusercontent.com
|
||||||
|
Service_GOOGLE_DATA_CLIENT_SECRET = INT_ENC:Z0FBQUFBQm8xSVRjNThGeVRNd3hacThtRnE0bzlDa0JPUWQyaEd6QjlFckdsMGZjRlRfUks2bXV3aDdVRTF3LVRlZVY5WjVzSXV4ZGNnX002RDl3dkNYdGFzZkxVUW01My1wTHRCanVCLUozZEx4TlduQlB5MnpvNTR2SGlvbFl1YkhzTEtsSi1SOEo=
|
||||||
|
Service_GOOGLE_DATA_REDIRECT_URI = https://gateway-int.poweron-center.net/api/google/auth/connect/callback
|
||||||
|
|
||||||
|
# ClickUp OAuth (Verbindungen / automation). Create an app in ClickUp: Settings → Apps → API; set redirect URL to Service_CLICKUP_OAUTH_REDIRECT_URI exactly.
|
||||||
|
Service_CLICKUP_CLIENT_ID = O3FX3H602A30MQN4I4SBNGJLIDBD5SL4
|
||||||
|
Service_CLICKUP_CLIENT_SECRET = INT_ENC:Z0FBQUFBQnB5dkd5SE1uVURMNVE3NkM4cHBKa2R2TjBnLWdpSXI5dHpKWGExZVFiUF95TFNnZ1NwLWFLdmh6eWFZTHVHYTBzU2FGRUpLYkVyM1NvZjZkWDZHN21qUER5ZVNOaGpCc3NrUGd3VnFTclF3OW1nUlVuWXQ1UVhDLVpyb1BwRExOeFpDeVhtbEhDVnd4TVdpbzNBNk5QQWFPdjdza0xBWGxFY1E3WFpCSUlNa1l4RDlBPQ==
|
||||||
|
Service_CLICKUP_OAUTH_REDIRECT_URI = https://gateway-int.poweron-center.net/api/clickup/auth/connect/callback
|
||||||
|
|
||||||
|
# Infomaniak OAuth -- Data App (kDrive + Mail)
|
||||||
|
Service_INFOMANIAK_DATA_CLIENT_ID = abd71a95-7c67-465a-b7ab-963cc5eccb4b
|
||||||
|
Service_INFOMANIAK_DATA_CLIENT_SECRET = jwaEZza0VnmAHA1vIQJcpaCC1O4ND6IS0mkQ0GGiVlmof7XHxUcl9YMl7TbtEINz
|
||||||
|
Service_INFOMANIAK_OAUTH_REDIRECT_URI = https://gateway-int.poweron-center.net/api/infomaniak/auth/connect/callback
|
||||||
|
|
||||||
|
# Stripe Billing (both end with _SECRET for encryption script)
|
||||||
|
STRIPE_SECRET_KEY_SECRET = INT_ENC:Z0FBQUFBQnB5dkd5ekdBaGNGVUlOQUpncTlzLWlTV0V5OWZzQkpDczhCUGw4U1JpTHZ0d3pfYlFNWElLRlNiNlNsaDRYTGZUTkg2OUFrTW1GZXpOUjBVbmRQWjN6ekhHd2ZSQ195OHlaeWh1TmxrUm10V2R3YmdncmFLbFMzVjdqcWJMSUJPR2xuSEozclNoZG1rZVBTaWg3OFQ1Qzdxb0wyQ2RKazc2dG1aZXBUTXlvbDZqLS1KOVI5M3BGc3NQZkZRbnFpRjIwWmh2ZHlVNlpxZVo2dWNmMjQ5eW02QmtzUT09
|
||||||
|
STRIPE_WEBHOOK_SECRET = whsec_2agCQEbDPSOn2C40EJcwoPCqlvaPLF7M
|
||||||
|
STRIPE_API_VERSION = 2026-01-28.clover
|
||||||
|
STRIPE_AUTOMATIC_TAX_ENABLED = false
|
||||||
|
STRIPE_TAX_RATE_ID_CH_VAT = txr_1TOQd14OUoIL0Osj7A0ZQlr0
|
||||||
|
|
||||||
|
# AI configuration
|
||||||
|
Connector_AiOpenai_API_SECRET = INT_ENC:Z0FBQUFBQnBaSnM4MENkQ2xJVmE5WFZKUkh2SHJFby1YVXN3ZmVxRkptS3ZWRmlwdU93ZEJjSjlMV2NGbU5mS3NCdmFfcmFYTEJNZXFIQ3ozTWE4ZC1pemlQNk9wbjU1d3BPS0ZCTTZfOF8yWmVXMWx0TU1DamlJLVFhSTJXclZsY3hMVWlPcXVqQWtMdER4T252NHZUWEhUOTdIN1VGR3ltazEweXFqQ0lvb0hYWmxQQnpxb0JwcFNhRDNGWXdoRTVJWm9FalZpTUF5b1RqZlRaYnVKYkp0NWR5Vko1WWJ0Wmg2VWJzYXZ0Z3Q4UkpsTldDX2dsekhKMmM4YjRoa2RwemMwYVQwM2cyMFlvaU5mOTVTWGlROU8xY2ZVRXlxZzJqWkxURWlGZGI2STZNb0NpdEtWUnM9
|
||||||
|
Connector_AiAnthropic_API_SECRET = INT_ENC:Z0FBQUFBQm8xSVRjT1ZlRWVJdVZMT3ljSFJDcFdxRFBRVkZhS204NnN5RDBlQ0tpenhTM0FFVktuWW9mWHNwRWx2dHB0eDBSZ0JFQnZKWlp6c01pVGREWHd1eGpERnU0Q2xhaks1clQ1ZXVsdnd2ZzhpNXNQS1BhY3FjSkdkVEhHalNaRGR4emhpakZncnpDQUVxOHVXQzVUWmtQc0FsYmFwTF9TSG5FOUFtWk5Ick1NcHFvY2s1T1c2WXlRUFFJZnh6TWhuaVpMYmppcDR0QUx0a0R6RXlwbGRYb1R4dzJkUT09
|
||||||
|
Connector_AiPerplexity_API_SECRET = INT_ENC:Z0FBQUFBQnB5dkd6UkhtU3lhYmZMSlo0bklQZ2s3UTFBSkprZTNwWkg5Q2lVa0wtenhxWXpva21xVDVMRjdKSmhpTmxWS05IUTRoRHdCbktSRVVjcVFnY1RfV0N2S2dyV0dTMlhxQlRFVm41RkFTWVQzQThuVkZwdlNuVC05QlVRVXB6Qjk3akNpYmY1MFR6R1ByMzlIMllRZlRRYVVRN2ZBPT0=
|
||||||
|
Connector_AiTavily_API_SECRET = INT_ENC:Z0FBQUFBQm8xSVRkdkJMTDY0akhXNzZDWHVYSEt1cDZoOWEzSktneHZEV2JndTNmWlNSMV9KbFNIZmQzeVlrNE5qUEIwcUlBSGM1a0hOZ3J6djIyOVhnZzI3M1dIUkdicl9FVXF3RGktMmlEYmhnaHJfWTdGUkktSXVUSGdQMC1vSEV6VE8zR2F1SVk=
|
||||||
|
Connector_AiPrivateLlm_API_SECRET = INT_ENC:Z0FBQUFBQnBudkpGSjZ1NWh0aWc1R3Z4MHNaeS1HamtUbndhcUZFZDlqUDhjSmg5eHFfdlVkU0RsVkJ2UVRaMWs3aWhraG5jSlc0YkxNWHVmR2JoSW5ENFFCdkJBM0VienlKSnhzNnBKbTJOUTFKczRfWlQ3bWpmUkRTT1I1OGNUSTlQdExacGRpeXg=
|
||||||
|
Connector_AiMistral_API_SECRET = INT_ENC:Z0FBQUFBQnBudkpGZTNtZ1E4TWIxSEU1OUlreUpxZkJIR0Vxcm9xRHRUbnBxbTQ1cXlkbnltWkJVdTdMYWZ4c3Fsam42TERWUTVhNzZFMU9xVjdyRGFCYml6bmZsZFd2YmJzemlrSWN6Q3o3X0NXX2xXNUQteTNONHdKYzJ5YVpLLWdhU2JhSTJQZnI=
|
||||||
|
|
||||||
|
Service_MSFT_TENANT_ID = common
|
||||||
|
|
||||||
|
# Google Cloud Speech Services configuration
|
||||||
|
Connector_GoogleSpeech_API_KEY_SECRET = INT_ENC:Z0FBQUFBQm8xSVRkNmVXZ1pWcHcydTF2MXF0ZGJoWHBydF85bTczTktiaEJ3Wk1vMW1mZVhDSG1yd0ZxR2ZuSGJTX0N3MWptWXFJTkNTWjh1SUVVTXI4UDVzcGdLMkU5SHJ2TUpkRlRoRWdnSldtYjNTQkh4UDJHY2xmdTdZQ1ZiMTZZcGZxS3RzaHdjV3dtVkZUcEpJcWx0b2xuQVR6ZmpoVFZPY1hNMTV2SnhDaC1IZEh4UUpLTy1ILXA4RG1zamJTbUJ4X0t2M2NkdzJPbEJxSmFpRzV3WC0wZThoVzlxcmpHZ3ZkLVlVY3REZk1vV19WQ05BOWN6cnJ4MWNYYnNiQ0FQSUVnUlpfM3BhMnlsVlZUOG5wM3pzM1lSN1UzWlZKUXRLczlHbjI1LTFvSUJ4SlVXMy1BNk43bE5Hb0RfTTVlWk9oZnFIaVg0SW5pbm9EcXRTTzU1RFlYY3dTcnpKWWNyNjN5T1BGZ0FmX253cEFncmhvZVRuM05KYzhkOEhFMFJsc2NBSEwzZVZ1R0JMOGxsekVwUE55alZaRXFrdzNWWVNGWXNmbnhKeWhQSFo2VXBTUlRPeHdvdVdncEFuOWgydEtsSUFneUN6cGVaTnBSdjNCdVJseGJFdmlMc203UFhLVlYyTENkaGg2dVN6Z2xwT1ZmTmN5bVZGUkM3ZWcyVkt2ckFUVVd3WFFwYnJjNVRobEh2SkVJbXRwUUpEOFJKQ1NUc0Q4NHNqUFhPSDh5cTV6MEcwSDEwRUJCQ2JiTTJlOE5nd3pMMkJaQ1dVYjMwZVVWWnlETmp2dkZ3aXEtQ29WNkxZTFkzYUkxdTlQUU1OTnhWWU12YU9MVnJQa1d2ZjRtUlhneTNubEMxTmp1eUNPOThSMlB3Y1F0T2tCdFNsNFlKalZPV25yR2QycVBUb096RmZ1V0FTaGsxLV9FWDBmenBIOXpMdGpLcUc0TWRoY2hlMFhYTzlET1ZRekw0ZHNwUVBQdVJBX2h6Q2ZzWVZJWTNybTJiekp3WmhmWF9SUFBXQzlqUjctcVlHWWVMZWVQallzR0JGTVF0WmtnWlg1aTM1bFprNVExZXY5dnNvWF93UjhwbkJ3RzNXaVJ2d2RRU3JJVlBvaVh4eTlBRUtqWkJia3dJQVVBV2Nqdm9FUTRUVW1TaHp2ZUwxT0N2ZndxQ2Nka1RYWXF0LWxIWFE0dTFQcVhncFFPM0hFdUUtYlFnemx3WkF4bjA1aDFULUdrZlVZbEJtRGRCdjJyVkdJSXozd0I0dF9zbWhOeHFqRDA4T1NVaWR5cjBwSVgwbllPU294NjZGTnM1bFhIdGpNQUxFOENWd3FCbGpSRFRmRXotQnU0N2lCVEU5RGF6Qi10S2U2NGdadDlrRjZtVE5oZkw5ZWFjXzhCTmxXQzNFTFgxRXVYY3J3YkxnbnlBSm9PY3h4MlM1NVFQbVNDRW5Ld1dvNWMxSmdoTXJuaE1pT2VFeXYwWXBHZ29MZDVlN2lwUUNIeGNCVVdQVi1rRXdJMWFncUlPTXR0MmZVQ1l0d09mZTdzWGFBWUJMUFd3b0RSOU8zeER2UWpNdzAxS0ZJWnB5S3FJdU9wUDJnTTNwMWw3VFVqVXQ3ZGZnU1RkUktkc0NhUHJ0SGFxZ0lVWDEzYjNtU2JfMGNWM1Y0dHlCTzNESEdENC1jUWF5MVppRzR1QlBNSUJySjFfRi1ENHEwcmJ4S3hQUFpXVHA0TG9DZWdoUlo5WnNSM1lCZm1KbEs2ak1yUUU4Wk9JcVJGUkJwc0NvUkMyTjhoTWxtZmVQeDREZVRKZkhYN2duLVNTeGZzdFdBVnhEandJSXB5QjM0azF0ckI3Tk1wSzFhNGVOUVRrNjU0cG9JQ29pN09xOFkwR1lMTlktaGp4TktxdTVtTnNEcldsV2pEZm5nQWpJc2hxY0hjQnVSWUR5VVdaUXBH
WUloTzFZUC1oNzJ4UjZ1dnpLcDJxWEZtQlNIMWkzZ0hXWXdKeC1iLXdZWVJhcU04VFlpMU5pd2ZIdTdCdkVWVFVBdmJuRk16bEFFQTh4alBrcTV2RzliT2hGdTVPOXlRMjFuZktiRTZIamQ1VFVqS0hRTXhxcU1mdkgyQ1NjQmZfcjl4c3NJd0RIeDVMZUFBbHJqdEJxWWl3aWdGUEQxR3ZnMkNGdVB4RUxkZi1xOVlFQXh1NjRfbkFEaEJ5TVZlUGFrWVhSTVRPeGxqNlJDTHNsRWRrei1pYjhnUmZrb3BvWkQ2QXBzYjFHNXZoWU1LSExhLWtlYlJTZlJmYUM5Y1Rhb1pkMVYyWTByM3NTS0VXMG1ybm1BTVN2QXRYaXZqX2dKSkZrajZSS2cyVlNOQnd5Y29zMlVyaWlNbTJEb3FuUFFtbWNTNVpZTktUenFZSl91cVFXZjRkQUZyYmtPczU2S1RKQ19ONGFOTHlwX2hOOEE1UHZEVjhnT0xxRjMxTEE4SHhRbmlmTkZwVXJBdlJDbU5oZS05SzI4QVhEWDZaN2ZiSlFwUGRXSnB5TE9MZV9ia3pYcmZVa1dicG5FMHRXUFZXMWJQVDAwOEdDQzJmZEl0ZDhUOEFpZXZWWXl5Q2xwSmFienNCMldlb2NKb2ZRYV9KbUdHRzNUcjU1VUFhMzk1a2J6dDVuNTl6NTdpM0hGa3k0UWVtbF9pdDVsQVp2cndDLUU5dnNYOF9CLS0ySXhBSFdCSnpqV010bllBb3U0cEZZYVF5R2tSNFM5NlRhdS1fb1NqbDBKMkw0V2N0VEZhNExtQlR3ckZ3cVlCeHVXdXJ6X0s4cEtsaG5rVUxCN2RRbHQxTmcyVFBqYUxyOHJzeFBXVUJaRHpXbUoxdHZzMFBzQk1UTUFvX1pGNFNMNDFvZWdTdEUtMUNKMXNIeVlvQk1CeEdpZVdmN0tsSDVZZHJXSGt5c2o2MHdwSTZIMVBhRzM1eU43Q2FtcVNidExxczNJeUx5U2RuUG5EeHpCTlg2SV9WNk1ET3BRNXFuc0pNWlVvZUYtY21oRGtJSmwxQ09QbHBUV3BuS3B5NE9RVkhfellqZjJUQ0diSV94QlhQWmdaaC1TRWxsMUVWSXB0aE1McFZDZDNwQUVKZ2t5cXRTXzlRZVJwN0pZSnJSV21XMlh0TzFRVEl0c2I4QjBxOGRCYkNxek04a011X1lrb2poQ3h2LUhKTGJiUlhneHp5QWFBcE5nMElkNTVzM3JGOWtUQ19wNVBTaVVHUHFDNFJnNXJaWDNBSkMwbi1WbTdtSnFySkhNQl9ZQjZrR2xDcXhTRExhMmNHcGlyWjR3ZU9SSjRZd1l4ZjVPeHNiYk53SW5SYnZPTzNkd1lnZmFseV9tQ3BxM3lNYVBHT0J0elJnMTByZ3VHemxta0tVQzZZRllmQ2VLZ1ZCNDhUUTc3LWNCZXBMekFwWW1fQkQ1NktzNGFMYUdYTU0xbXprY1FONUNlUHNMY3h2NFJMMmhNa3VNdzF4TVFWQk9odnJUMjFJMVd3Z2N6Sms5aEM2SWlWZFViZ0JWTEpUWWM5NmIzOS1oQmRqdkt1NUUycFlVcUxERUZGbnZqTUxIYnJmMDBHZDEzbnJsWEEzSUo3UmNPUDg1dnRUU1FzcWtjTWZwUG9zM0JTY3RqMDdST2UxcXFTM0d0bGkwdFhnMk5LaUlxNWx3V1pLaVlLUFJXZzBzVl9Ia1V1OHdYUEFWOU50UndycGtCdzM0Q0NQamp2VTNqbFBLaGhsbUk5dUI5MjU5OHVySk1oY0drUWtXUloyVVRvOWJmbUVYRzFVeWNQczh2NXJCeVppRlZiWDNJaDhOSmRmX2lURTNVS3NXQXFZT1QtUmdvMWJoVWYxU3lqUUJhbzEyX3I3TXhwbm9wc1FoQ1ZUTlNBRjMyQTBTY2tzbHZ3RFUtTjVxQ0o1QXRTVks2WENwMGZCRGstNU1jN3FhUFJCQThyaFhh
MVRsbnlSRXNGRmt3Yk01X21ldmV3bTItWm1JaGpZQWZROEFtT1d1UUtPQlhYVVFqT2NxLUxQenJHX3JfMEdscDRiMXcyZ1ZmU3NFMzVoelZJaDlvT0ZoRGQ2bmtlM0M5ZHlCd2ZMbnRZRkZUWHVBUEx4czNfTmtMckh5eXZrZFBzOEItOGRYOEhsMzBhZ0xlOWFjZzgteVBsdnpPT1pYdUxnbFNXYnhKaVB6QUxVdUJCOFpvU2x2c1FHZV94MDBOVWJhYkxISkswc0U5UmdPWFJLXzZNYklHTjN1QzRKaldKdEVHb0pOU284N3c2LXZGMGVleEZ5NGZ6OGV1dm1tM0J0aTQ3VFlNOEJrdEh3PT0=
|
||||||
|
|
||||||
|
# Feature SyncDelta JIRA configuration
|
||||||
|
Feature_SyncDelta_JIRA_DELTA_TOKEN_SECRET = INT_ENC:Z0FBQUFBQm8xSVRkTUNsWm4wX0p6eXFDZmJ4dFdHNEs1MV9MUzdrb3RzeC1jVWVYZ0REWHRyZkFiaGZLcUQtTXFBZzZkNzRmQ0gxbEhGbUNlVVFfR1JEQTc0aldkZkgyWnBOcjdlUlZxR0tDTEdKRExULXAyUEtsVmNTMkRKU1BJNnFiM0hlMXo4YndMcHlRMExtZDQ3Zm9vNFhMcEZCcHpBPT0=
|
||||||
|
|
||||||
|
# Teamsbot Browser Bot Service
|
||||||
|
TEAMSBOT_BROWSER_BOT_URL = https://cae-poweron-shared.redwater-53d21339.switzerlandnorth.azurecontainerapps.io
|
||||||
|
|
||||||
|
# Debug Configuration
|
||||||
|
APP_DEBUG_CHAT_WORKFLOW_ENABLED = FALSE
|
||||||
|
APP_DEBUG_CHAT_WORKFLOW_DIR = ./test-chat
|
||||||
|
APP_DEBUG_ACCOUNTING_SYNC_ENABLED = FALSE
|
||||||
|
APP_DEBUG_ACCOUNTING_SYNC_DIR = ./debug/sync
|
||||||
|
|
||||||
|
# Mandate Pre-Processing Servers
|
||||||
|
PREPROCESS_ALTHAUS_CHAT_SECRET = INT_ENC:Z0FBQUFBQnBaSnM4UkNBelhvckxCQUVjZm94N3BZUDcxaEMyckE2dm1lRVhqODhrWU1SUjNXZ3dQZlVJOWhveXFkZXpobW5xT0NneGZ2SkNUblFmYXd0WTBYNTl3UmRnSWc9PQ==
|
||||||
|
|
||||||
|
# Preprocessor API Configuration
|
||||||
|
PP_QUERY_API_KEY=ouho02j0rj2oijroi3rj2oijro23jr0990
|
||||||
|
PP_QUERY_BASE_URL=https://poweron-althaus-preprocess-prod-e3fegaatc7faency.switzerlandnorth-01.azurewebsites.net/api/v1/dataquery/query
|
||||||
|
|
||||||
|
# Azure Communication Services Email Configuration
|
||||||
|
MESSAGING_ACS_CONNECTION_STRING = endpoint=https://mailing-poweron-prod.switzerland.communication.azure.com/;accesskey=4UizRfBKBgMhDgQ92IYINM6dJsO1HIeL6W1DvIX9S0GtaS1PjIXqJQQJ99CAACULyCpHwxUcAAAAAZCSuSCt
|
||||||
|
MESSAGING_ACS_SENDER_EMAIL = DoNotReply@poweron.swiss
|
||||||
|
|
@ -49,11 +49,13 @@ Service_GOOGLE_DATA_REDIRECT_URI = https://gateway-int.poweron-center.net/api/go
|
||||||
# ClickUp OAuth (Verbindungen / automation). Create an app in ClickUp: Settings → Apps → API; set redirect URL to Service_CLICKUP_OAUTH_REDIRECT_URI exactly.
|
# ClickUp OAuth (Verbindungen / automation). Create an app in ClickUp: Settings → Apps → API; set redirect URL to Service_CLICKUP_OAUTH_REDIRECT_URI exactly.
|
||||||
Service_CLICKUP_CLIENT_ID = O3FX3H602A30MQN4I4SBNGJLIDBD5SL4
|
Service_CLICKUP_CLIENT_ID = O3FX3H602A30MQN4I4SBNGJLIDBD5SL4
|
||||||
Service_CLICKUP_CLIENT_SECRET = INT_ENC:Z0FBQUFBQnB5dkd5SE1uVURMNVE3NkM4cHBKa2R2TjBnLWdpSXI5dHpKWGExZVFiUF95TFNnZ1NwLWFLdmh6eWFZTHVHYTBzU2FGRUpLYkVyM1NvZjZkWDZHN21qUER5ZVNOaGpCc3NrUGd3VnFTclF3OW1nUlVuWXQ1UVhDLVpyb1BwRExOeFpDeVhtbEhDVnd4TVdpbzNBNk5QQWFPdjdza0xBWGxFY1E3WFpCSUlNa1l4RDlBPQ==
|
Service_CLICKUP_CLIENT_SECRET = INT_ENC:Z0FBQUFBQnB5dkd5SE1uVURMNVE3NkM4cHBKa2R2TjBnLWdpSXI5dHpKWGExZVFiUF95TFNnZ1NwLWFLdmh6eWFZTHVHYTBzU2FGRUpLYkVyM1NvZjZkWDZHN21qUER5ZVNOaGpCc3NrUGd3VnFTclF3OW1nUlVuWXQ1UVhDLVpyb1BwRExOeFpDeVhtbEhDVnd4TVdpbzNBNk5QQWFPdjdza0xBWGxFY1E3WFpCSUlNa1l4RDlBPQ==
|
||||||
Service_CLICKUP_OAUTH_REDIRECT_URI = http://localhost:8000/api/clickup/auth/connect/callback
|
Service_CLICKUP_OAUTH_REDIRECT_URI = https://gateway-int.poweron-center.net/api/clickup/auth/connect/callback
|
||||||
|
|
||||||
|
# Infomaniak: no OAuth client. Users paste a Personal Access Token (kdrive + mail) per UI.
|
||||||
|
|
||||||
# Stripe Billing (both end with _SECRET for encryption script)
|
# Stripe Billing (both end with _SECRET for encryption script)
|
||||||
STRIPE_SECRET_KEY_SECRET = INT_ENC:Z0FBQUFBQnB5dkd5ekdBaGNGVUlOQUpncTlzLWlTV0V5OWZzQkpDczhCUGw4U1JpTHZ0d3pfYlFNWElLRlNiNlNsaDRYTGZUTkg2OUFrTW1GZXpOUjBVbmRQWjN6ekhHd2ZSQ195OHlaeWh1TmxrUm10V2R3YmdncmFLbFMzVjdqcWJMSUJPR2xuSEozclNoZG1rZVBTaWg3OFQ1Qzdxb0wyQ2RKazc2dG1aZXBUTXlvbDZqLS1KOVI5M3BGc3NQZkZRbnFpRjIwWmh2ZHlVNlpxZVo2dWNmMjQ5eW02QmtzUT09
|
STRIPE_SECRET_KEY_SECRET = INT_ENC:Z0FBQUFBQnB5dkd5ekdBaGNGVUlOQUpncTlzLWlTV0V5OWZzQkpDczhCUGw4U1JpTHZ0d3pfYlFNWElLRlNiNlNsaDRYTGZUTkg2OUFrTW1GZXpOUjBVbmRQWjN6ekhHd2ZSQ195OHlaeWh1TmxrUm10V2R3YmdncmFLbFMzVjdqcWJMSUJPR2xuSEozclNoZG1rZVBTaWg3OFQ1Qzdxb0wyQ2RKazc2dG1aZXBUTXlvbDZqLS1KOVI5M3BGc3NQZkZRbnFpRjIwWmh2ZHlVNlpxZVo2dWNmMjQ5eW02QmtzUT09
|
||||||
STRIPE_WEBHOOK_SECRET = whsec_2agCQEbDPSOn2C40EJcwoPCqlvaPLF7M
|
STRIPE_WEBHOOK_SECRET = INT_ENC:Z0FBQUFBQnA4UXZiUUVqTl9lREVRWTh1aHFDcFpwcXRkOUx4MS1ham9Ddkl6T0xzMnJuM1hhUHdGNG5CenY1MUg4RlJBOGFQTWl5cVd5MjJ2REItcHYyRmdLX3ZlT2p5Z3BRVkMtQnRoTVkteXlfaU92MVBtOEI0Ni1kbGlfa0NiRmFRRXNHLVE2NHI=
|
||||||
STRIPE_API_VERSION = 2026-01-28.clover
|
STRIPE_API_VERSION = 2026-01-28.clover
|
||||||
STRIPE_AUTOMATIC_TAX_ENABLED = false
|
STRIPE_AUTOMATIC_TAX_ENABLED = false
|
||||||
STRIPE_TAX_RATE_ID_CH_VAT = txr_1TOQd14OUoIL0Osj7A0ZQlr0
|
STRIPE_TAX_RATE_ID_CH_VAT = txr_1TOQd14OUoIL0Osj7A0ZQlr0
|
||||||
|
|
|
||||||
101
env_prod.20260428_213451.backup
Normal file
101
env_prod.20260428_213451.backup
Normal file
|
|
@ -0,0 +1,101 @@
|
||||||
|
# Production Environment Configuration
|
||||||
|
|
||||||
|
# System Configuration
|
||||||
|
APP_ENV_TYPE = prod
|
||||||
|
APP_ENV_LABEL = Production Instance
|
||||||
|
APP_KEY_SYSVAR = CONFIG_KEY
|
||||||
|
APP_INIT_PASS_ADMIN_SECRET = PROD_ENC:Z0FBQUFBQnBDM1Z3UnJRV0sySFlDblpXUlREclREaW1WbUt6bGtQYkdrNkZDOXNOLXFua1hqeFF2RHJnRXJ5VlVGV3hOZm41QjZOMlNTb0duYXNxZi05dXVTc2xDVkx0SVBFLUhncVo5T0VUZHE0UTZLWWw3ck09
|
||||||
|
APP_INIT_PASS_EVENT_SECRET = PROD_ENC:Z0FBQUFBQnBDM1Z3QVpIY19DQVZSSzJmc2F0VEZvQlU1cHBhTEgxdHdnR3g4eW01aTEzYTUxc1gxTDR1RVVpSHRXYjV6N1BLZUdCUGlfOW1qdy0xSHFVRkNBcGZvaGlSSkZycXRuUllaWnpyVGRoeFg1dGEyNUk9
|
||||||
|
APP_API_URL = https://gateway-prod.poweron-center.net
|
||||||
|
|
||||||
|
# PostgreSQL DB Host
|
||||||
|
DB_HOST=gateway-prod-server.postgres.database.azure.com
|
||||||
|
DB_USER=gzxxmcrdhn
|
||||||
|
DB_PASSWORD_SECRET = PROD_ENC:Z0FBQUFBQnBDM1Z3Y1JScGxjZG9TdUkwaHRzSHZhRHpNcDV3N1U2TnIwZ21PRG5TWFFfR1k0N3BiRk5WelVadjlnXzVSTDZ6NXFQNFpqbnJ1R3dNVkJocm1zVEgtSk0xaDRiR19zNDBEbVIzSk51ekNlQ0Z3b0U9
|
||||||
|
DB_PORT=5432
|
||||||
|
|
||||||
|
# Security Configuration
|
||||||
|
APP_JWT_KEY_SECRET = PROD_ENC:Z0FBQUFBQnBDM1Z3elhfV0Rnd2pQRjlMdkVwX1FnSmRhSzNZUlV5SVpaWXBNX1hpa2xPZGdMSWpnN2ZINHQxeGZnNHJweU5pZjlyYlY5Qm9zOUZEbl9wUEgtZHZXd1NhR19JSG9kbFU4MnFGQnllbFhRQVphRGQyNHlFVWR5VHQyUUpqN0stUmRuY2QyTi1oalczRHpLTEJqWURjZWs4YjZvT2U5YnFqcXEwdEpxV05fX05QMmtrPQ==
|
||||||
|
APP_TOKEN_EXPIRY=300
|
||||||
|
|
||||||
|
# CORS Configuration
|
||||||
|
APP_ALLOWED_ORIGINS=http://localhost:8080,https://playground.poweron-center.net,https://playground-int.poweron-center.net,http://localhost:5176,https://nyla.poweron-center.net,https://nyla-int.poweron-center.net
|
||||||
|
|
||||||
|
# Logging configuration
|
||||||
|
APP_LOGGING_LOG_LEVEL = DEBUG
|
||||||
|
APP_LOGGING_LOG_DIR = /home/site/wwwroot/
|
||||||
|
APP_LOGGING_FORMAT = %(asctime)s - %(levelname)s - %(name)s - %(message)s
|
||||||
|
APP_LOGGING_DATE_FORMAT = %Y-%m-%d %H:%M:%S
|
||||||
|
APP_LOGGING_CONSOLE_ENABLED = True
|
||||||
|
APP_LOGGING_FILE_ENABLED = True
|
||||||
|
APP_LOGGING_ROTATION_SIZE = 10485760
|
||||||
|
APP_LOGGING_BACKUP_COUNT = 5
|
||||||
|
|
||||||
|
# OAuth: Auth app (login/JWT) vs Data app (Graph / Google APIs)
|
||||||
|
Service_MSFT_AUTH_CLIENT_ID = c7e7112d-61dc-4f3a-8cd3-08cc4cd7504c
|
||||||
|
Service_MSFT_AUTH_CLIENT_SECRET = PROD_ENC:Z0FBQUFBQnBESkk2T25scFU1T1pNd2FENTFRM3kzcEpSXy1HT0trQkR2Wnl3U3RYbExzRy1YUTkxd3lPZE84U2lhX3FZanp5TjhYRGluLXVjU3hjaWRBUnZLbVhtRDItZ3FxNXJ3MUxicUZTXzJWZVNrR0VKN3ZlNEtET1ppOFk0MzNmbkwyRmROUk4=
|
||||||
|
Service_MSFT_AUTH_REDIRECT_URI = https://gateway-prod.poweron-center.net/api/msft/auth/login/callback
|
||||||
|
Service_MSFT_DATA_CLIENT_ID = c7e7112d-61dc-4f3a-8cd3-08cc4cd7504c
|
||||||
|
Service_MSFT_DATA_CLIENT_SECRET = PROD_ENC:Z0FBQUFBQnBESkk2T25scFU1T1pNd2FENTFRM3kzcEpSXy1HT0trQkR2Wnl3U3RYbExzRy1YUTkxd3lPZE84U2lhX3FZanp5TjhYRGluLXVjU3hjaWRBUnZLbVhtRDItZ3FxNXJ3MUxicUZTXzJWZVNrR0VKN3ZlNEtET1ppOFk0MzNmbkwyRmROUk4=
|
||||||
|
Service_MSFT_DATA_REDIRECT_URI = https://gateway-prod.poweron-center.net/api/msft/auth/connect/callback
|
||||||
|
|
||||||
|
Service_GOOGLE_AUTH_CLIENT_ID = 354925410565-aqs2b2qaiqmm73qpjnel6al8eid78uvg.apps.googleusercontent.com
|
||||||
|
Service_GOOGLE_AUTH_CLIENT_SECRET = PROD_ENC:Z0FBQUFBQnBDM1Z3eWFwSEZ4YnRJcjU1OW5kcXZKdkt1Z3gzWDFhVW5Eelh3VnpnNlppcWxweHY5UUQzeDIyVk83cW1XNVE4bllVWnR2MjlSQzFrV1UyUVV6OUt5b3Vqa3QzMUIwNFBqc2FVSXRxTlQ1OHVJZVFibnhBQ2puXzBwSXp5NUZhZjM1d1o=
|
||||||
|
Service_GOOGLE_AUTH_REDIRECT_URI = https://gateway-prod.poweron-center.net/api/google/auth/login/callback
|
||||||
|
Service_GOOGLE_DATA_CLIENT_ID = 354925410565-aqs2b2qaiqmm73qpjnel6al8eid78uvg.apps.googleusercontent.com
|
||||||
|
Service_GOOGLE_DATA_CLIENT_SECRET = PROD_ENC:Z0FBQUFBQnBDM1Z3eWFwSEZ4YnRJcjU1OW5kcXZKdkt1Z3gzWDFhVW5Eelh3VnpnNlppcWxweHY5UUQzeDIyVk83cW1XNVE4bllVWnR2MjlSQzFrV1UyUVV6OUt5b3Vqa3QzMUIwNFBqc2FVSXRxTlQ1OHVJZVFibnhBQ2puXzBwSXp5NUZhZjM1d1o=
|
||||||
|
Service_GOOGLE_DATA_REDIRECT_URI = https://gateway-prod.poweron-center.net/api/google/auth/connect/callback
|
||||||
|
|
||||||
|
# ClickUp OAuth (Verbindungen / automation). Create an app in ClickUp: Settings → Apps → API; set redirect URL to Service_CLICKUP_OAUTH_REDIRECT_URI exactly.
|
||||||
|
Service_CLICKUP_CLIENT_ID = O3FX3H602A30MQN4I4SBNGJLIDBD5SL4
|
||||||
|
Service_CLICKUP_CLIENT_SECRET = PROD_ENC:Z0FBQUFBQnB5dkd6VGw5WDdhdDRsVENSalhSSUV0OFFxbEx0V1l6aktNV0E5Y18xU3JHLUlqMWVJdmxyajAydVZRaDJkZzJOVXhxRV9ROFRZbWxlRjh4c3NtQnRFMmRtZWpzTWVsdngtWldlNXRKTURHQjJCOEt6alMwQlkwOFYyVVJWNURJUGJIZDIxYVlfNnBrMU54M0Q3TVdVbFZqRkJKTUtqa05wUkV4eGZvbXNsVi1nNVdBPQ==
|
||||||
|
Service_CLICKUP_OAUTH_REDIRECT_URI = https://gateway-prod.poweron-center.net/api/clickup/auth/connect/callback
|
||||||
|
|
||||||
|
# Infomaniak OAuth -- Data App (kDrive + Mail)
|
||||||
|
Service_INFOMANIAK_DATA_CLIENT_ID = abd71a95-7c67-465a-b7ab-963cc5eccb4b
|
||||||
|
Service_INFOMANIAK_DATA_CLIENT_SECRET = jwaEZza0VnmAHA1vIQJcpaCC1O4ND6IS0mkQ0GGiVlmof7XHxUcl9YMl7TbtEINz
|
||||||
|
Service_INFOMANIAK_OAUTH_REDIRECT_URI = https://gateway-prod.poweron-center.net/api/infomaniak/auth/connect/callback
|
||||||
|
|
||||||
|
# Stripe Billing (both end with _SECRET for encryption script)
|
||||||
|
STRIPE_SECRET_KEY_SECRET = PROD_ENC:Z0FBQUFBQnB5dkd6aVA3R3VRS3VHMUgzUEVjYkR4eUZKWFhPUzFTTVlHNnBvT3FienNQaUlBWVpPLXJyVGpGMWk4LXktMXphX0J6ZTVESkJxdjNNa3ZJbF9wX2ppYzdjYlF0cmdVamlEWWJDSmJYYkJseHctTlh4dnNoQWs4SG5haVl2TTNDdXpuaFpqeDBtNkFCbUxMa0RaWG14dmxyOEdILTNrZ2licmNpbXVkN2lFSWoxZW1BODNpV0ZTQ0VaeXRmR1d4RjExMlVFS3MtQU9zZXZlZE1mTmY3OWctUXJHdz09
|
||||||
|
STRIPE_WEBHOOK_SECRET = PROD_ENC:Z0FBQUFBQnBudkpGNUpTWldsakYydFhFelBrR1lSaWxYT3kyMENOMUljZTJUZHBWcEhhdWVCMzYxZXQ5b3VlTFVRalFiTVdsbGxrdUx0RDFwSEpsOC1sTDJRTEJNQlA3S3ZaQzBtV1h6bWp5VnlMZUgwUlF3cXYxcnljZVE5SWdzLVg3V0syOWRYS08=
|
||||||
|
STRIPE_API_VERSION = 2026-01-28.clover
|
||||||
|
STRIPE_AUTOMATIC_TAX_ENABLED = false
|
||||||
|
STRIPE_TAX_RATE_ID_CH_VAT = txr_1TOQZG8WqlVsabrfFEu49pah
|
||||||
|
|
||||||
|
|
||||||
|
# AI configuration
|
||||||
|
Connector_AiOpenai_API_SECRET = PROD_ENC:Z0FBQUFBQnBaSnM4TWJOVm4xVkx6azRlNDdxN3UxLUdwY2hhdGYxRGp4VFJqYXZIcmkxM1ZyOWV2M0Z4MHdFNkVYQ0ROb1d6LUZFUEdvMHhLMEtXYVBCRzM5TlYyY3ROYWtJRk41cDZxd0tYYi00MjVqMTh4QVcyTXl0bmVocEFHbXQwREpwNi1vODdBNmwzazE5bkpNelE2WXpvblIzWlQwbGdEelI2WXFqT1RibXVHcjNWbVhwYzBOM25XTzNmTDAwUjRvYk4yNjIyZHc5c2RSZzREQUFCdUwyb0ZuOXN1dzI2c2FKdXI4NGxEbk92czZWamJXU3ZSbUlLejZjRklRRk4tLV9aVUFZekI2bTU4OHYxNTUybDg3RVo0ZTh6dXNKRW5GNXVackZvcm9laGI0X3R6V3M9
|
||||||
|
Connector_AiAnthropic_API_SECRET = PROD_ENC:Z0FBQUFBQnBDM1Z3TnhYdlhSLW5RbXJyMHFXX0V0bHhuTDlTaFJsRDl2dTdIUTFtVFAwTE8tY3hLbzNSMnVTLXd3RUZualN3MGNzc1kwOTIxVUN2WW1rYi1TendFRVVBSVNqRFVjckEzNExyTGNaUkJLMmozazUwemI1cnhrcEtZVXJrWkdaVFFramp3MWZ6RmY2aGlRMXVEYjM2M3ZlbmxMdnNCRDM1QWR0Wmd6MWVnS1I1c01nV3hRLXg3d2NTZXVfTi1Wdm16UnRyNGsyRTZ0bG9TQ1g1OFB5Z002bmQ3QT09
|
||||||
|
Connector_AiPerplexity_API_SECRET = PROD_ENC:Z0FBQUFBQnB5dkd6NG5CTm9QOFZRV1BIVC0tV2RKTGtCQWFOUXlpRnhEdjN1U2x3VUdDamtIZV9CQzQ5ZmRmcUh3ZUVUa0NxbGhlenVVdWtaYjdpcnhvUlNFLXZfOWh2dWFZai0xUGU5cWpuYmpnRVRWakh0RVNUUTFyX0w5V0NXVWFrQlZuOTd5TkI0eVRoQ0ZBSm9HYUlYamoyY1FCMmlBPT0=
|
||||||
|
Connector_AiTavily_API_SECRET = PROD_ENC:Z0FBQUFBQnBDM1Z3NmItcDh6V0JpcE5Jc0NlUWZqcmllRHB5eDlNZmVnUlNVenhNTm5xWExzbjJqdE1GZ0hTSUYtb2dvdWNhTnlQNmVWQ2NGVDgwZ0MwMWZBMlNKWEhzdlF3TlZzTXhCZWM4Z1Uwb18tSTRoU1JBVTVkSkJHOTJwX291b3dPaVphVFg=
|
||||||
|
Connector_AiPrivateLlm_API_SECRET = PROD_ENC:Z0FBQUFBQnBudkpGanZ6U3pzZWkwXzVPWGtIQ040XzFrTXc5QWRnazdEeEktaUJ0akJmNnEzbWUzNHczLTJfc2dIdzBDY0FTaXZYcDhxNFdNbTNtbEJTb2VRZ0ZYd05hdlNLR1h6SUFzVml2Z1FLY1BjTl90UWozUGxtak1URnhhZmNDRWFTb0dKVUo=
|
||||||
|
Connector_AiMistral_API_SECRET = PROD_ENC:Z0FBQUFBQnBudkpGc2tQc2lvMk1YZk01Q1dob1U5cnR0dG03WWE3WkpoOWo0SEpvLU9Rc2lCNDExdy1wZExaN3lpT2FEQkxnaHRmWmZUUUZUUUJmblZreGlpaFpOdnFhbzlEd1RsVVJtX216cmhxTm5BcTN2eUZ2T054cDE5bmlEamJ3NGR6MVpFQnA=
|
||||||
|
|
||||||
|
Service_MSFT_TENANT_ID = common
|
||||||
|
|
||||||
|
# Google Cloud Speech Services configuration
|
||||||
|
Connector_GoogleSpeech_API_KEY_SECRET = PROD_ENC:Z0FBQUFBQnBDM1Z4NFQxaF9uN3h1cVB6dnZid1c1R1VfNDlSQ1NHMEVDZWtKanpMQ29CLXc1MXBqRm1hQ0YtWVhaejBMY1ZTOEFEVlpWQ3hrYkFza1E2RDNsYkdMMndNR0VGNTMwVDRGdURJY3hyaVFxVjEtSEYwNHJzeWM3WmlpZW9jU2E3NTgycEV2allqQ3dJRTNyRFAzaDJ6dklKeXpNRkJhYjFzUkptN2dpbkNpMklrcGxuZl9vTkt3T0JvNm1YTXd5UlkwZWptUXdWVFpnV2J4X3J2WUhIUlFkSElFVnlqMnlJRnNHTnlpMWs2R1dZc2ROWjNYZG85cndmd1E5cUZnVmZRYnVjTG43dXFmSWd2bGFfVWFWSmtpWkpndWNlSUNwcnFNU2NqZXFaV0xsY3l3SElLRkVHcHZGZERKV1ltcGhTS0dhTko1VTJLYzNoZjRkSGVEX3dTMWVVTmdDczV5cE1JQUdSbUJGUm11eFhTVjJHbkt0SzB4UG1Dc2xmbnp1Y041Y2RTeWRuWGdmQy1sTGx0MGtnM2VJQ3EyLXViRlNhTU9ybzZkR1N1bXE5SXhlZENWRFpWSGlYOWx4SUQ3UlR0ZEVxQkxNakRUVFRiUmFnbklOalphLUZkRFVVaXBRUk5NZW5PaUZydTFmQkNPSTdTVTNZd0plWXllNVFJdmN4MVcyTGlwMGFtVjBzOGRxR1FjbzhfYW5zdTB0ZEZBTTJhakltazh1dktNMUZsOUItdFdTb1pIaUxySllXNkdlY20zUS0wTnpFNTB2SU5acG1VcXhyaHBmME8takw3RDh5T043T2VGOV92TzNya2pWSlpYVjZDdXlZcjM3a0hPTlhkaW9oQmxqQlpGRFYyTTY4WmZmT3k4Tk1tdXRuSGdTUVpNT2NKenhXb05PdXBfSEdhMTNxNjdpNXlKUUI2YUgydFFPX1VvXzVJb0UxWTU2YVNiNDQ0QndZanhMMHR1cGdHWGhvcEg1QXEtSXZJdTdZUE12ZEVVWkF4QmtsQS1GYnY3SFIxSHlsOGVfcEpGS1A4QUVEQWNEOFZYYlljQ3ByTU03YU16Y0UzUnJQZEprSWNjT1ZXVEtDWi03Y3ZzRVdYUTlabXJISEo5THRHVXVuM0xqbzA4bGVlZVpOMk1QMmptb21tV0pTMlVoOXdWVU95UW1iQmttc2w1RG9mMWwxXzg1T2IxYUVmTUJEZkpUdTFDTzZ3RlBFeUFiX01iRTZNWkNaSG45TkFOM2pzbUJRZ2N0VFpoejJUTG1RODY3TzZpSzVkYUQzaEpfY2pSTkRzU0VpanlkdXVQQmJ2WU5peno4QWNLTDVxZTlhSHI3NnNiM0k0Y3JkQ0xaOU05bGtsQl8zQklvaktWSDZ4aVp2MHlYelJuUDJyTU9CZC1OZjJxNFc1dDcwSUlxaVh1LTMyWWFwU0IwUU9kOUFpMWpnOERtLTh1VmJiNGVwcXBMbU5fMjVZc0hFbmxQT2puSFd1ZGpyTkphLU5sVlBZWWxrWEZrWGJQWmVkN19tZFZfZ1l1V3pSWlA0V0ZxM2lrWnl2NU9WeTdCbDROSmhfeENKTFhMVXk1d195S2JMUFJoRXZjcVo4V2g0MTNKRnZhUE1wRkNPM3FZOGdVazJPeW5PSGpuZnFGTTdJMkRnam5rUlV6NFlqODlIelRYaEN5VjdJNnVwbllNODNCTFRHMWlXbmM1VlRxbXB3Wm9LRjVrQUpjYzRNMThUMWwwSVhBMUlyamtPZnE4R0o4bEdHay1zMjR5RDJkZ1lYRHZaNHVHU2otR3ZpN25LZlEySEU0UmdTNzJGVHNWQXMyb0dVMV9WUE13ODhZWUFaakxGOWZieGNXZkNYRnV5djEyWTZLcmdrajRBLU1rS1Z0VVRkOWlDMU9fMGVmYXFhZXJGMUhpNkdmb2hkbzZ1OWV6VlNmVzN
ISjVYTFh6SjJNdWR5MWZidE8yVEo2dnRrZXhMRXBPczUwTG13OGhNUVpIQm0zQmRKRnJ0Nl8wNW1Ob0dHRDVpU0NWREV3TkY2SjktdVBkMFU1ZXBmSFpHQ3FHNTRZdTJvaExpZVEtLTU4YTVyeFBpNDdEajZtWUc4c1dBeUJqQ3NIY1NLS0FIMUxGZzZxNFNkOG9ORGNHWWJCVnZuNnJVTEtoQi1mRTZyUl81ZWJJMi1KOGdERzBhNVRZeHRYUUlqY2JvMFlaNHhWMU9pWFFiZjdaLUhkaG15TTBPZVlkS2R5UVdENTI4QVFiY1RJV0ZNZnlpVWxfZmlnN1BXbGdrbjFGUkhzYl9qeHBxVVJacUE4bjZETENHVFpSamh0NVpOM2hMYTZjYzBuS3J0a3hhZGxSM1V5UHd2OTU3ZHY0Yy1xWDBkWUk0Ymp0MWVrS3YzSktKODhQZnY3QTZ1Wm1VZkZJbS1jamdreks1ZlhpQjFOUDFiOHJ2Nm9NcmdTdU5LQXV2RkZWZEFNZnVKUjVwcVY3dDdhQnpmRVJ6SmlvVXpDM0ZiYXh5bGE2X04tTE9qZ3BiTnN3TF9ZaFRxSUpjNjB1dXZBcy1TZHRHTjFjSUR3WUl4cE9VNzB5Rkk4U3Z1SVZYTl9sYXlZVk83UnFrMlVmcnBpam9lRUlCY19DdVJwOXl2TVVDV1pMRFZTZk9MY3Z1eXA0MnhGazc5YllQaWtOeTc4NjlOa2lGY05RRzY1cG9nbGpYelc4c3FicWxWRkg0YzRSamFlQ19zOU14YWJreU9pNDREZVJ3a0REMUxGTzF1XzI1bEF3VXVZRjlBeWFiLXJsOXgza3VZem1WckhWSnVNbDBNcldadU8xQ3RwOTl5NGgtVlR0QklCLWl5WkE4V1FlQTBCOVU1RE9sQlRrYUNZOGdfUmEwbEZvUTFGUEFWVmQ4V1FhOU9VNjZqemRpZm1sUDhZQTJ0YVBRbWZldkF5THV4QXpfdUtNZ0tlcGdSRFM3c0lDOTNQbnBxdmxYYWNpTmI3MW9BMlZIdTQ5RldudHpNQWQ5NDNPLVVTLXVVNzdHZXh4UXpZa3dVa2J4dTFDV1RkYjRnWXU2M3lJekRYWGNMcWU5OVh6U2xZWDh6MmpqcnpiOHlnMjA5S3RFQm1NZjNSM21adkVnTUpSYVhkTzNkNnJCTmljY0x1cl9kMkx3UHhySjZEdHREanZERzNEUTFlTkR0NWlBczAtdmFGTjdZNVpTMlkxV2czYW5RN2lqemg4eUViZDV6RjdKNXdFcUlvcVhoNkJ6eVJkR1pua1hnNzQwOEs2TXJYSlpGcW9qRDU2QjBOWFFtdXBJRkRKbmdZUF9ZSmRPVEtvUjVhLTV1NjdXQjRhS0duaEtJb2FrQnNjUTRvdFMxdkdTNk1NYlFHUFhhYTJ1eUN3WHN4UlJ4UjdrZjY0SzFGYWVFN1k0cGJnc1RjNmFUenR4NHljbVhablZSWHZmUVN3cXRHNjhsX1BSZWEzdTJUZFA0S2pTaU9YMnZIQ1ZPcGhWMFJqZkVEMWRMR1h3SnU0Z2FzZ3VGM3puNzdhVjhaQXNIWHFsbjB0TDVYSFdSNV9rdWhUUUhSZHBGYkJIVDB5SDdlMC13QTVnS0g5Qkg5RGNxSGJlelVndUhPcEQ0QkRKMTJTZUM1OXJhVm0zYjU0OVY2dk9MQVBheklIQXpVNW9Yc0ROVjEzaFZTWmVxYlBWMlNlSzladzJ6TmNuMG5FVVZkN1VZN1pfS2ZHa0lQcE80S24wSnQtVlJVV09OVWJ3M09YMkZpV2ktVF9ENHhKU2dfYUQ2aUVyamk0VHJHQmVfVHU4clpUTFoteW5aSWRPV1M0RDRMTms4NGRoYmJfVE82aUl2X3VieVJOdDhBQmRwdzdnRTVBNzZwaW93dUlZb3ZRYUtOeG9ULWxvNVp5a0haSjdkcUhRb3d6UGIxRUpCVkVYX2d6TkRqQVozUWxkNGFoc1FXYVd
2YWNkME9Qclo0bjYxMFRWTy1nbnI5NTBJNzRMMDluUXRKYTFqQUN4d0d5aHVlamN3Tkk3NWJXeXR0TW9BeUg5Vnp4Q2RnZUY3b3AtMDlrNmlrSGR0eGRtbUdUd2lFRWg4MklEeWJHN2wwZEpVSXMxNDNOWjRFS0tPdWxhMmFCckhfRENIY184aEFDZXNrRDl2dHQtQW12UnRuQXJjaDJoTUpiYkNWQUtfRG9GMUZoNWM4UnBYZ29RWWs2NHcyUm5kdTF3Vk1GeFpiRUJLaVZ2UGFjbi1jV3lMV0N2ZDl4VERPN295X01NNG56ZjZkRzZoYUtmY1E5NlVXemx2SnVfb19iSXg0R2M3Mjd1a2JRPT0=
|
||||||
|
|
||||||
|
# Feature SyncDelta JIRA configuration
|
||||||
|
Feature_SyncDelta_JIRA_DELTA_TOKEN_SECRET = PROD_ENC:Z0FBQUFBQnBDM1Z4d3Z4d2x6N1FhUktMU0RKbkxfY2pTQkRzXzJ6UXVEbDNCaFM3UHMtQVFGYzNmYWs4N0lMM1R2SFJuZTVFVmx6MGVEbXc5U3NOTnY1TWN0ZDNaamlHQWloalM3VldmREJNSHQ1TlVkSVFJMTVhQWVGSVRMTGw4UTBqNGlQZFVuaHp4WUlKemR5UnBXZlh0REJFLXJ4ejR3PT0=
|
||||||
|
|
||||||
|
# Teamsbot Browser Bot Service
|
||||||
|
TEAMSBOT_BROWSER_BOT_URL = https://cae-poweron-shared.redwater-53d21339.switzerlandnorth.azurecontainerapps.io
|
||||||
|
|
||||||
|
# Debug Configuration
|
||||||
|
APP_DEBUG_CHAT_WORKFLOW_ENABLED = FALSE
|
||||||
|
APP_DEBUG_CHAT_WORKFLOW_DIR = ./test-chat
|
||||||
|
APP_DEBUG_ACCOUNTING_SYNC_ENABLED = FALSE
|
||||||
|
APP_DEBUG_ACCOUNTING_SYNC_DIR = ./debug/sync
|
||||||
|
|
||||||
|
# Mandate Pre-Processing Servers
|
||||||
|
PREPROCESS_ALTHAUS_CHAT_SECRET = PROD_ENC:Z0FBQUFBQnBaSnM4RVRmYW5IelNIbklTUDZIMEoycEN4ZFF0YUJoWWlUTUh2M0dhSXpYRXcwVkRGd1VieDNsYkdCRlpxMUR5Rjk1RDhPRkE5bmVtc2VDMURfLW9QNkxMVHN0M1JhbU9sa3JHWmdDZnlHS3BQRVBGTERVMHhXOVdDOWVqNkhfSUQyOHo=
|
||||||
|
|
||||||
|
# Preprocessor API Configuration
|
||||||
|
PP_QUERY_API_KEY=ouho02j0rj2oijroi3rj2oijro23jr0990
|
||||||
|
PP_QUERY_BASE_URL=https://poweron-althaus-preprocess-prod-e3fegaatc7faency.switzerlandnorth-01.azurewebsites.net/api/v1/dataquery/query
|
||||||
|
|
||||||
|
# Azure Communication Services Email Configuration
|
||||||
|
MESSAGING_ACS_CONNECTION_STRING = endpoint=https://mailing-poweron-prod.switzerland.communication.azure.com/;accesskey=4UizRfBKBgMhDgQ92IYINM6dJsO1HIeL6W1DvIX9S0GtaS1PjIXqJQQJ99CAACULyCpHwxUcAAAAAZCSuSCt
|
||||||
|
MESSAGING_ACS_SENDER_EMAIL = DoNotReply@poweron.swiss
|
||||||
|
|
@ -51,6 +51,8 @@ Service_CLICKUP_CLIENT_ID = O3FX3H602A30MQN4I4SBNGJLIDBD5SL4
|
||||||
Service_CLICKUP_CLIENT_SECRET = PROD_ENC:Z0FBQUFBQnB5dkd6VGw5WDdhdDRsVENSalhSSUV0OFFxbEx0V1l6aktNV0E5Y18xU3JHLUlqMWVJdmxyajAydVZRaDJkZzJOVXhxRV9ROFRZbWxlRjh4c3NtQnRFMmRtZWpzTWVsdngtWldlNXRKTURHQjJCOEt6alMwQlkwOFYyVVJWNURJUGJIZDIxYVlfNnBrMU54M0Q3TVdVbFZqRkJKTUtqa05wUkV4eGZvbXNsVi1nNVdBPQ==
|
Service_CLICKUP_CLIENT_SECRET = PROD_ENC:Z0FBQUFBQnB5dkd6VGw5WDdhdDRsVENSalhSSUV0OFFxbEx0V1l6aktNV0E5Y18xU3JHLUlqMWVJdmxyajAydVZRaDJkZzJOVXhxRV9ROFRZbWxlRjh4c3NtQnRFMmRtZWpzTWVsdngtWldlNXRKTURHQjJCOEt6alMwQlkwOFYyVVJWNURJUGJIZDIxYVlfNnBrMU54M0Q3TVdVbFZqRkJKTUtqa05wUkV4eGZvbXNsVi1nNVdBPQ==
|
||||||
Service_CLICKUP_OAUTH_REDIRECT_URI = https://gateway-prod.poweron-center.net/api/clickup/auth/connect/callback
|
Service_CLICKUP_OAUTH_REDIRECT_URI = https://gateway-prod.poweron-center.net/api/clickup/auth/connect/callback
|
||||||
|
|
||||||
|
# Infomaniak: no OAuth client. Users paste a Personal Access Token (kdrive + mail) per UI.
|
||||||
|
|
||||||
# Stripe Billing (both end with _SECRET for encryption script)
|
# Stripe Billing (both end with _SECRET for encryption script)
|
||||||
STRIPE_SECRET_KEY_SECRET = PROD_ENC:Z0FBQUFBQnB5dkd6aVA3R3VRS3VHMUgzUEVjYkR4eUZKWFhPUzFTTVlHNnBvT3FienNQaUlBWVpPLXJyVGpGMWk4LXktMXphX0J6ZTVESkJxdjNNa3ZJbF9wX2ppYzdjYlF0cmdVamlEWWJDSmJYYkJseHctTlh4dnNoQWs4SG5haVl2TTNDdXpuaFpqeDBtNkFCbUxMa0RaWG14dmxyOEdILTNrZ2licmNpbXVkN2lFSWoxZW1BODNpV0ZTQ0VaeXRmR1d4RjExMlVFS3MtQU9zZXZlZE1mTmY3OWctUXJHdz09
|
STRIPE_SECRET_KEY_SECRET = PROD_ENC:Z0FBQUFBQnB5dkd6aVA3R3VRS3VHMUgzUEVjYkR4eUZKWFhPUzFTTVlHNnBvT3FienNQaUlBWVpPLXJyVGpGMWk4LXktMXphX0J6ZTVESkJxdjNNa3ZJbF9wX2ppYzdjYlF0cmdVamlEWWJDSmJYYkJseHctTlh4dnNoQWs4SG5haVl2TTNDdXpuaFpqeDBtNkFCbUxMa0RaWG14dmxyOEdILTNrZ2licmNpbXVkN2lFSWoxZW1BODNpV0ZTQ0VaeXRmR1d4RjExMlVFS3MtQU9zZXZlZE1mTmY3OWctUXJHdz09
|
||||||
STRIPE_WEBHOOK_SECRET = PROD_ENC:Z0FBQUFBQnBudkpGNUpTWldsakYydFhFelBrR1lSaWxYT3kyMENOMUljZTJUZHBWcEhhdWVCMzYxZXQ5b3VlTFVRalFiTVdsbGxrdUx0RDFwSEpsOC1sTDJRTEJNQlA3S3ZaQzBtV1h6bWp5VnlMZUgwUlF3cXYxcnljZVE5SWdzLVg3V0syOWRYS08=
|
STRIPE_WEBHOOK_SECRET = PROD_ENC:Z0FBQUFBQnBudkpGNUpTWldsakYydFhFelBrR1lSaWxYT3kyMENOMUljZTJUZHBWcEhhdWVCMzYxZXQ5b3VlTFVRalFiTVdsbGxrdUx0RDFwSEpsOC1sTDJRTEJNQlA3S3ZaQzBtV1h6bWp5VnlMZUgwUlF3cXYxcnljZVE5SWdzLVg3V0syOWRYS08=
|
||||||
|
|
|
||||||
101
env_prod_forgejo.20260428_213451.backup
Normal file
101
env_prod_forgejo.20260428_213451.backup
Normal file
|
|
@ -0,0 +1,101 @@
|
||||||
|
# Production Environment Configuration
|
||||||
|
|
||||||
|
# System Configuration
|
||||||
|
APP_ENV_TYPE = prod
|
||||||
|
APP_ENV_LABEL = Production Instance Forgejo
|
||||||
|
APP_KEY_SYSVAR = /srv/gateway/shared/secrets/master_key.txt
|
||||||
|
APP_INIT_PASS_ADMIN_SECRET = PROD_ENC:Z0FBQUFBQnBDM1Z3UnJRV0sySFlDblpXUlREclREaW1WbUt6bGtQYkdrNkZDOXNOLXFua1hqeFF2RHJnRXJ5VlVGV3hOZm41QjZOMlNTb0duYXNxZi05dXVTc2xDVkx0SVBFLUhncVo5T0VUZHE0UTZLWWw3ck09
|
||||||
|
APP_INIT_PASS_EVENT_SECRET = PROD_ENC:Z0FBQUFBQnBDM1Z3QVpIY19DQVZSSzJmc2F0VEZvQlU1cHBhTEgxdHdnR3g4eW01aTEzYTUxc1gxTDR1RVVpSHRXYjV6N1BLZUdCUGlfOW1qdy0xSHFVRkNBcGZvaGlSSkZycXRuUllaWnpyVGRoeFg1dGEyNUk9
|
||||||
|
APP_API_URL = https://api.poweron.swiss
|
||||||
|
|
||||||
|
# PostgreSQL DB Host
|
||||||
|
DB_HOST=10.20.0.21
|
||||||
|
DB_USER=poweron_dev
|
||||||
|
DB_PASSWORD_SECRET = mypassword
|
||||||
|
DB_PORT=5432
|
||||||
|
|
||||||
|
# Security Configuration
|
||||||
|
APP_JWT_KEY_SECRET = PROD_ENC:Z0FBQUFBQnBDM1Z3elhfV0Rnd2pQRjlMdkVwX1FnSmRhSzNZUlV5SVpaWXBNX1hpa2xPZGdMSWpnN2ZINHQxeGZnNHJweU5pZjlyYlY5Qm9zOUZEbl9wUEgtZHZXd1NhR19JSG9kbFU4MnFGQnllbFhRQVphRGQyNHlFVWR5VHQyUUpqN0stUmRuY2QyTi1oalczRHpLTEJqWURjZWs4YjZvT2U5YnFqcXEwdEpxV05fX05QMmtrPQ==
|
||||||
|
APP_TOKEN_EXPIRY=300
|
||||||
|
|
||||||
|
# CORS Configuration
|
||||||
|
APP_ALLOWED_ORIGINS=https://porta.poweron.swiss
|
||||||
|
|
||||||
|
# Logging configuration
|
||||||
|
APP_LOGGING_LOG_LEVEL = DEBUG
|
||||||
|
APP_LOGGING_LOG_DIR = srv/gateway/shared/logs
|
||||||
|
APP_LOGGING_FORMAT = %(asctime)s - %(levelname)s - %(name)s - %(message)s
|
||||||
|
APP_LOGGING_DATE_FORMAT = %Y-%m-%d %H:%M:%S
|
||||||
|
APP_LOGGING_CONSOLE_ENABLED = True
|
||||||
|
APP_LOGGING_FILE_ENABLED = True
|
||||||
|
APP_LOGGING_ROTATION_SIZE = 10485760
|
||||||
|
APP_LOGGING_BACKUP_COUNT = 5
|
||||||
|
|
||||||
|
# OAuth: Auth app (login/JWT) vs Data app (Graph / Google APIs)
|
||||||
|
Service_MSFT_AUTH_CLIENT_ID = c7e7112d-61dc-4f3a-8cd3-08cc4cd7504c
|
||||||
|
Service_MSFT_AUTH_CLIENT_SECRET = PROD_ENC:Z0FBQUFBQnBESkk2T25scFU1T1pNd2FENTFRM3kzcEpSXy1HT0trQkR2Wnl3U3RYbExzRy1YUTkxd3lPZE84U2lhX3FZanp5TjhYRGluLXVjU3hjaWRBUnZLbVhtRDItZ3FxNXJ3MUxicUZTXzJWZVNrR0VKN3ZlNEtET1ppOFk0MzNmbkwyRmROUk4=
|
||||||
|
Service_MSFT_AUTH_REDIRECT_URI=https://api.poweron.swiss/api/msft/auth/login/callback
|
||||||
|
Service_MSFT_DATA_CLIENT_ID = c7e7112d-61dc-4f3a-8cd3-08cc4cd7504c
|
||||||
|
Service_MSFT_DATA_CLIENT_SECRET = PROD_ENC:Z0FBQUFBQnBESkk2T25scFU1T1pNd2FENTFRM3kzcEpSXy1HT0trQkR2Wnl3U3RYbExzRy1YUTkxd3lPZE84U2lhX3FZanp5TjhYRGluLXVjU3hjaWRBUnZLbVhtRDItZ3FxNXJ3MUxicUZTXzJWZVNrR0VKN3ZlNEtET1ppOFk0MzNmbkwyRmROUk4=
|
||||||
|
Service_MSFT_DATA_REDIRECT_URI = https://api.poweron.swiss/api/msft/auth/connect/callback
|
||||||
|
|
||||||
|
Service_GOOGLE_AUTH_CLIENT_ID = 354925410565-aqs2b2qaiqmm73qpjnel6al8eid78uvg.apps.googleusercontent.com
|
||||||
|
Service_GOOGLE_AUTH_CLIENT_SECRET = PROD_ENC:Z0FBQUFBQnBDM1Z3eWFwSEZ4YnRJcjU1OW5kcXZKdkt1Z3gzWDFhVW5Eelh3VnpnNlppcWxweHY5UUQzeDIyVk83cW1XNVE4bllVWnR2MjlSQzFrV1UyUVV6OUt5b3Vqa3QzMUIwNFBqc2FVSXRxTlQ1OHVJZVFibnhBQ2puXzBwSXp5NUZhZjM1d1o=
|
||||||
|
Service_GOOGLE_AUTH_REDIRECT_URI =
|
||||||
|
Service_GOOGLE_DATA_CLIENT_ID = 354925410565-aqs2b2qaiqmm73qpjnel6al8eid78uvg.apps.googleusercontent.com
|
||||||
|
Service_GOOGLE_DATA_CLIENT_SECRET = PROD_ENC:Z0FBQUFBQnBDM1Z3eWFwSEZ4YnRJcjU1OW5kcXZKdkt1Z3gzWDFhVW5Eelh3VnpnNlppcWxweHY5UUQzeDIyVk83cW1XNVE4bllVWnR2MjlSQzFrV1UyUVV6OUt5b3Vqa3QzMUIwNFBqc2FVSXRxTlQ1OHVJZVFibnhBQ2puXzBwSXp5NUZhZjM1d1o=
|
||||||
|
Service_GOOGLE_DATA_REDIRECT_URI =
|
||||||
|
|
||||||
|
# ClickUp OAuth (Verbindungen / automation). Create an app in ClickUp: Settings → Apps → API; set redirect URL to Service_CLICKUP_OAUTH_REDIRECT_URI exactly.
|
||||||
|
Service_CLICKUP_CLIENT_ID = O3FX3H602A30MQN4I4SBNGJLIDBD5SL4
|
||||||
|
Service_CLICKUP_CLIENT_SECRET = PROD_ENC:Z0FBQUFBQnB5dkd6VGw5WDdhdDRsVENSalhSSUV0OFFxbEx0V1l6aktNV0E5Y18xU3JHLUlqMWVJdmxyajAydVZRaDJkZzJOVXhxRV9ROFRZbWxlRjh4c3NtQnRFMmRtZWpzTWVsdngtWldlNXRKTURHQjJCOEt6alMwQlkwOFYyVVJWNURJUGJIZDIxYVlfNnBrMU54M0Q3TVdVbFZqRkJKTUtqa05wUkV4eGZvbXNsVi1nNVdBPQ==
|
||||||
|
Service_CLICKUP_OAUTH_REDIRECT_URI = https://api.poweron.swiss/api/clickup/auth/connect/callback
|
||||||
|
|
||||||
|
# Infomaniak OAuth -- Data App (kDrive + Mail)
|
||||||
|
Service_INFOMANIAK_DATA_CLIENT_ID = abd71a95-7c67-465a-b7ab-963cc5eccb4b
|
||||||
|
Service_INFOMANIAK_DATA_CLIENT_SECRET = jwaEZza0VnmAHA1vIQJcpaCC1O4ND6IS0mkQ0GGiVlmof7XHxUcl9YMl7TbtEINz
|
||||||
|
Service_INFOMANIAK_OAUTH_REDIRECT_URI = https://api.poweron.swiss/api/infomaniak/auth/connect/callback
|
||||||
|
|
||||||
|
# Stripe Billing (both end with _SECRET for encryption script)
|
||||||
|
STRIPE_SECRET_KEY_SECRET = PROD_ENC:Z0FBQUFBQnB5dkd6aVA3R3VRS3VHMUgzUEVjYkR4eUZKWFhPUzFTTVlHNnBvT3FienNQaUlBWVpPLXJyVGpGMWk4LXktMXphX0J6ZTVESkJxdjNNa3ZJbF9wX2ppYzdjYlF0cmdVamlEWWJDSmJYYkJseHctTlh4dnNoQWs4SG5haVl2TTNDdXpuaFpqeDBtNkFCbUxMa0RaWG14dmxyOEdILTNrZ2licmNpbXVkN2lFSWoxZW1BODNpV0ZTQ0VaeXRmR1d4RjExMlVFS3MtQU9zZXZlZE1mTmY3OWctUXJHdz09
|
||||||
|
STRIPE_WEBHOOK_SECRET = PROD_ENC:Z0FBQUFBQnBudkpGNUpTWldsakYydFhFelBrR1lSaWxYT3kyMENOMUljZTJUZHBWcEhhdWVCMzYxZXQ5b3VlTFVRalFiTVdsbGxrdUx0RDFwSEpsOC1sTDJRTEJNQlA3S3ZaQzBtV1h6bWp5VnlMZUgwUlF3cXYxcnljZVE5SWdzLVg3V0syOWRYS08=
|
||||||
|
STRIPE_API_VERSION = 2026-01-28.clover
|
||||||
|
STRIPE_AUTOMATIC_TAX_ENABLED = false
|
||||||
|
STRIPE_TAX_RATE_ID_CH_VAT = txr_1TOQZG8WqlVsabrfFEu49pah
|
||||||
|
|
||||||
|
|
||||||
|
# AI configuration
|
||||||
|
Connector_AiOpenai_API_SECRET = PROD_ENC:Z0FBQUFBQnBaSnM4TWJOVm4xVkx6azRlNDdxN3UxLUdwY2hhdGYxRGp4VFJqYXZIcmkxM1ZyOWV2M0Z4MHdFNkVYQ0ROb1d6LUZFUEdvMHhLMEtXYVBCRzM5TlYyY3ROYWtJRk41cDZxd0tYYi00MjVqMTh4QVcyTXl0bmVocEFHbXQwREpwNi1vODdBNmwzazE5bkpNelE2WXpvblIzWlQwbGdEelI2WXFqT1RibXVHcjNWbVhwYzBOM25XTzNmTDAwUjRvYk4yNjIyZHc5c2RSZzREQUFCdUwyb0ZuOXN1dzI2c2FKdXI4NGxEbk92czZWamJXU3ZSbUlLejZjRklRRk4tLV9aVUFZekI2bTU4OHYxNTUybDg3RVo0ZTh6dXNKRW5GNXVackZvcm9laGI0X3R6V3M9
|
||||||
|
Connector_AiAnthropic_API_SECRET = PROD_ENC:Z0FBQUFBQnBDM1Z3TnhYdlhSLW5RbXJyMHFXX0V0bHhuTDlTaFJsRDl2dTdIUTFtVFAwTE8tY3hLbzNSMnVTLXd3RUZualN3MGNzc1kwOTIxVUN2WW1rYi1TendFRVVBSVNqRFVjckEzNExyTGNaUkJLMmozazUwemI1cnhrcEtZVXJrWkdaVFFramp3MWZ6RmY2aGlRMXVEYjM2M3ZlbmxMdnNCRDM1QWR0Wmd6MWVnS1I1c01nV3hRLXg3d2NTZXVfTi1Wdm16UnRyNGsyRTZ0bG9TQ1g1OFB5Z002bmQ3QT09
|
||||||
|
Connector_AiPerplexity_API_SECRET = PROD_ENC:Z0FBQUFBQnB5dkd6NG5CTm9QOFZRV1BIVC0tV2RKTGtCQWFOUXlpRnhEdjN1U2x3VUdDamtIZV9CQzQ5ZmRmcUh3ZUVUa0NxbGhlenVVdWtaYjdpcnhvUlNFLXZfOWh2dWFZai0xUGU5cWpuYmpnRVRWakh0RVNUUTFyX0w5V0NXVWFrQlZuOTd5TkI0eVRoQ0ZBSm9HYUlYamoyY1FCMmlBPT0=
|
||||||
|
Connector_AiTavily_API_SECRET = PROD_ENC:Z0FBQUFBQnBDM1Z3NmItcDh6V0JpcE5Jc0NlUWZqcmllRHB5eDlNZmVnUlNVenhNTm5xWExzbjJqdE1GZ0hTSUYtb2dvdWNhTnlQNmVWQ2NGVDgwZ0MwMWZBMlNKWEhzdlF3TlZzTXhCZWM4Z1Uwb18tSTRoU1JBVTVkSkJHOTJwX291b3dPaVphVFg=
|
||||||
|
Connector_AiPrivateLlm_API_SECRET = PROD_ENC:Z0FBQUFBQnBudkpGanZ6U3pzZWkwXzVPWGtIQ040XzFrTXc5QWRnazdEeEktaUJ0akJmNnEzbWUzNHczLTJfc2dIdzBDY0FTaXZYcDhxNFdNbTNtbEJTb2VRZ0ZYd05hdlNLR1h6SUFzVml2Z1FLY1BjTl90UWozUGxtak1URnhhZmNDRWFTb0dKVUo=
|
||||||
|
Connector_AiMistral_API_SECRET = PROD_ENC:Z0FBQUFBQnBudkpGc2tQc2lvMk1YZk01Q1dob1U5cnR0dG03WWE3WkpoOWo0SEpvLU9Rc2lCNDExdy1wZExaN3lpT2FEQkxnaHRmWmZUUUZUUUJmblZreGlpaFpOdnFhbzlEd1RsVVJtX216cmhxTm5BcTN2eUZ2T054cDE5bmlEamJ3NGR6MVpFQnA=
|
||||||
|
|
||||||
|
Service_MSFT_TENANT_ID = common
|
||||||
|
|
||||||
|
# Google Cloud Speech Services configuration
|
||||||
|
Connector_GoogleSpeech_API_KEY_SECRET = PROD_ENC:Z0FBQUFBQnBDM1Z4NFQxaF9uN3h1cVB6dnZid1c1R1VfNDlSQ1NHMEVDZWtKanpMQ29CLXc1MXBqRm1hQ0YtWVhaejBMY1ZTOEFEVlpWQ3hrYkFza1E2RDNsYkdMMndNR0VGNTMwVDRGdURJY3hyaVFxVjEtSEYwNHJzeWM3WmlpZW9jU2E3NTgycEV2allqQ3dJRTNyRFAzaDJ6dklKeXpNRkJhYjFzUkptN2dpbkNpMklrcGxuZl9vTkt3T0JvNm1YTXd5UlkwZWptUXdWVFpnV2J4X3J2WUhIUlFkSElFVnlqMnlJRnNHTnlpMWs2R1dZc2ROWjNYZG85cndmd1E5cUZnVmZRYnVjTG43dXFmSWd2bGFfVWFWSmtpWkpndWNlSUNwcnFNU2NqZXFaV0xsY3l3SElLRkVHcHZGZERKV1ltcGhTS0dhTko1VTJLYzNoZjRkSGVEX3dTMWVVTmdDczV5cE1JQUdSbUJGUm11eFhTVjJHbkt0SzB4UG1Dc2xmbnp1Y041Y2RTeWRuWGdmQy1sTGx0MGtnM2VJQ3EyLXViRlNhTU9ybzZkR1N1bXE5SXhlZENWRFpWSGlYOWx4SUQ3UlR0ZEVxQkxNakRUVFRiUmFnbklOalphLUZkRFVVaXBRUk5NZW5PaUZydTFmQkNPSTdTVTNZd0plWXllNVFJdmN4MVcyTGlwMGFtVjBzOGRxR1FjbzhfYW5zdTB0ZEZBTTJhakltazh1dktNMUZsOUItdFdTb1pIaUxySllXNkdlY20zUS0wTnpFNTB2SU5acG1VcXhyaHBmME8takw3RDh5T043T2VGOV92TzNya2pWSlpYVjZDdXlZcjM3a0hPTlhkaW9oQmxqQlpGRFYyTTY4WmZmT3k4Tk1tdXRuSGdTUVpNT2NKenhXb05PdXBfSEdhMTNxNjdpNXlKUUI2YUgydFFPX1VvXzVJb0UxWTU2YVNiNDQ0QndZanhMMHR1cGdHWGhvcEg1QXEtSXZJdTdZUE12ZEVVWkF4QmtsQS1GYnY3SFIxSHlsOGVfcEpGS1A4QUVEQWNEOFZYYlljQ3ByTU03YU16Y0UzUnJQZEprSWNjT1ZXVEtDWi03Y3ZzRVdYUTlabXJISEo5THRHVXVuM0xqbzA4bGVlZVpOMk1QMmptb21tV0pTMlVoOXdWVU95UW1iQmttc2w1RG9mMWwxXzg1T2IxYUVmTUJEZkpUdTFDTzZ3RlBFeUFiX01iRTZNWkNaSG45TkFOM2pzbUJRZ2N0VFpoejJUTG1RODY3TzZpSzVkYUQzaEpfY2pSTkRzU0VpanlkdXVQQmJ2WU5peno4QWNLTDVxZTlhSHI3NnNiM0k0Y3JkQ0xaOU05bGtsQl8zQklvaktWSDZ4aVp2MHlYelJuUDJyTU9CZC1OZjJxNFc1dDcwSUlxaVh1LTMyWWFwU0IwUU9kOUFpMWpnOERtLTh1VmJiNGVwcXBMbU5fMjVZc0hFbmxQT2puSFd1ZGpyTkphLU5sVlBZWWxrWEZrWGJQWmVkN19tZFZfZ1l1V3pSWlA0V0ZxM2lrWnl2NU9WeTdCbDROSmhfeENKTFhMVXk1d195S2JMUFJoRXZjcVo4V2g0MTNKRnZhUE1wRkNPM3FZOGdVazJPeW5PSGpuZnFGTTdJMkRnam5rUlV6NFlqODlIelRYaEN5VjdJNnVwbllNODNCTFRHMWlXbmM1VlRxbXB3Wm9LRjVrQUpjYzRNMThUMWwwSVhBMUlyamtPZnE4R0o4bEdHay1zMjR5RDJkZ1lYRHZaNHVHU2otR3ZpN25LZlEySEU0UmdTNzJGVHNWQXMyb0dVMV9WUE13ODhZWUFaakxGOWZieGNXZkNYRnV5djEyWTZLcmdrajRBLU1rS1Z0VVRkOWlDMU9fMGVmYXFhZXJGMUhpNkdmb2hkbzZ1OWV6VlNmVzN
ISjVYTFh6SjJNdWR5MWZidE8yVEo2dnRrZXhMRXBPczUwTG13OGhNUVpIQm0zQmRKRnJ0Nl8wNW1Ob0dHRDVpU0NWREV3TkY2SjktdVBkMFU1ZXBmSFpHQ3FHNTRZdTJvaExpZVEtLTU4YTVyeFBpNDdEajZtWUc4c1dBeUJqQ3NIY1NLS0FIMUxGZzZxNFNkOG9ORGNHWWJCVnZuNnJVTEtoQi1mRTZyUl81ZWJJMi1KOGdERzBhNVRZeHRYUUlqY2JvMFlaNHhWMU9pWFFiZjdaLUhkaG15TTBPZVlkS2R5UVdENTI4QVFiY1RJV0ZNZnlpVWxfZmlnN1BXbGdrbjFGUkhzYl9qeHBxVVJacUE4bjZETENHVFpSamh0NVpOM2hMYTZjYzBuS3J0a3hhZGxSM1V5UHd2OTU3ZHY0Yy1xWDBkWUk0Ymp0MWVrS3YzSktKODhQZnY3QTZ1Wm1VZkZJbS1jamdreks1ZlhpQjFOUDFiOHJ2Nm9NcmdTdU5LQXV2RkZWZEFNZnVKUjVwcVY3dDdhQnpmRVJ6SmlvVXpDM0ZiYXh5bGE2X04tTE9qZ3BiTnN3TF9ZaFRxSUpjNjB1dXZBcy1TZHRHTjFjSUR3WUl4cE9VNzB5Rkk4U3Z1SVZYTl9sYXlZVk83UnFrMlVmcnBpam9lRUlCY19DdVJwOXl2TVVDV1pMRFZTZk9MY3Z1eXA0MnhGazc5YllQaWtOeTc4NjlOa2lGY05RRzY1cG9nbGpYelc4c3FicWxWRkg0YzRSamFlQ19zOU14YWJreU9pNDREZVJ3a0REMUxGTzF1XzI1bEF3VXVZRjlBeWFiLXJsOXgza3VZem1WckhWSnVNbDBNcldadU8xQ3RwOTl5NGgtVlR0QklCLWl5WkE4V1FlQTBCOVU1RE9sQlRrYUNZOGdfUmEwbEZvUTFGUEFWVmQ4V1FhOU9VNjZqemRpZm1sUDhZQTJ0YVBRbWZldkF5THV4QXpfdUtNZ0tlcGdSRFM3c0lDOTNQbnBxdmxYYWNpTmI3MW9BMlZIdTQ5RldudHpNQWQ5NDNPLVVTLXVVNzdHZXh4UXpZa3dVa2J4dTFDV1RkYjRnWXU2M3lJekRYWGNMcWU5OVh6U2xZWDh6MmpqcnpiOHlnMjA5S3RFQm1NZjNSM21adkVnTUpSYVhkTzNkNnJCTmljY0x1cl9kMkx3UHhySjZEdHREanZERzNEUTFlTkR0NWlBczAtdmFGTjdZNVpTMlkxV2czYW5RN2lqemg4eUViZDV6RjdKNXdFcUlvcVhoNkJ6eVJkR1pua1hnNzQwOEs2TXJYSlpGcW9qRDU2QjBOWFFtdXBJRkRKbmdZUF9ZSmRPVEtvUjVhLTV1NjdXQjRhS0duaEtJb2FrQnNjUTRvdFMxdkdTNk1NYlFHUFhhYTJ1eUN3WHN4UlJ4UjdrZjY0SzFGYWVFN1k0cGJnc1RjNmFUenR4NHljbVhablZSWHZmUVN3cXRHNjhsX1BSZWEzdTJUZFA0S2pTaU9YMnZIQ1ZPcGhWMFJqZkVEMWRMR1h3SnU0Z2FzZ3VGM3puNzdhVjhaQXNIWHFsbjB0TDVYSFdSNV9rdWhUUUhSZHBGYkJIVDB5SDdlMC13QTVnS0g5Qkg5RGNxSGJlelVndUhPcEQ0QkRKMTJTZUM1OXJhVm0zYjU0OVY2dk9MQVBheklIQXpVNW9Yc0ROVjEzaFZTWmVxYlBWMlNlSzladzJ6TmNuMG5FVVZkN1VZN1pfS2ZHa0lQcE80S24wSnQtVlJVV09OVWJ3M09YMkZpV2ktVF9ENHhKU2dfYUQ2aUVyamk0VHJHQmVfVHU4clpUTFoteW5aSWRPV1M0RDRMTms4NGRoYmJfVE82aUl2X3VieVJOdDhBQmRwdzdnRTVBNzZwaW93dUlZb3ZRYUtOeG9ULWxvNVp5a0haSjdkcUhRb3d6UGIxRUpCVkVYX2d6TkRqQVozUWxkNGFoc1FXYVd
2YWNkME9Qclo0bjYxMFRWTy1nbnI5NTBJNzRMMDluUXRKYTFqQUN4d0d5aHVlamN3Tkk3NWJXeXR0TW9BeUg5Vnp4Q2RnZUY3b3AtMDlrNmlrSGR0eGRtbUdUd2lFRWg4MklEeWJHN2wwZEpVSXMxNDNOWjRFS0tPdWxhMmFCckhfRENIY184aEFDZXNrRDl2dHQtQW12UnRuQXJjaDJoTUpiYkNWQUtfRG9GMUZoNWM4UnBYZ29RWWs2NHcyUm5kdTF3Vk1GeFpiRUJLaVZ2UGFjbi1jV3lMV0N2ZDl4VERPN295X01NNG56ZjZkRzZoYUtmY1E5NlVXemx2SnVfb19iSXg0R2M3Mjd1a2JRPT0=
|
||||||
|
|
||||||
|
# Feature SyncDelta JIRA configuration
|
||||||
|
Feature_SyncDelta_JIRA_DELTA_TOKEN_SECRET = PROD_ENC:Z0FBQUFBQnBDM1Z4d3Z4d2x6N1FhUktMU0RKbkxfY2pTQkRzXzJ6UXVEbDNCaFM3UHMtQVFGYzNmYWs4N0lMM1R2SFJuZTVFVmx6MGVEbXc5U3NOTnY1TWN0ZDNaamlHQWloalM3VldmREJNSHQ1TlVkSVFJMTVhQWVGSVRMTGw4UTBqNGlQZFVuaHp4WUlKemR5UnBXZlh0REJFLXJ4ejR3PT0=
|
||||||
|
|
||||||
|
# Teamsbot Browser Bot Service
|
||||||
|
TEAMSBOT_BROWSER_BOT_URL = https://cae-poweron-shared.redwater-53d21339.switzerlandnorth.azurecontainerapps.io
|
||||||
|
|
||||||
|
# Debug Configuration
|
||||||
|
APP_DEBUG_CHAT_WORKFLOW_ENABLED = FALSE
|
||||||
|
APP_DEBUG_CHAT_WORKFLOW_DIR = ./test-chat
|
||||||
|
APP_DEBUG_ACCOUNTING_SYNC_ENABLED = FALSE
|
||||||
|
APP_DEBUG_ACCOUNTING_SYNC_DIR = ./debug/sync
|
||||||
|
|
||||||
|
# Manadate Pre-Processing Servers
|
||||||
|
PREPROCESS_ALTHAUS_CHAT_SECRET = PROD_ENC:Z0FBQUFBQnBaSnM4RVRmYW5IelNIbklTUDZIMEoycEN4ZFF0YUJoWWlUTUh2M0dhSXpYRXcwVkRGd1VieDNsYkdCRlpxMUR5Rjk1RDhPRkE5bmVtc2VDMURfLW9QNkxMVHN0M1JhbU9sa3JHWmdDZnlHS3BQRVBGTERVMHhXOVdDOWVqNkhfSUQyOHo=
|
||||||
|
|
||||||
|
# Preprocessor API Configuration
|
||||||
|
PP_QUERY_API_KEY=ouho02j0rj2oijroi3rj2oijro23jr0990
|
||||||
|
PP_QUERY_BASE_URL=https://poweron-althaus-preprocess-prod-e3fegaatc7faency.switzerlandnorth-01.azurewebsites.net/api/v1/dataquery/query
|
||||||
|
|
||||||
|
# Azure Communication Services Email Configuration
|
||||||
|
MESSAGING_ACS_CONNECTION_STRING = endpoint=https://mailing-poweron-prod.switzerland.communication.azure.com/;accesskey=4UizRfBKBgMhDgQ92IYINM6dJsO1HIeL6W1DvIX9S0GtaS1PjIXqJQQJ99CAACULyCpHwxUcAAAAAZCSuSCt
|
||||||
|
MESSAGING_ACS_SENDER_EMAIL = DoNotReply@poweron.swiss
|
||||||
|
|
@ -11,7 +11,7 @@ APP_API_URL = https://api.poweron.swiss
|
||||||
# PostgreSQL DB Host
|
# PostgreSQL DB Host
|
||||||
DB_HOST=10.20.0.21
|
DB_HOST=10.20.0.21
|
||||||
DB_USER=poweron_dev
|
DB_USER=poweron_dev
|
||||||
DB_PASSWORD_SECRET = mypassword
|
DB_PASSWORD_SECRET = PROD_ENC:Z0FBQUFBQnA4UXZiMnRoUzVlbVRLX3JTRl94cVpMaURtMndZVmFBYXdvdnIxLV81dWwxWmhmcUlCMUFZbDhRT2NsQmNqSl9ZMmRWRVN1Y2JqNlVwOXRJY1VBTm1oSjNiaFE9PQ==
|
||||||
DB_PORT=5432
|
DB_PORT=5432
|
||||||
|
|
||||||
# Security Configuration
|
# Security Configuration
|
||||||
|
|
@ -51,6 +51,8 @@ Service_CLICKUP_CLIENT_ID = O3FX3H602A30MQN4I4SBNGJLIDBD5SL4
|
||||||
Service_CLICKUP_CLIENT_SECRET = PROD_ENC:Z0FBQUFBQnB5dkd6VGw5WDdhdDRsVENSalhSSUV0OFFxbEx0V1l6aktNV0E5Y18xU3JHLUlqMWVJdmxyajAydVZRaDJkZzJOVXhxRV9ROFRZbWxlRjh4c3NtQnRFMmRtZWpzTWVsdngtWldlNXRKTURHQjJCOEt6alMwQlkwOFYyVVJWNURJUGJIZDIxYVlfNnBrMU54M0Q3TVdVbFZqRkJKTUtqa05wUkV4eGZvbXNsVi1nNVdBPQ==
|
Service_CLICKUP_CLIENT_SECRET = PROD_ENC:Z0FBQUFBQnB5dkd6VGw5WDdhdDRsVENSalhSSUV0OFFxbEx0V1l6aktNV0E5Y18xU3JHLUlqMWVJdmxyajAydVZRaDJkZzJOVXhxRV9ROFRZbWxlRjh4c3NtQnRFMmRtZWpzTWVsdngtWldlNXRKTURHQjJCOEt6alMwQlkwOFYyVVJWNURJUGJIZDIxYVlfNnBrMU54M0Q3TVdVbFZqRkJKTUtqa05wUkV4eGZvbXNsVi1nNVdBPQ==
|
||||||
Service_CLICKUP_OAUTH_REDIRECT_URI = https://api.poweron.swiss/api/clickup/auth/connect/callback
|
Service_CLICKUP_OAUTH_REDIRECT_URI = https://api.poweron.swiss/api/clickup/auth/connect/callback
|
||||||
|
|
||||||
|
# Infomaniak: no OAuth client. Users paste a Personal Access Token (kdrive + mail) per UI.
|
||||||
|
|
||||||
# Stripe Billing (both end with _SECRET for encryption script)
|
# Stripe Billing (both end with _SECRET for encryption script)
|
||||||
STRIPE_SECRET_KEY_SECRET = PROD_ENC:Z0FBQUFBQnB5dkd6aVA3R3VRS3VHMUgzUEVjYkR4eUZKWFhPUzFTTVlHNnBvT3FienNQaUlBWVpPLXJyVGpGMWk4LXktMXphX0J6ZTVESkJxdjNNa3ZJbF9wX2ppYzdjYlF0cmdVamlEWWJDSmJYYkJseHctTlh4dnNoQWs4SG5haVl2TTNDdXpuaFpqeDBtNkFCbUxMa0RaWG14dmxyOEdILTNrZ2licmNpbXVkN2lFSWoxZW1BODNpV0ZTQ0VaeXRmR1d4RjExMlVFS3MtQU9zZXZlZE1mTmY3OWctUXJHdz09
|
STRIPE_SECRET_KEY_SECRET = PROD_ENC:Z0FBQUFBQnB5dkd6aVA3R3VRS3VHMUgzUEVjYkR4eUZKWFhPUzFTTVlHNnBvT3FienNQaUlBWVpPLXJyVGpGMWk4LXktMXphX0J6ZTVESkJxdjNNa3ZJbF9wX2ppYzdjYlF0cmdVamlEWWJDSmJYYkJseHctTlh4dnNoQWs4SG5haVl2TTNDdXpuaFpqeDBtNkFCbUxMa0RaWG14dmxyOEdILTNrZ2licmNpbXVkN2lFSWoxZW1BODNpV0ZTQ0VaeXRmR1d4RjExMlVFS3MtQU9zZXZlZE1mTmY3OWctUXJHdz09
|
||||||
STRIPE_WEBHOOK_SECRET = PROD_ENC:Z0FBQUFBQnBudkpGNUpTWldsakYydFhFelBrR1lSaWxYT3kyMENOMUljZTJUZHBWcEhhdWVCMzYxZXQ5b3VlTFVRalFiTVdsbGxrdUx0RDFwSEpsOC1sTDJRTEJNQlA3S3ZaQzBtV1h6bWp5VnlMZUgwUlF3cXYxcnljZVE5SWdzLVg3V0syOWRYS08=
|
STRIPE_WEBHOOK_SECRET = PROD_ENC:Z0FBQUFBQnBudkpGNUpTWldsakYydFhFelBrR1lSaWxYT3kyMENOMUljZTJUZHBWcEhhdWVCMzYxZXQ5b3VlTFVRalFiTVdsbGxrdUx0RDFwSEpsOC1sTDJRTEJNQlA3S3ZaQzBtV1h6bWp5VnlMZUgwUlF3cXYxcnljZVE5SWdzLVg3V0syOWRYS08=
|
||||||
|
|
|
||||||
|
|
@ -9,6 +9,7 @@ import logging
|
||||||
import importlib
|
import importlib
|
||||||
import os
|
import os
|
||||||
import time
|
import time
|
||||||
|
import threading
|
||||||
from typing import Dict, List, Optional, Any, Tuple
|
from typing import Dict, List, Optional, Any, Tuple
|
||||||
from modules.datamodels.datamodelAi import AiModel
|
from modules.datamodels.datamodelAi import AiModel
|
||||||
from .aicoreBase import BaseConnectorAi
|
from .aicoreBase import BaseConnectorAi
|
||||||
|
|
@ -31,11 +32,37 @@ class ModelRegistry:
|
||||||
self._connectors: Dict[str, BaseConnectorAi] = {}
|
self._connectors: Dict[str, BaseConnectorAi] = {}
|
||||||
self._lastRefresh: Optional[float] = None
|
self._lastRefresh: Optional[float] = None
|
||||||
self._refreshInterval: float = 300.0 # 5 minutes
|
self._refreshInterval: float = 300.0 # 5 minutes
|
||||||
|
self._refreshLock = threading.Lock()
|
||||||
self._connectorsInitialized: bool = False
|
self._connectorsInitialized: bool = False
|
||||||
self._discoveredConnectorsCache: Optional[List[BaseConnectorAi]] = None # Avoid re-instantiating on every discoverConnectors() call
|
self._discoveredConnectorsCache: Optional[List[BaseConnectorAi]] = None # Avoid re-instantiating on every discoverConnectors() call
|
||||||
self._getAvailableModelsCache: Dict[Tuple[str, int], Tuple[List[AiModel], float]] = {} # (user_id, rbac_id) -> (models, ts)
|
self._getAvailableModelsCache: Dict[Tuple[str, int], Tuple[List[AiModel], float]] = {} # (user_id, rbac_id) -> (models, ts)
|
||||||
self._getAvailableModelsCacheTtl: float = 30.0 # seconds
|
self._getAvailableModelsCacheTtl: float = 30.0 # seconds
|
||||||
|
|
||||||
|
def _addModelToDict(self, model: AiModel, connectorType: str, target: Dict[str, AiModel]):
|
||||||
|
"""Add model to a dict, tolerating benign re-adds from the same connector."""
|
||||||
|
if model.displayName in target:
|
||||||
|
existing = target[model.displayName]
|
||||||
|
if existing.name == model.name and existing.connectorType == model.connectorType:
|
||||||
|
logger.debug(f"Skipping duplicate model '{model.displayName}' from same connector {connectorType}")
|
||||||
|
return
|
||||||
|
raise ValueError(
|
||||||
|
f"displayName conflict '{model.displayName}': "
|
||||||
|
f"existing name='{existing.name}' (connector: {existing.connectorType}), "
|
||||||
|
f"new name='{model.name}' (connector: {connectorType})"
|
||||||
|
)
|
||||||
|
|
||||||
|
if TESTING_MAX_TOKENS_OVERRIDE is not None and model.maxTokens > TESTING_MAX_TOKENS_OVERRIDE:
|
||||||
|
originalMaxTokens = model.maxTokens
|
||||||
|
model.maxTokens = TESTING_MAX_TOKENS_OVERRIDE
|
||||||
|
logger.debug(f"TESTING: Overrode maxTokens for {model.displayName}: {originalMaxTokens} -> {TESTING_MAX_TOKENS_OVERRIDE}")
|
||||||
|
|
||||||
|
target[model.displayName] = model
|
||||||
|
logger.debug(f"Registered model: {model.displayName} (name: {model.name}) from {connectorType}")
|
||||||
|
|
||||||
|
def _addModel(self, model: AiModel, connectorType: str):
|
||||||
|
"""Convenience wrapper for adding to self._models."""
|
||||||
|
self._addModelToDict(model, connectorType, self._models)
|
||||||
|
|
||||||
def registerConnector(self, connector: BaseConnectorAi):
|
def registerConnector(self, connector: BaseConnectorAi):
|
||||||
"""Register a connector and collect its models."""
|
"""Register a connector and collect its models."""
|
||||||
connectorType = connector.getConnectorType()
|
connectorType = connector.getConnectorType()
|
||||||
|
|
@ -47,26 +74,10 @@ class ModelRegistry:
|
||||||
|
|
||||||
self._connectors[connectorType] = connector
|
self._connectors[connectorType] = connector
|
||||||
|
|
||||||
# Collect models from this connector
|
|
||||||
try:
|
try:
|
||||||
models = connector.getCachedModels()
|
models = connector.getCachedModels()
|
||||||
for model in models:
|
for model in models:
|
||||||
# Validate displayName uniqueness
|
self._addModel(model, connectorType)
|
||||||
if model.displayName in self._models:
|
|
||||||
existingModel = self._models[model.displayName]
|
|
||||||
errorMsg = f"Duplicate displayName '{model.displayName}' detected! Existing model: displayName='{existingModel.displayName}', name='{existingModel.name}' (connector: {existingModel.connectorType}), New model: displayName='{model.displayName}', name='{model.name}' (connector: {connectorType}). displayName must be unique."
|
|
||||||
logger.error(errorMsg)
|
|
||||||
raise ValueError(errorMsg)
|
|
||||||
|
|
||||||
# TODO TESTING: Override maxTokens if testing override is enabled
|
|
||||||
if TESTING_MAX_TOKENS_OVERRIDE is not None and model.maxTokens > TESTING_MAX_TOKENS_OVERRIDE:
|
|
||||||
originalMaxTokens = model.maxTokens
|
|
||||||
model.maxTokens = TESTING_MAX_TOKENS_OVERRIDE
|
|
||||||
logger.debug(f"TESTING: Overrode maxTokens for {model.displayName}: {originalMaxTokens} -> {TESTING_MAX_TOKENS_OVERRIDE}")
|
|
||||||
|
|
||||||
# Use displayName as the key (must be unique)
|
|
||||||
self._models[model.displayName] = model
|
|
||||||
logger.debug(f"Registered model: {model.displayName} (name: {model.name}) from {connectorType}")
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error(f"Failed to register models from {connectorType}: {e}")
|
logger.error(f"Failed to register models from {connectorType}: {e}")
|
||||||
raise
|
raise
|
||||||
|
|
@ -116,51 +127,40 @@ class ModelRegistry:
|
||||||
self._connectorsInitialized = True
|
self._connectorsInitialized = True
|
||||||
|
|
||||||
def refreshModels(self, force: bool = False):
|
def refreshModels(self, force: bool = False):
|
||||||
"""Refresh models from all registered connectors."""
|
"""Refresh models from all registered connectors. Thread-safe via _refreshLock."""
|
||||||
import time
|
|
||||||
|
|
||||||
self.ensureConnectorsRegistered()
|
self.ensureConnectorsRegistered()
|
||||||
|
|
||||||
currentTime = time.time()
|
currentTime = time.time()
|
||||||
|
|
||||||
# Check if refresh is needed
|
|
||||||
if (not force and
|
if (not force and
|
||||||
self._lastRefresh is not None and
|
self._lastRefresh is not None and
|
||||||
currentTime - self._lastRefresh < self._refreshInterval):
|
currentTime - self._lastRefresh < self._refreshInterval):
|
||||||
return
|
return
|
||||||
|
|
||||||
logger.info("Refreshing model registry...")
|
if not self._refreshLock.acquire(blocking=False):
|
||||||
|
logger.debug("refreshModels already running in another thread, skipping")
|
||||||
|
return
|
||||||
|
|
||||||
# Clear existing models
|
try:
|
||||||
self._models.clear()
|
logger.info("Refreshing model registry...")
|
||||||
|
newModels: Dict[str, AiModel] = {}
|
||||||
|
|
||||||
# Re-register all connectors
|
for connector in self._connectors.values():
|
||||||
for connector in self._connectors.values():
|
connectorType = connector.getConnectorType()
|
||||||
try:
|
try:
|
||||||
connector.clearCache() # Clear connector cache
|
connector.clearCache()
|
||||||
models = connector.getCachedModels()
|
models = connector.getCachedModels()
|
||||||
for model in models:
|
for model in models:
|
||||||
# Validate displayName uniqueness
|
self._addModelToDict(model, connectorType, newModels)
|
||||||
if model.displayName in self._models:
|
except Exception as e:
|
||||||
existingModel = self._models[model.displayName]
|
logger.error(f"Failed to refresh models from {connectorType}: {e}")
|
||||||
errorMsg = f"Duplicate displayName '{model.displayName}' detected! Existing model: displayName='{existingModel.displayName}', name='{existingModel.name}' (connector: {existingModel.connectorType}), New model: displayName='{model.displayName}', name='{model.name}' (connector: {connector.getConnectorType()}). displayName must be unique."
|
raise
|
||||||
logger.error(errorMsg)
|
|
||||||
raise ValueError(errorMsg)
|
|
||||||
|
|
||||||
# TODO TESTING: Override maxTokens if testing override is enabled
|
self._models = newModels
|
||||||
if TESTING_MAX_TOKENS_OVERRIDE is not None and model.maxTokens > TESTING_MAX_TOKENS_OVERRIDE:
|
self._lastRefresh = time.time()
|
||||||
originalMaxTokens = model.maxTokens
|
logger.info(f"Model registry refreshed: {len(self._models)} models available")
|
||||||
model.maxTokens = TESTING_MAX_TOKENS_OVERRIDE
|
finally:
|
||||||
logger.debug(f"TESTING: Overrode maxTokens for {model.displayName}: {originalMaxTokens} -> {TESTING_MAX_TOKENS_OVERRIDE}")
|
self._refreshLock.release()
|
||||||
|
|
||||||
# Use displayName as the key (must be unique)
|
|
||||||
self._models[model.displayName] = model
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"Failed to refresh models from {connector.getConnectorType()}: {e}")
|
|
||||||
raise
|
|
||||||
|
|
||||||
self._lastRefresh = currentTime
|
|
||||||
logger.info(f"Model registry refreshed: {len(self._models)} models available")
|
|
||||||
|
|
||||||
def getModel(self, displayName: str) -> Optional[AiModel]:
|
def getModel(self, displayName: str) -> Optional[AiModel]:
|
||||||
"""Get a specific model by displayName (displayName must be unique)."""
|
"""Get a specific model by displayName (displayName must be unique)."""
|
||||||
|
|
|
||||||
|
|
@ -13,6 +13,35 @@ from modules.datamodels.datamodelAi import AiModel, PriorityEnum, ProcessingMode
|
||||||
# Configure logger
|
# Configure logger
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
def _supportsCustomTemperature(modelName: str) -> bool:
|
||||||
|
"""Check whether an Anthropic model accepts a custom ``temperature``.
|
||||||
|
|
||||||
|
Anthropic's Extended-Thinking models (Claude 4.7 Opus and the
|
||||||
|
upcoming 4.7 Sonnet/Haiku, plus all 5.x and beyond) reject every
|
||||||
|
``temperature`` value with HTTP 400
|
||||||
|
``{"error": "`temperature` is deprecated for this model."}`` --
|
||||||
|
only the model's internal default is accepted. Older Claude 4.5 /
|
||||||
|
4.6 models still accept any value in [0, 1].
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
True if ``temperature`` may be sent; False if it must be omitted.
|
||||||
|
"""
|
||||||
|
if not modelName:
|
||||||
|
return True
|
||||||
|
name = modelName.lower()
|
||||||
|
if name.startswith("claude-opus-4-7"):
|
||||||
|
return False
|
||||||
|
if name.startswith("claude-sonnet-4-7"):
|
||||||
|
return False
|
||||||
|
if name.startswith("claude-haiku-4-7"):
|
||||||
|
return False
|
||||||
|
# 5.x and beyond: same Extended-Thinking family, no custom temperature.
|
||||||
|
if name.startswith("claude-opus-5") or name.startswith("claude-sonnet-5") or name.startswith("claude-haiku-5"):
|
||||||
|
return False
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
def loadConfigData():
|
def loadConfigData():
|
||||||
"""Load configuration data for Anthropic connector"""
|
"""Load configuration data for Anthropic connector"""
|
||||||
return {
|
return {
|
||||||
|
|
@ -49,6 +78,102 @@ class AiAnthropic(BaseConnectorAi):
|
||||||
def getModels(self) -> List[AiModel]:
|
def getModels(self) -> List[AiModel]:
|
||||||
# Get all available Anthropic models.
|
# Get all available Anthropic models.
|
||||||
return [
|
return [
|
||||||
|
AiModel(
|
||||||
|
name="claude-opus-4-7",
|
||||||
|
displayName="Anthropic Claude Opus 4.7",
|
||||||
|
connectorType="anthropic",
|
||||||
|
apiUrl="https://api.anthropic.com/v1/messages",
|
||||||
|
temperature=0.2,
|
||||||
|
maxTokens=128000,
|
||||||
|
contextLength=1000000,
|
||||||
|
costPer1kTokensInput=0.005, # $5/M tokens (Anthropic API, 2026-04)
|
||||||
|
costPer1kTokensOutput=0.025, # $25/M tokens
|
||||||
|
speedRating=5,
|
||||||
|
qualityRating=10,
|
||||||
|
functionCall=self.callAiBasic,
|
||||||
|
functionCallStream=self.callAiBasicStream,
|
||||||
|
priority=PriorityEnum.QUALITY,
|
||||||
|
processingMode=ProcessingModeEnum.DETAILED,
|
||||||
|
operationTypes=createOperationTypeRatings(
|
||||||
|
(OperationTypeEnum.PLAN, 10),
|
||||||
|
(OperationTypeEnum.DATA_ANALYSE, 9),
|
||||||
|
(OperationTypeEnum.DATA_GENERATE, 10),
|
||||||
|
(OperationTypeEnum.DATA_EXTRACT, 9),
|
||||||
|
(OperationTypeEnum.AGENT, 10),
|
||||||
|
(OperationTypeEnum.DATA_QUERY, 3),
|
||||||
|
),
|
||||||
|
version="claude-opus-4-7",
|
||||||
|
calculatepriceCHF=lambda processingTime, bytesSent, bytesReceived: (bytesSent / 4 / 1000) * 0.005 + (bytesReceived / 4 / 1000) * 0.025
|
||||||
|
),
|
||||||
|
AiModel(
|
||||||
|
name="claude-sonnet-4-6",
|
||||||
|
displayName="Anthropic Claude Sonnet 4.6",
|
||||||
|
connectorType="anthropic",
|
||||||
|
apiUrl="https://api.anthropic.com/v1/messages",
|
||||||
|
temperature=0.2,
|
||||||
|
maxTokens=64000,
|
||||||
|
contextLength=1000000,
|
||||||
|
costPer1kTokensInput=0.003, # $3/M tokens
|
||||||
|
costPer1kTokensOutput=0.015, # $15/M tokens
|
||||||
|
speedRating=7,
|
||||||
|
qualityRating=10,
|
||||||
|
functionCall=self.callAiBasic,
|
||||||
|
functionCallStream=self.callAiBasicStream,
|
||||||
|
priority=PriorityEnum.BALANCED,
|
||||||
|
processingMode=ProcessingModeEnum.ADVANCED,
|
||||||
|
operationTypes=createOperationTypeRatings(
|
||||||
|
(OperationTypeEnum.PLAN, 9),
|
||||||
|
(OperationTypeEnum.DATA_ANALYSE, 9),
|
||||||
|
(OperationTypeEnum.DATA_GENERATE, 9),
|
||||||
|
(OperationTypeEnum.DATA_EXTRACT, 8),
|
||||||
|
(OperationTypeEnum.AGENT, 9),
|
||||||
|
(OperationTypeEnum.DATA_QUERY, 9),
|
||||||
|
),
|
||||||
|
version="claude-sonnet-4-6",
|
||||||
|
calculatepriceCHF=lambda processingTime, bytesSent, bytesReceived: (bytesSent / 4 / 1000) * 0.003 + (bytesReceived / 4 / 1000) * 0.015
|
||||||
|
),
|
||||||
|
AiModel(
|
||||||
|
name="claude-opus-4-7",
|
||||||
|
displayName="Anthropic Claude Opus 4.7 Vision",
|
||||||
|
connectorType="anthropic",
|
||||||
|
apiUrl="https://api.anthropic.com/v1/messages",
|
||||||
|
temperature=0.2,
|
||||||
|
maxTokens=128000,
|
||||||
|
contextLength=1000000,
|
||||||
|
costPer1kTokensInput=0.005,
|
||||||
|
costPer1kTokensOutput=0.025,
|
||||||
|
speedRating=5,
|
||||||
|
qualityRating=10,
|
||||||
|
functionCall=self.callAiImage,
|
||||||
|
priority=PriorityEnum.QUALITY,
|
||||||
|
processingMode=ProcessingModeEnum.DETAILED,
|
||||||
|
operationTypes=createOperationTypeRatings(
|
||||||
|
(OperationTypeEnum.IMAGE_ANALYSE, 10)
|
||||||
|
),
|
||||||
|
version="claude-opus-4-7",
|
||||||
|
calculatepriceCHF=lambda processingTime, bytesSent, bytesReceived: (bytesSent / 4 / 1000) * 0.005 + (bytesReceived / 4 / 1000) * 0.025
|
||||||
|
),
|
||||||
|
AiModel(
|
||||||
|
name="claude-sonnet-4-6",
|
||||||
|
displayName="Anthropic Claude Sonnet 4.6 Vision",
|
||||||
|
connectorType="anthropic",
|
||||||
|
apiUrl="https://api.anthropic.com/v1/messages",
|
||||||
|
temperature=0.2,
|
||||||
|
maxTokens=64000,
|
||||||
|
contextLength=1000000,
|
||||||
|
costPer1kTokensInput=0.003,
|
||||||
|
costPer1kTokensOutput=0.015,
|
||||||
|
speedRating=6,
|
||||||
|
qualityRating=10,
|
||||||
|
functionCall=self.callAiImage,
|
||||||
|
priority=PriorityEnum.QUALITY,
|
||||||
|
processingMode=ProcessingModeEnum.DETAILED,
|
||||||
|
operationTypes=createOperationTypeRatings(
|
||||||
|
(OperationTypeEnum.IMAGE_ANALYSE, 10)
|
||||||
|
),
|
||||||
|
version="claude-sonnet-4-6",
|
||||||
|
calculatepriceCHF=lambda processingTime, bytesSent, bytesReceived: (bytesSent / 4 / 1000) * 0.003 + (bytesReceived / 4 / 1000) * 0.015
|
||||||
|
),
|
||||||
AiModel(
|
AiModel(
|
||||||
name="claude-sonnet-4-5-20250929",
|
name="claude-sonnet-4-5-20250929",
|
||||||
displayName="Anthropic Claude Sonnet 4.5",
|
displayName="Anthropic Claude Sonnet 4.5",
|
||||||
|
|
@ -180,8 +305,11 @@ class AiAnthropic(BaseConnectorAi):
|
||||||
payload: Dict[str, Any] = {
|
payload: Dict[str, Any] = {
|
||||||
"model": model.name,
|
"model": model.name,
|
||||||
"messages": converted_messages,
|
"messages": converted_messages,
|
||||||
"temperature": temperature,
|
|
||||||
}
|
}
|
||||||
|
# Extended-Thinking models (claude-opus-4-7 etc.) reject any
|
||||||
|
# `temperature` value -- only the model default is accepted.
|
||||||
|
if _supportsCustomTemperature(model.name):
|
||||||
|
payload["temperature"] = temperature
|
||||||
|
|
||||||
# Anthropic requires max_tokens - use provided value or throw error
|
# Anthropic requires max_tokens - use provided value or throw error
|
||||||
if maxTokens is None:
|
if maxTokens is None:
|
||||||
|
|
@ -223,6 +351,7 @@ class AiAnthropic(BaseConnectorAi):
|
||||||
|
|
||||||
# Parse response
|
# Parse response
|
||||||
anthropicResponse = response.json()
|
anthropicResponse = response.json()
|
||||||
|
stop_reason = anthropicResponse.get("stop_reason")
|
||||||
|
|
||||||
# Extract content and tool_use blocks from response
|
# Extract content and tool_use blocks from response
|
||||||
content = ""
|
content = ""
|
||||||
|
|
@ -246,9 +375,25 @@ class AiAnthropic(BaseConnectorAi):
|
||||||
|
|
||||||
if not content and not toolCalls:
|
if not content and not toolCalls:
|
||||||
logger.warning(f"Anthropic API returned empty content. Full response: {anthropicResponse}")
|
logger.warning(f"Anthropic API returned empty content. Full response: {anthropicResponse}")
|
||||||
content = "[Anthropic API returned empty response]"
|
err = (
|
||||||
|
"Anthropic refused the request (content policy) — try another model or adjust the prompt."
|
||||||
|
if stop_reason == "refusal"
|
||||||
|
else f"Anthropic returned no assistant text (stop_reason={stop_reason or 'unknown'})."
|
||||||
|
)
|
||||||
|
return AiModelResponse(
|
||||||
|
content="",
|
||||||
|
success=False,
|
||||||
|
error=err,
|
||||||
|
modelId=model.name,
|
||||||
|
metadata={
|
||||||
|
"response_id": anthropicResponse.get("id", ""),
|
||||||
|
"stop_reason": stop_reason,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
metadata = {"response_id": anthropicResponse.get("id", "")}
|
metadata = {"response_id": anthropicResponse.get("id", "")}
|
||||||
|
if stop_reason:
|
||||||
|
metadata["stop_reason"] = stop_reason
|
||||||
if toolCalls:
|
if toolCalls:
|
||||||
metadata["toolCalls"] = toolCalls
|
metadata["toolCalls"] = toolCalls
|
||||||
|
|
||||||
|
|
@ -285,10 +430,11 @@ class AiAnthropic(BaseConnectorAi):
|
||||||
payload: Dict[str, Any] = {
|
payload: Dict[str, Any] = {
|
||||||
"model": model.name,
|
"model": model.name,
|
||||||
"messages": converted,
|
"messages": converted,
|
||||||
"temperature": temperature,
|
|
||||||
"max_tokens": model.maxTokens,
|
"max_tokens": model.maxTokens,
|
||||||
"stream": True,
|
"stream": True,
|
||||||
}
|
}
|
||||||
|
if _supportsCustomTemperature(model.name):
|
||||||
|
payload["temperature"] = temperature
|
||||||
if system_prompt:
|
if system_prompt:
|
||||||
payload["system"] = system_prompt
|
payload["system"] = system_prompt
|
||||||
if modelCall.tools:
|
if modelCall.tools:
|
||||||
|
|
@ -363,6 +509,19 @@ class AiAnthropic(BaseConnectorAi):
|
||||||
f"Anthropic stream returned empty response: model={model.name}, "
|
f"Anthropic stream returned empty response: model={model.name}, "
|
||||||
f"stopReason={stopReason}"
|
f"stopReason={stopReason}"
|
||||||
)
|
)
|
||||||
|
err = (
|
||||||
|
"Anthropic refused the request (content policy) — try another model or adjust the prompt."
|
||||||
|
if stopReason == "refusal"
|
||||||
|
else f"Anthropic returned no assistant text (stop_reason={stopReason or 'unknown'})."
|
||||||
|
)
|
||||||
|
yield AiModelResponse(
|
||||||
|
content="",
|
||||||
|
success=False,
|
||||||
|
error=err,
|
||||||
|
modelId=model.name,
|
||||||
|
metadata={"stopReason": stopReason} if stopReason else {},
|
||||||
|
)
|
||||||
|
return
|
||||||
|
|
||||||
metadata: Dict[str, Any] = {}
|
metadata: Dict[str, Any] = {}
|
||||||
if stopReason:
|
if stopReason:
|
||||||
|
|
@ -513,8 +672,8 @@ class AiAnthropic(BaseConnectorAi):
|
||||||
if systemPrompt:
|
if systemPrompt:
|
||||||
payload["system"] = systemPrompt
|
payload["system"] = systemPrompt
|
||||||
|
|
||||||
# Set temperature from model
|
if _supportsCustomTemperature(model.name):
|
||||||
payload["temperature"] = temperature
|
payload["temperature"] = temperature
|
||||||
|
|
||||||
# Make API call with headers from httpClient (which includes anthropic-version)
|
# Make API call with headers from httpClient (which includes anthropic-version)
|
||||||
response = await self.httpClient.post(
|
response = await self.httpClient.post(
|
||||||
|
|
|
||||||
|
|
@ -11,6 +11,30 @@ from modules.datamodels.datamodelAi import AiModel, PriorityEnum, ProcessingMode
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
def _supportsCustomTemperature(modelName: str) -> bool:
|
||||||
|
"""Check whether an OpenAI model accepts a custom `temperature` value.
|
||||||
|
|
||||||
|
GPT-5.x and the o-series (o1/o3/o4) reasoning models reject every
|
||||||
|
`temperature` value other than the default (1) with HTTP 400
|
||||||
|
`unsupported_value`. For these models we must omit `temperature`
|
||||||
|
from the payload entirely. Older chat-completions models
|
||||||
|
(gpt-4o, gpt-4o-mini, gpt-4.1, gpt-3.5-*) still accept any value
|
||||||
|
in [0, 2].
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
True if `temperature` may be sent; False if it must be omitted.
|
||||||
|
"""
|
||||||
|
if not modelName:
|
||||||
|
return True
|
||||||
|
name = modelName.lower()
|
||||||
|
if name.startswith("gpt-5"):
|
||||||
|
return False
|
||||||
|
if name.startswith("o1") or name.startswith("o3") or name.startswith("o4"):
|
||||||
|
return False
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
def loadConfigData():
|
def loadConfigData():
|
||||||
"""Load configuration data for OpenAI connector"""
|
"""Load configuration data for OpenAI connector"""
|
||||||
return {
|
return {
|
||||||
|
|
@ -123,6 +147,135 @@ class AiOpenai(BaseConnectorAi):
|
||||||
version="gpt-4o",
|
version="gpt-4o",
|
||||||
calculatepriceCHF=lambda processingTime, bytesSent, bytesReceived: (bytesSent / 4 / 1000) * 0.0025 + (bytesReceived / 4 / 1000) * 0.01
|
calculatepriceCHF=lambda processingTime, bytesSent, bytesReceived: (bytesSent / 4 / 1000) * 0.0025 + (bytesReceived / 4 / 1000) * 0.01
|
||||||
),
|
),
|
||||||
|
AiModel(
|
||||||
|
name="gpt-5.5",
|
||||||
|
displayName="OpenAI GPT-5.5",
|
||||||
|
connectorType="openai",
|
||||||
|
apiUrl="https://api.openai.com/v1/chat/completions",
|
||||||
|
temperature=0.2,
|
||||||
|
maxTokens=128000,
|
||||||
|
contextLength=1050000,
|
||||||
|
costPer1kTokensInput=0.005, # $5/M tokens (OpenAI API, 2026-04)
|
||||||
|
costPer1kTokensOutput=0.03, # $30/M tokens
|
||||||
|
speedRating=8,
|
||||||
|
qualityRating=10,
|
||||||
|
functionCall=self.callAiBasic,
|
||||||
|
functionCallStream=self.callAiBasicStream,
|
||||||
|
priority=PriorityEnum.QUALITY,
|
||||||
|
processingMode=ProcessingModeEnum.DETAILED,
|
||||||
|
operationTypes=createOperationTypeRatings(
|
||||||
|
(OperationTypeEnum.PLAN, 10),
|
||||||
|
(OperationTypeEnum.DATA_ANALYSE, 10),
|
||||||
|
(OperationTypeEnum.DATA_GENERATE, 10),
|
||||||
|
(OperationTypeEnum.DATA_EXTRACT, 8),
|
||||||
|
(OperationTypeEnum.AGENT, 10),
|
||||||
|
(OperationTypeEnum.DATA_QUERY, 8),
|
||||||
|
),
|
||||||
|
version="gpt-5.5",
|
||||||
|
calculatepriceCHF=lambda processingTime, bytesSent, bytesReceived: (bytesSent / 4 / 1000) * 0.005 + (bytesReceived / 4 / 1000) * 0.03
|
||||||
|
),
|
||||||
|
AiModel(
|
||||||
|
name="gpt-5.4",
|
||||||
|
displayName="OpenAI GPT-5.4",
|
||||||
|
connectorType="openai",
|
||||||
|
apiUrl="https://api.openai.com/v1/chat/completions",
|
||||||
|
temperature=0.2,
|
||||||
|
maxTokens=128000,
|
||||||
|
contextLength=1050000,
|
||||||
|
costPer1kTokensInput=0.0025, # $2.50/M tokens
|
||||||
|
costPer1kTokensOutput=0.015, # $15/M tokens
|
||||||
|
speedRating=8,
|
||||||
|
qualityRating=10,
|
||||||
|
functionCall=self.callAiBasic,
|
||||||
|
functionCallStream=self.callAiBasicStream,
|
||||||
|
priority=PriorityEnum.BALANCED,
|
||||||
|
processingMode=ProcessingModeEnum.ADVANCED,
|
||||||
|
operationTypes=createOperationTypeRatings(
|
||||||
|
(OperationTypeEnum.PLAN, 9),
|
||||||
|
(OperationTypeEnum.DATA_ANALYSE, 10),
|
||||||
|
(OperationTypeEnum.DATA_GENERATE, 10),
|
||||||
|
(OperationTypeEnum.DATA_EXTRACT, 8),
|
||||||
|
(OperationTypeEnum.AGENT, 9),
|
||||||
|
(OperationTypeEnum.DATA_QUERY, 8),
|
||||||
|
),
|
||||||
|
version="gpt-5.4",
|
||||||
|
calculatepriceCHF=lambda processingTime, bytesSent, bytesReceived: (bytesSent / 4 / 1000) * 0.0025 + (bytesReceived / 4 / 1000) * 0.015
|
||||||
|
),
|
||||||
|
AiModel(
|
||||||
|
name="gpt-5.4-mini",
|
||||||
|
displayName="OpenAI GPT-5.4 Mini",
|
||||||
|
connectorType="openai",
|
||||||
|
apiUrl="https://api.openai.com/v1/chat/completions",
|
||||||
|
temperature=0.2,
|
||||||
|
maxTokens=128000,
|
||||||
|
contextLength=400000,
|
||||||
|
costPer1kTokensInput=0.00075, # $0.75/M tokens
|
||||||
|
costPer1kTokensOutput=0.0045, # $4.50/M tokens
|
||||||
|
speedRating=9,
|
||||||
|
qualityRating=9,
|
||||||
|
functionCall=self.callAiBasic,
|
||||||
|
functionCallStream=self.callAiBasicStream,
|
||||||
|
priority=PriorityEnum.SPEED,
|
||||||
|
processingMode=ProcessingModeEnum.BASIC,
|
||||||
|
operationTypes=createOperationTypeRatings(
|
||||||
|
(OperationTypeEnum.PLAN, 8),
|
||||||
|
(OperationTypeEnum.DATA_ANALYSE, 9),
|
||||||
|
(OperationTypeEnum.DATA_GENERATE, 9),
|
||||||
|
(OperationTypeEnum.DATA_EXTRACT, 8),
|
||||||
|
(OperationTypeEnum.AGENT, 8),
|
||||||
|
(OperationTypeEnum.DATA_QUERY, 10),
|
||||||
|
),
|
||||||
|
version="gpt-5.4-mini",
|
||||||
|
calculatepriceCHF=lambda processingTime, bytesSent, bytesReceived: (bytesSent / 4 / 1000) * 0.00075 + (bytesReceived / 4 / 1000) * 0.0045
|
||||||
|
),
|
||||||
|
AiModel(
|
||||||
|
name="gpt-5.4-nano",
|
||||||
|
displayName="OpenAI GPT-5.4 Nano",
|
||||||
|
connectorType="openai",
|
||||||
|
apiUrl="https://api.openai.com/v1/chat/completions",
|
||||||
|
temperature=0.2,
|
||||||
|
maxTokens=128000,
|
||||||
|
contextLength=400000,
|
||||||
|
costPer1kTokensInput=0.0002, # $0.20/M tokens
|
||||||
|
costPer1kTokensOutput=0.00125, # $1.25/M tokens
|
||||||
|
speedRating=10,
|
||||||
|
qualityRating=7,
|
||||||
|
functionCall=self.callAiBasic,
|
||||||
|
functionCallStream=self.callAiBasicStream,
|
||||||
|
priority=PriorityEnum.COST,
|
||||||
|
processingMode=ProcessingModeEnum.BASIC,
|
||||||
|
operationTypes=createOperationTypeRatings(
|
||||||
|
(OperationTypeEnum.PLAN, 7),
|
||||||
|
(OperationTypeEnum.DATA_ANALYSE, 7),
|
||||||
|
(OperationTypeEnum.DATA_GENERATE, 8),
|
||||||
|
(OperationTypeEnum.DATA_EXTRACT, 9),
|
||||||
|
(OperationTypeEnum.AGENT, 7),
|
||||||
|
(OperationTypeEnum.DATA_QUERY, 10),
|
||||||
|
),
|
||||||
|
version="gpt-5.4-nano",
|
||||||
|
calculatepriceCHF=lambda processingTime, bytesSent, bytesReceived: (bytesSent / 4 / 1000) * 0.0002 + (bytesReceived / 4 / 1000) * 0.00125
|
||||||
|
),
|
||||||
|
AiModel(
|
||||||
|
name="gpt-5.5",
|
||||||
|
displayName="OpenAI GPT-5.5 Vision",
|
||||||
|
connectorType="openai",
|
||||||
|
apiUrl="https://api.openai.com/v1/chat/completions",
|
||||||
|
temperature=0.2,
|
||||||
|
maxTokens=128000,
|
||||||
|
contextLength=1050000,
|
||||||
|
costPer1kTokensInput=0.005,
|
||||||
|
costPer1kTokensOutput=0.03,
|
||||||
|
speedRating=6,
|
||||||
|
qualityRating=10,
|
||||||
|
functionCall=self.callAiImage,
|
||||||
|
priority=PriorityEnum.QUALITY,
|
||||||
|
processingMode=ProcessingModeEnum.DETAILED,
|
||||||
|
operationTypes=createOperationTypeRatings(
|
||||||
|
(OperationTypeEnum.IMAGE_ANALYSE, 10)
|
||||||
|
),
|
||||||
|
version="gpt-5.5",
|
||||||
|
calculatepriceCHF=lambda processingTime, bytesSent, bytesReceived: (bytesSent / 4 / 1000) * 0.005 + (bytesReceived / 4 / 1000) * 0.03
|
||||||
|
),
|
||||||
AiModel(
|
AiModel(
|
||||||
name="text-embedding-3-small",
|
name="text-embedding-3-small",
|
||||||
displayName="OpenAI Embedding Small",
|
displayName="OpenAI Embedding Small",
|
||||||
|
|
@ -215,9 +368,17 @@ class AiOpenai(BaseConnectorAi):
|
||||||
payload = {
|
payload = {
|
||||||
"model": model.name,
|
"model": model.name,
|
||||||
"messages": messages,
|
"messages": messages,
|
||||||
"temperature": temperature,
|
# Universal output-length cap. `max_tokens` is deprecated and
|
||||||
"max_tokens": maxTokens
|
# rejected outright by gpt-5.x / o-series; `max_completion_tokens`
|
||||||
|
# is accepted by every current chat-completions model (legacy
|
||||||
|
# gpt-4o, gpt-4.1, gpt-5.x, o1/o3/o4) per OpenAI API reference.
|
||||||
|
"max_completion_tokens": maxTokens
|
||||||
}
|
}
|
||||||
|
# gpt-5.x and o-series only accept the default temperature (1) and
|
||||||
|
# return HTTP 400 `unsupported_value` for anything else - omit the
|
||||||
|
# field entirely for those models.
|
||||||
|
if _supportsCustomTemperature(model.name):
|
||||||
|
payload["temperature"] = temperature
|
||||||
|
|
||||||
if modelCall.tools:
|
if modelCall.tools:
|
||||||
payload["tools"] = modelCall.tools
|
payload["tools"] = modelCall.tools
|
||||||
|
|
@ -295,10 +456,15 @@ class AiOpenai(BaseConnectorAi):
|
||||||
payload: Dict[str, Any] = {
|
payload: Dict[str, Any] = {
|
||||||
"model": model.name,
|
"model": model.name,
|
||||||
"messages": messages,
|
"messages": messages,
|
||||||
"temperature": temperature,
|
# See callAiBasic for the rationale: `max_completion_tokens`
|
||||||
"max_tokens": model.maxTokens,
|
# is the universal output-length parameter; `max_tokens` is
|
||||||
|
# deprecated and rejected by gpt-5.x / o-series.
|
||||||
|
"max_completion_tokens": model.maxTokens,
|
||||||
"stream": True,
|
"stream": True,
|
||||||
}
|
}
|
||||||
|
if _supportsCustomTemperature(model.name):
|
||||||
|
payload["temperature"] = temperature
|
||||||
|
|
||||||
if modelCall.tools:
|
if modelCall.tools:
|
||||||
payload["tools"] = modelCall.tools
|
payload["tools"] = modelCall.tools
|
||||||
payload["tool_choice"] = modelCall.toolChoice or "auto"
|
payload["tool_choice"] = modelCall.toolChoice or "auto"
|
||||||
|
|
@ -449,15 +615,15 @@ class AiOpenai(BaseConnectorAi):
|
||||||
# Use the messages directly - they should already contain the image data
|
# Use the messages directly - they should already contain the image data
|
||||||
# in the format: {"type": "image_url", "image_url": {"url": "data:...base64,..."}}
|
# in the format: {"type": "image_url", "image_url": {"url": "data:...base64,..."}}
|
||||||
|
|
||||||
# Use parameters from model
|
|
||||||
temperature = model.temperature
|
temperature = model.temperature
|
||||||
# Don't set maxTokens - let the model use its full context length
|
# Don't set maxTokens - let the model use its full context length
|
||||||
|
|
||||||
payload = {
|
payload = {
|
||||||
"model": model.name,
|
"model": model.name,
|
||||||
"messages": messages,
|
"messages": messages,
|
||||||
"temperature": temperature
|
|
||||||
}
|
}
|
||||||
|
if _supportsCustomTemperature(model.name):
|
||||||
|
payload["temperature"] = temperature
|
||||||
|
|
||||||
response = await self.httpClient.post(
|
response = await self.httpClient.post(
|
||||||
model.apiUrl,
|
model.apiUrl,
|
||||||
|
|
|
||||||
|
|
@ -9,13 +9,15 @@ googleAuthScopes = [
|
||||||
"https://www.googleapis.com/auth/userinfo.profile",
|
"https://www.googleapis.com/auth/userinfo.profile",
|
||||||
]
|
]
|
||||||
|
|
||||||
# Google — Data app (Gmail + Drive + identity for token responses)
|
# Google — Data app (Gmail + Drive + Calendar + Contacts + identity for token responses)
|
||||||
googleDataScopes = [
|
googleDataScopes = [
|
||||||
"openid",
|
"openid",
|
||||||
"https://www.googleapis.com/auth/userinfo.email",
|
"https://www.googleapis.com/auth/userinfo.email",
|
||||||
"https://www.googleapis.com/auth/userinfo.profile",
|
"https://www.googleapis.com/auth/userinfo.profile",
|
||||||
"https://www.googleapis.com/auth/gmail.readonly",
|
"https://www.googleapis.com/auth/gmail.readonly",
|
||||||
"https://www.googleapis.com/auth/drive.readonly",
|
"https://www.googleapis.com/auth/drive.readonly",
|
||||||
|
"https://www.googleapis.com/auth/calendar.readonly",
|
||||||
|
"https://www.googleapis.com/auth/contacts.readonly",
|
||||||
]
|
]
|
||||||
|
|
||||||
# Microsoft — Auth app: Graph profile only (MSAL adds openid, profile, offline_access, …)
|
# Microsoft — Auth app: Graph profile only (MSAL adds openid, profile, offline_access, …)
|
||||||
|
|
@ -34,9 +36,18 @@ msftDataScopes = [
|
||||||
"OnlineMeetings.Read",
|
"OnlineMeetings.Read",
|
||||||
"Chat.ReadWrite",
|
"Chat.ReadWrite",
|
||||||
"ChatMessage.Send",
|
"ChatMessage.Send",
|
||||||
|
"Calendars.Read",
|
||||||
|
"Contacts.Read",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
def msftDataScopesForRefresh() -> str:
|
def msftDataScopesForRefresh() -> str:
|
||||||
"""Space-separated scope string identical to authorization request (Token v2 refresh)."""
|
"""Space-separated scope string identical to authorization request (Token v2 refresh)."""
|
||||||
return " ".join(msftDataScopes)
|
return " ".join(msftDataScopes)
|
||||||
|
|
||||||
|
|
||||||
|
# Infomaniak intentionally has no OAuth scope set: the kDrive + Mail data APIs
|
||||||
|
# are only reachable with manually issued Personal Access Tokens (see
|
||||||
|
# wiki/d-guides/infomaniak-token-setup.md). The OAuth /authorize endpoint at
|
||||||
|
# login.infomaniak.com only accepts identity scopes (openid/profile/email/phone)
|
||||||
|
# and does not return tokens that work against /1/* data routes.
|
||||||
|
|
|
||||||
|
|
@ -30,6 +30,7 @@ class TokenManager:
|
||||||
self.google_client_id = APP_CONFIG.get("Service_GOOGLE_DATA_CLIENT_ID")
|
self.google_client_id = APP_CONFIG.get("Service_GOOGLE_DATA_CLIENT_ID")
|
||||||
self.google_client_secret = APP_CONFIG.get("Service_GOOGLE_DATA_CLIENT_SECRET")
|
self.google_client_secret = APP_CONFIG.get("Service_GOOGLE_DATA_CLIENT_SECRET")
|
||||||
|
|
||||||
|
|
||||||
def refreshMicrosoftToken(self, refreshToken: str, userId: str, oldToken: Token) -> Optional[Token]:
|
def refreshMicrosoftToken(self, refreshToken: str, userId: str, oldToken: Token) -> Optional[Token]:
|
||||||
"""Refresh Microsoft OAuth token using refresh token"""
|
"""Refresh Microsoft OAuth token using refresh token"""
|
||||||
try:
|
try:
|
||||||
|
|
|
||||||
|
|
@ -21,6 +21,47 @@ logger = logging.getLogger(__name__)
|
||||||
# No mapping needed - table name = Pydantic model name exactly
|
# No mapping needed - table name = Pydantic model name exactly
|
||||||
|
|
||||||
|
|
||||||
|
class DatabaseQueryError(RuntimeError):
|
||||||
|
"""Raised by DB read methods when the underlying SQL query failed.
|
||||||
|
|
||||||
|
Empty result sets do NOT raise this — they return ``[]`` / ``None`` /
|
||||||
|
``{"items": [], "totalItems": 0, "totalPages": 0}`` as before. This
|
||||||
|
exception is reserved for **real** failures: psycopg2 ProgrammingError,
|
||||||
|
DataError, OperationalError, IntegrityError, plus any unexpected
|
||||||
|
Python error raised inside a query path.
|
||||||
|
|
||||||
|
Read methods used to silently swallow such errors and return empty
|
||||||
|
collections, which made every caller incapable of distinguishing
|
||||||
|
"no rows" from "broken query / type adapter / dropped column / lost
|
||||||
|
connection". That hid concrete bugs (e.g. dict passed where Postgres
|
||||||
|
expected a UUID string) behind misleading downstream "no record found"
|
||||||
|
errors.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, table: str, message: str, original: BaseException = None):
|
||||||
|
super().__init__(f"{table}: {message}")
|
||||||
|
self.table = table
|
||||||
|
self.original = original
|
||||||
|
|
||||||
|
|
||||||
|
def _rollbackQuietly(connection) -> None:
|
||||||
|
"""Restore the connection state after a failed query.
|
||||||
|
|
||||||
|
Postgres puts the connection in an error state after any failed
|
||||||
|
statement; subsequent queries on the same connection raise
|
||||||
|
``InFailedSqlTransaction`` until we rollback. We swallow rollback
|
||||||
|
errors because the original query error is what the caller should
|
||||||
|
see — a secondary rollback failure typically means the connection
|
||||||
|
is gone and will be reopened on the next ``_ensure_connection``.
|
||||||
|
"""
|
||||||
|
if connection is None:
|
||||||
|
return
|
||||||
|
try:
|
||||||
|
connection.rollback()
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
class SystemTable(PowerOnModel):
|
class SystemTable(PowerOnModel):
|
||||||
"""Data model for system table entries"""
|
"""Data model for system table entries"""
|
||||||
|
|
||||||
|
|
@ -76,7 +117,7 @@ def _isJsonbType(fieldType) -> bool:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
|
||||||
def _get_model_fields(model_class) -> Dict[str, str]:
|
def getModelFields(model_class) -> Dict[str, str]:
|
||||||
"""Get all fields from Pydantic model and map to SQL types.
|
"""Get all fields from Pydantic model and map to SQL types.
|
||||||
|
|
||||||
Supports explicit db_type override via json_schema_extra={"db_type": "vector(1536)"}.
|
Supports explicit db_type override via json_schema_extra={"db_type": "vector(1536)"}.
|
||||||
|
|
@ -121,22 +162,7 @@ def _get_model_fields(model_class) -> Dict[str, str]:
|
||||||
return fields
|
return fields
|
||||||
|
|
||||||
|
|
||||||
def _get_fk_sort_meta(model_class) -> Dict[str, Dict[str, str]]:
|
def parseRecordFields(record: Dict[str, Any], fields: Dict[str, str], context: str = "") -> None:
|
||||||
"""Map FK field name -> {model, labelField} from json_schema_extra (fk_model + frontend_fk_display_field)."""
|
|
||||||
result: Dict[str, Dict[str, str]] = {}
|
|
||||||
for name, field_info in model_class.model_fields.items():
|
|
||||||
extra = field_info.json_schema_extra
|
|
||||||
if not extra or not isinstance(extra, dict):
|
|
||||||
continue
|
|
||||||
fk_model = extra.get("fk_model")
|
|
||||||
label_field = extra.get("frontend_fk_display_field")
|
|
||||||
if fk_model and label_field:
|
|
||||||
result[name] = {"model": str(fk_model), "labelField": str(label_field)}
|
|
||||||
return result
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def _parseRecordFields(record: Dict[str, Any], fields: Dict[str, str], context: str = "") -> None:
|
|
||||||
"""Parse record fields in-place: numeric typing, vector parsing, JSONB deserialization."""
|
"""Parse record fields in-place: numeric typing, vector parsing, JSONB deserialization."""
|
||||||
import json as _json
|
import json as _json
|
||||||
|
|
||||||
|
|
@ -189,7 +215,7 @@ _current_user_id: contextvars.ContextVar[Optional[str]] = contextvars.ContextVar
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def _get_cached_connector(
|
def getCachedConnector(
|
||||||
dbHost: str,
|
dbHost: str,
|
||||||
dbDatabase: str,
|
dbDatabase: str,
|
||||||
dbUser: str = None,
|
dbUser: str = None,
|
||||||
|
|
@ -553,7 +579,7 @@ class DatabaseConnector:
|
||||||
}
|
}
|
||||||
|
|
||||||
# Desired columns based on model
|
# Desired columns based on model
|
||||||
model_fields = _get_model_fields(model_class)
|
model_fields = getModelFields(model_class)
|
||||||
desired_columns = set(["id"]) | set(model_fields.keys())
|
desired_columns = set(["id"]) | set(model_fields.keys())
|
||||||
|
|
||||||
# Add missing columns
|
# Add missing columns
|
||||||
|
|
@ -576,29 +602,48 @@ class DatabaseConnector:
|
||||||
f"Could not add column '{col}' to '{table}': {add_err}"
|
f"Could not add column '{col}' to '{table}': {add_err}"
|
||||||
)
|
)
|
||||||
|
|
||||||
# Targeted type-downgrade: if a model field has been
|
# Column type migrations for existing tables.
|
||||||
# changed from a structured type (JSONB) to a plain
|
# TEXT→DOUBLE PRECISION handles three value shapes:
|
||||||
# TEXT field, alter the column so writes don't fail.
|
# 1. NULL / empty string → NULL
|
||||||
# JSONB -> TEXT is a safe, lossless cast (JSONB is
|
# 2. ISO date(time) like "2025-01-22" or "2025-01-22T10:00:00+00" → epoch via EXTRACT
|
||||||
# rendered as its JSON-text representation; the
|
# 3. Plain numeric string like "3.14" → direct cast
|
||||||
# corresponding Pydantic ``@field_validator`` is
|
_TEXT_TO_DOUBLE = (
|
||||||
# responsible for re-decoding legacy data on read).
|
'DOUBLE PRECISION USING CASE'
|
||||||
|
' WHEN "{col}" IS NULL OR "{col}" = \'\' THEN NULL'
|
||||||
|
' WHEN "{col}" ~ \'^\\d{4}-\\d{2}-\\d{2}\''
|
||||||
|
' THEN EXTRACT(EPOCH FROM "{col}"::timestamptz)'
|
||||||
|
' ELSE NULLIF("{col}", \'\')::double precision'
|
||||||
|
' END'
|
||||||
|
)
|
||||||
|
_SAFE_TYPE_CHANGES = {
|
||||||
|
("jsonb", "TEXT"): "TEXT USING \"{col}\"::text",
|
||||||
|
("text", "DOUBLE PRECISION"): _TEXT_TO_DOUBLE,
|
||||||
|
("text", "INTEGER"): "INTEGER USING NULLIF(\"{col}\", '')::integer",
|
||||||
|
("timestamp without time zone", "DOUBLE PRECISION"): 'DOUBLE PRECISION USING EXTRACT(EPOCH FROM "{col}" AT TIME ZONE \'UTC\')',
|
||||||
|
("timestamp with time zone", "DOUBLE PRECISION"): 'DOUBLE PRECISION USING EXTRACT(EPOCH FROM "{col}")',
|
||||||
|
("date", "DOUBLE PRECISION"): 'DOUBLE PRECISION USING EXTRACT(EPOCH FROM "{col}"::timestamp AT TIME ZONE \'UTC\')',
|
||||||
|
}
|
||||||
for col in sorted(desired_columns & existing_columns):
|
for col in sorted(desired_columns & existing_columns):
|
||||||
if col == "id":
|
if col == "id":
|
||||||
continue
|
continue
|
||||||
desired_sql = (model_fields.get(col) or "").upper()
|
desired_sql = (model_fields.get(col) or "").upper()
|
||||||
currentType = existing_column_types.get(col, "")
|
currentType = existing_column_types.get(col, "")
|
||||||
if desired_sql == "TEXT" and currentType == "jsonb":
|
migration = _SAFE_TYPE_CHANGES.get((currentType, desired_sql))
|
||||||
|
if migration:
|
||||||
|
castExpr = migration.replace("{col}", col)
|
||||||
try:
|
try:
|
||||||
|
cursor.execute('SAVEPOINT col_migrate')
|
||||||
cursor.execute(
|
cursor.execute(
|
||||||
f'ALTER TABLE "{table}" ALTER COLUMN "{col}" TYPE TEXT USING "{col}"::text'
|
f'ALTER TABLE "{table}" ALTER COLUMN "{col}" TYPE {castExpr}'
|
||||||
)
|
)
|
||||||
|
cursor.execute('RELEASE SAVEPOINT col_migrate')
|
||||||
logger.info(
|
logger.info(
|
||||||
f"Downgraded column '{col}' from JSONB to TEXT on '{table}'"
|
f"Migrated column '{col}' from {currentType} to {desired_sql} on '{table}'"
|
||||||
)
|
)
|
||||||
except Exception as alter_err:
|
except Exception as alter_err:
|
||||||
|
cursor.execute('ROLLBACK TO SAVEPOINT col_migrate')
|
||||||
logger.warning(
|
logger.warning(
|
||||||
f"Could not downgrade column '{col}' on '{table}': {alter_err}"
|
f"Could not migrate column '{col}' on '{table}': {alter_err}"
|
||||||
)
|
)
|
||||||
except Exception as ensure_err:
|
except Exception as ensure_err:
|
||||||
logger.warning(
|
logger.warning(
|
||||||
|
|
@ -633,7 +678,7 @@ class DatabaseConnector:
|
||||||
|
|
||||||
def _create_table_from_model(self, cursor, table: str, model_class: type) -> None:
|
def _create_table_from_model(self, cursor, table: str, model_class: type) -> None:
|
||||||
"""Create table with columns matching Pydantic model fields."""
|
"""Create table with columns matching Pydantic model fields."""
|
||||||
fields = _get_model_fields(model_class)
|
fields = getModelFields(model_class)
|
||||||
|
|
||||||
# Enable pgvector if any field uses vector type
|
# Enable pgvector if any field uses vector type
|
||||||
if any(_isVectorType(sqlType) for sqlType in fields.values()):
|
if any(_isVectorType(sqlType) for sqlType in fields.values()):
|
||||||
|
|
@ -666,7 +711,7 @@ class DatabaseConnector:
|
||||||
) -> None:
|
) -> None:
|
||||||
"""Save record to normalized table with explicit columns."""
|
"""Save record to normalized table with explicit columns."""
|
||||||
# Get columns from Pydantic model instead of database schema
|
# Get columns from Pydantic model instead of database schema
|
||||||
fields = _get_model_fields(model_class)
|
fields = getModelFields(model_class)
|
||||||
columns = ["id"] + [field for field in fields.keys() if field != "id"]
|
columns = ["id"] + [field for field in fields.keys() if field != "id"]
|
||||||
|
|
||||||
if not columns:
|
if not columns:
|
||||||
|
|
@ -751,14 +796,15 @@ class DatabaseConnector:
|
||||||
|
|
||||||
# Convert row to dict and handle JSONB fields
|
# Convert row to dict and handle JSONB fields
|
||||||
record = dict(row)
|
record = dict(row)
|
||||||
fields = _get_model_fields(model_class)
|
fields = getModelFields(model_class)
|
||||||
|
|
||||||
_parseRecordFields(record, fields, f"record {recordId}")
|
parseRecordFields(record, fields, f"record {recordId}")
|
||||||
|
|
||||||
return record
|
return record
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error(f"Error loading record {recordId} from table {table}: {e}")
|
logger.error(f"Error loading record {recordId} from table {table}: {e}")
|
||||||
return None
|
_rollbackQuietly(getattr(self, "connection", None))
|
||||||
|
raise DatabaseQueryError(table, str(e), original=e) from e
|
||||||
|
|
||||||
def getRecord(self, model_class: type, recordId: str) -> Optional[Dict[str, Any]]:
|
def getRecord(self, model_class: type, recordId: str) -> Optional[Dict[str, Any]]:
|
||||||
"""Load one row by primary key (routes / services; wraps _loadRecord)."""
|
"""Load one row by primary key (routes / services; wraps _loadRecord)."""
|
||||||
|
|
@ -788,7 +834,10 @@ class DatabaseConnector:
|
||||||
createdTs = record.get("sysCreatedAt")
|
createdTs = record.get("sysCreatedAt")
|
||||||
if createdTs is None or createdTs == 0 or createdTs == 0.0:
|
if createdTs is None or createdTs == 0 or createdTs == 0.0:
|
||||||
record["sysCreatedAt"] = currentTime
|
record["sysCreatedAt"] = currentTime
|
||||||
if effective_user_id:
|
# Do not wipe caller-provided sysCreatedBy (e.g. FileItem from createFile with
|
||||||
|
# real user). ContextVar can be "system" for the DB pool while the business
|
||||||
|
# user is set on the record from model_dump().
|
||||||
|
if effective_user_id and not record.get("sysCreatedBy"):
|
||||||
record["sysCreatedBy"] = effective_user_id
|
record["sysCreatedBy"] = effective_user_id
|
||||||
elif not record.get("sysCreatedBy"):
|
elif not record.get("sysCreatedBy"):
|
||||||
if effective_user_id:
|
if effective_user_id:
|
||||||
|
|
@ -822,10 +871,10 @@ class DatabaseConnector:
|
||||||
cursor.execute(f'SELECT * FROM "{table}" ORDER BY "id"')
|
cursor.execute(f'SELECT * FROM "{table}" ORDER BY "id"')
|
||||||
records = [dict(row) for row in cursor.fetchall()]
|
records = [dict(row) for row in cursor.fetchall()]
|
||||||
|
|
||||||
fields = _get_model_fields(model_class)
|
fields = getModelFields(model_class)
|
||||||
modelFields = model_class.model_fields
|
modelFields = model_class.model_fields
|
||||||
for record in records:
|
for record in records:
|
||||||
_parseRecordFields(record, fields, f"table {table}")
|
parseRecordFields(record, fields, f"table {table}")
|
||||||
# Set type-aware defaults for NULL JSONB fields
|
# Set type-aware defaults for NULL JSONB fields
|
||||||
for fieldName, fieldType in fields.items():
|
for fieldName, fieldType in fields.items():
|
||||||
if fieldType == "JSONB" and fieldName in record and record[fieldName] is None:
|
if fieldType == "JSONB" and fieldName in record and record[fieldName] is None:
|
||||||
|
|
@ -844,7 +893,8 @@ class DatabaseConnector:
|
||||||
return records
|
return records
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error(f"Error loading table {table}: {e}")
|
logger.error(f"Error loading table {table}: {e}")
|
||||||
return []
|
_rollbackQuietly(getattr(self, "connection", None))
|
||||||
|
raise DatabaseQueryError(table, str(e), original=e) from e
|
||||||
|
|
||||||
def _registerInitialId(self, table: str, initialId: str) -> bool:
|
def _registerInitialId(self, table: str, initialId: str) -> bool:
|
||||||
"""Registers the initial ID for a table."""
|
"""Registers the initial ID for a table."""
|
||||||
|
|
@ -1011,10 +1061,10 @@ class DatabaseConnector:
|
||||||
cursor.execute(query, where_values)
|
cursor.execute(query, where_values)
|
||||||
records = [dict(row) for row in cursor.fetchall()]
|
records = [dict(row) for row in cursor.fetchall()]
|
||||||
|
|
||||||
fields = _get_model_fields(model_class)
|
fields = getModelFields(model_class)
|
||||||
modelFields = model_class.model_fields
|
modelFields = model_class.model_fields
|
||||||
for record in records:
|
for record in records:
|
||||||
_parseRecordFields(record, fields, f"table {table}")
|
parseRecordFields(record, fields, f"table {table}")
|
||||||
for fieldName, fieldType in fields.items():
|
for fieldName, fieldType in fields.items():
|
||||||
if fieldType == "JSONB" and fieldName in record and record[fieldName] is None:
|
if fieldType == "JSONB" and fieldName in record and record[fieldName] is None:
|
||||||
fieldInfo = modelFields.get(fieldName)
|
fieldInfo = modelFields.get(fieldName)
|
||||||
|
|
@ -1043,7 +1093,8 @@ class DatabaseConnector:
|
||||||
return records
|
return records
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error(f"Error loading records from table {table}: {e}")
|
logger.error(f"Error loading records from table {table}: {e}")
|
||||||
return []
|
_rollbackQuietly(getattr(self, "connection", None))
|
||||||
|
raise DatabaseQueryError(table, str(e), original=e) from e
|
||||||
|
|
||||||
def _buildPaginationClauses(
|
def _buildPaginationClauses(
|
||||||
self,
|
self,
|
||||||
|
|
@ -1055,7 +1106,7 @@ class DatabaseConnector:
|
||||||
Translate PaginationParams + recordFilter into SQL clauses.
|
Translate PaginationParams + recordFilter into SQL clauses.
|
||||||
Returns (where_clause, order_clause, limit_clause, values, count_values).
|
Returns (where_clause, order_clause, limit_clause, values, count_values).
|
||||||
"""
|
"""
|
||||||
fields = _get_model_fields(model_class)
|
fields = getModelFields(model_class)
|
||||||
validColumns = set(fields.keys())
|
validColumns = set(fields.keys())
|
||||||
|
|
||||||
where_parts: List[str] = []
|
where_parts: List[str] = []
|
||||||
|
|
@ -1111,8 +1162,15 @@ class DatabaseConnector:
|
||||||
values.append(f"%{v}")
|
values.append(f"%{v}")
|
||||||
elif op in ("gt", "gte", "lt", "lte"):
|
elif op in ("gt", "gte", "lt", "lte"):
|
||||||
sqlOp = {"gt": ">", "gte": ">=", "lt": "<", "lte": "<="}[op]
|
sqlOp = {"gt": ">", "gte": ">=", "lt": "<", "lte": "<="}[op]
|
||||||
where_parts.append(f'"{key}"::TEXT {sqlOp} %s')
|
if colType in ("INTEGER", "DOUBLE PRECISION"):
|
||||||
values.append(str(v))
|
try:
|
||||||
|
where_parts.append(f'"{key}"::double precision {sqlOp} %s')
|
||||||
|
values.append(float(v))
|
||||||
|
except (ValueError, TypeError):
|
||||||
|
continue
|
||||||
|
else:
|
||||||
|
where_parts.append(f'"{key}"::TEXT {sqlOp} %s')
|
||||||
|
values.append(str(v))
|
||||||
elif op == "between":
|
elif op == "between":
|
||||||
fromVal = v.get("from", "") if isinstance(v, dict) else ""
|
fromVal = v.get("from", "") if isinstance(v, dict) else ""
|
||||||
toVal = v.get("to", "") if isinstance(v, dict) else ""
|
toVal = v.get("to", "") if isinstance(v, dict) else ""
|
||||||
|
|
@ -1137,6 +1195,21 @@ class DatabaseConnector:
|
||||||
toTs = _dt.strptime(str(toVal), '%Y-%m-%d').replace(hour=23, minute=59, second=59, tzinfo=_tz.utc).timestamp()
|
toTs = _dt.strptime(str(toVal), '%Y-%m-%d').replace(hour=23, minute=59, second=59, tzinfo=_tz.utc).timestamp()
|
||||||
where_parts.append(f'"{key}" <= %s')
|
where_parts.append(f'"{key}" <= %s')
|
||||||
values.append(toTs)
|
values.append(toTs)
|
||||||
|
elif isNumericCol:
|
||||||
|
try:
|
||||||
|
if fromVal and toVal:
|
||||||
|
where_parts.append(
|
||||||
|
f'"{key}"::double precision >= %s AND "{key}"::double precision <= %s'
|
||||||
|
)
|
||||||
|
values.extend([float(fromVal), float(toVal)])
|
||||||
|
elif fromVal:
|
||||||
|
where_parts.append(f'"{key}"::double precision >= %s')
|
||||||
|
values.append(float(fromVal))
|
||||||
|
elif toVal:
|
||||||
|
where_parts.append(f'"{key}"::double precision <= %s')
|
||||||
|
values.append(float(toVal))
|
||||||
|
except (ValueError, TypeError):
|
||||||
|
continue
|
||||||
else:
|
else:
|
||||||
if fromVal and toVal:
|
if fromVal and toVal:
|
||||||
where_parts.append(f'"{key}"::TEXT >= %s AND "{key}"::TEXT <= %s')
|
where_parts.append(f'"{key}"::TEXT >= %s AND "{key}"::TEXT <= %s')
|
||||||
|
|
@ -1214,10 +1287,10 @@ class DatabaseConnector:
|
||||||
cursor.execute(dataSql, values)
|
cursor.execute(dataSql, values)
|
||||||
records = [dict(row) for row in cursor.fetchall()]
|
records = [dict(row) for row in cursor.fetchall()]
|
||||||
|
|
||||||
fields = _get_model_fields(model_class)
|
fields = getModelFields(model_class)
|
||||||
modelFields = model_class.model_fields
|
modelFields = model_class.model_fields
|
||||||
for record in records:
|
for record in records:
|
||||||
_parseRecordFields(record, fields, f"table {table}")
|
parseRecordFields(record, fields, f"table {table}")
|
||||||
for fieldName, fieldType in fields.items():
|
for fieldName, fieldType in fields.items():
|
||||||
if fieldType == "JSONB" and fieldName in record and record[fieldName] is None:
|
if fieldType == "JSONB" and fieldName in record and record[fieldName] is None:
|
||||||
fieldInfo = modelFields.get(fieldName)
|
fieldInfo = modelFields.get(fieldName)
|
||||||
|
|
@ -1235,13 +1308,17 @@ class DatabaseConnector:
|
||||||
if fieldFilter and isinstance(fieldFilter, list):
|
if fieldFilter and isinstance(fieldFilter, list):
|
||||||
records = [{f: r[f] for f in fieldFilter if f in r} for r in records]
|
records = [{f: r[f] for f in fieldFilter if f in r} for r in records]
|
||||||
|
|
||||||
pageSize = pagination.pageSize if pagination else max(totalItems, 1)
|
from modules.routes.routeHelpers import enrichRowsWithFkLabels
|
||||||
totalPages = math.ceil(totalItems / pageSize) if totalItems > 0 else 0
|
enrichRowsWithFkLabels(records, model_class)
|
||||||
|
|
||||||
return {"items": records, "totalItems": totalItems, "totalPages": totalPages}
|
pageSize = pagination.pageSize if pagination else max(totalItems, 1)
|
||||||
|
totalPages = math.ceil(totalItems / pageSize) if totalItems > 0 else 0
|
||||||
|
|
||||||
|
return {"items": records, "totalItems": totalItems, "totalPages": totalPages}
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error(f"Error in getRecordsetPaginated for table {table}: {e}")
|
logger.error(f"Error in getRecordsetPaginated for table {table}: {e}")
|
||||||
return {"items": [], "totalItems": 0, "totalPages": 0}
|
_rollbackQuietly(getattr(self, "connection", None))
|
||||||
|
raise DatabaseQueryError(table, str(e), original=e) from e
|
||||||
|
|
||||||
def getDistinctColumnValues(
|
def getDistinctColumnValues(
|
||||||
self,
|
self,
|
||||||
|
|
@ -1249,13 +1326,18 @@ class DatabaseConnector:
|
||||||
column: str,
|
column: str,
|
||||||
pagination=None,
|
pagination=None,
|
||||||
recordFilter: Dict[str, Any] = None,
|
recordFilter: Dict[str, Any] = None,
|
||||||
) -> List[str]:
|
includeEmpty: bool = True,
|
||||||
"""
|
) -> List[Optional[str]]:
|
||||||
Returns sorted distinct non-null values for a column using SQL DISTINCT.
|
"""Return sorted distinct values for a column using SQL DISTINCT.
|
||||||
|
|
||||||
|
When ``includeEmpty`` is True (default), NULL and empty-string rows are
|
||||||
|
represented as a single ``None`` entry at the end of the list — this
|
||||||
|
allows the frontend to offer a "(Leer)" filter option.
|
||||||
|
|
||||||
Applies cross-filtering (all filters except the requested column).
|
Applies cross-filtering (all filters except the requested column).
|
||||||
"""
|
"""
|
||||||
table = model_class.__name__
|
table = model_class.__name__
|
||||||
fields = _get_model_fields(model_class)
|
fields = getModelFields(model_class)
|
||||||
|
|
||||||
if column not in fields:
|
if column not in fields:
|
||||||
return []
|
return []
|
||||||
|
|
@ -1274,21 +1356,32 @@ class DatabaseConnector:
|
||||||
where_clause, _, _, values, _ = \
|
where_clause, _, _, values, _ = \
|
||||||
self._buildPaginationClauses(model_class, pagination, recordFilter)
|
self._buildPaginationClauses(model_class, pagination, recordFilter)
|
||||||
|
|
||||||
sql = (
|
nonNullCond = f'"{column}" IS NOT NULL AND "{column}"::TEXT != \'\''
|
||||||
f'SELECT DISTINCT "{column}"::TEXT AS val FROM "{table}"{where_clause} '
|
if where_clause:
|
||||||
f'WHERE "{column}" IS NOT NULL AND "{column}"::TEXT != \'\' '
|
sql = f'SELECT DISTINCT "{column}"::TEXT AS val FROM "{table}"{where_clause} AND {nonNullCond} ORDER BY val'
|
||||||
if not where_clause else
|
else:
|
||||||
f'SELECT DISTINCT "{column}"::TEXT AS val FROM "{table}"{where_clause} '
|
sql = f'SELECT DISTINCT "{column}"::TEXT AS val FROM "{table}" WHERE {nonNullCond} ORDER BY val'
|
||||||
f'AND "{column}" IS NOT NULL AND "{column}"::TEXT != \'\' '
|
|
||||||
)
|
|
||||||
sql += 'ORDER BY val'
|
|
||||||
|
|
||||||
with self.connection.cursor() as cursor:
|
with self.connection.cursor() as cursor:
|
||||||
cursor.execute(sql, values)
|
cursor.execute(sql, values)
|
||||||
return [row["val"] for row in cursor.fetchall()]
|
result: List[Optional[str]] = [row["val"] for row in cursor.fetchall()]
|
||||||
|
|
||||||
|
if includeEmpty:
|
||||||
|
emptyCond = f'"{column}" IS NULL OR "{column}"::TEXT = \'\''
|
||||||
|
if where_clause:
|
||||||
|
emptySql = f'SELECT 1 FROM "{table}"{where_clause} AND ({emptyCond}) LIMIT 1'
|
||||||
|
else:
|
||||||
|
emptySql = f'SELECT 1 FROM "{table}" WHERE ({emptyCond}) LIMIT 1'
|
||||||
|
with self.connection.cursor() as cursor:
|
||||||
|
cursor.execute(emptySql, values)
|
||||||
|
if cursor.fetchone():
|
||||||
|
result.append(None)
|
||||||
|
|
||||||
|
return result
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error(f"Error in getDistinctColumnValues for {table}.{column}: {e}")
|
logger.error(f"Error in getDistinctColumnValues for {table}.{column}: {e}")
|
||||||
return []
|
_rollbackQuietly(getattr(self, "connection", None))
|
||||||
|
raise DatabaseQueryError(table, str(e), original=e) from e
|
||||||
|
|
||||||
def recordCreate(
|
def recordCreate(
|
||||||
self, model_class: type, record: Union[Dict[str, Any], BaseModel]
|
self, model_class: type, record: Union[Dict[str, Any], BaseModel]
|
||||||
|
|
@ -1419,7 +1512,7 @@ class DatabaseConnector:
|
||||||
if not self._ensureTableExists(model_class):
|
if not self._ensureTableExists(model_class):
|
||||||
raise ValueError(f"Table {table} does not exist")
|
raise ValueError(f"Table {table} does not exist")
|
||||||
|
|
||||||
fields = _get_model_fields(model_class)
|
fields = getModelFields(model_class)
|
||||||
columns = ["id"] + [f for f in fields.keys() if f != "id"]
|
columns = ["id"] + [f for f in fields.keys() if f != "id"]
|
||||||
modelFields = model_class.model_fields
|
modelFields = model_class.model_fields
|
||||||
|
|
||||||
|
|
@ -1441,7 +1534,7 @@ class DatabaseConnector:
|
||||||
createdTs = rec.get("sysCreatedAt")
|
createdTs = rec.get("sysCreatedAt")
|
||||||
if createdTs is None or createdTs == 0 or createdTs == 0.0:
|
if createdTs is None or createdTs == 0 or createdTs == 0.0:
|
||||||
rec["sysCreatedAt"] = currentTime
|
rec["sysCreatedAt"] = currentTime
|
||||||
if effectiveUserId:
|
if effectiveUserId and not rec.get("sysCreatedBy"):
|
||||||
rec["sysCreatedBy"] = effectiveUserId
|
rec["sysCreatedBy"] = effectiveUserId
|
||||||
elif not rec.get("sysCreatedBy") and effectiveUserId:
|
elif not rec.get("sysCreatedBy") and effectiveUserId:
|
||||||
rec["sysCreatedBy"] = effectiveUserId
|
rec["sysCreatedBy"] = effectiveUserId
|
||||||
|
|
@ -1541,7 +1634,7 @@ class DatabaseConnector:
|
||||||
if not self._ensureTableExists(model_class):
|
if not self._ensureTableExists(model_class):
|
||||||
return 0
|
return 0
|
||||||
|
|
||||||
fields = _get_model_fields(model_class)
|
fields = getModelFields(model_class)
|
||||||
clauses: List[str] = []
|
clauses: List[str] = []
|
||||||
params: List[Any] = []
|
params: List[Any] = []
|
||||||
for key, val in recordFilter.items():
|
for key, val in recordFilter.items():
|
||||||
|
|
@ -1659,14 +1752,15 @@ class DatabaseConnector:
|
||||||
cursor.execute(query, params)
|
cursor.execute(query, params)
|
||||||
records = [dict(row) for row in cursor.fetchall()]
|
records = [dict(row) for row in cursor.fetchall()]
|
||||||
|
|
||||||
fields = _get_model_fields(modelClass)
|
fields = getModelFields(modelClass)
|
||||||
for record in records:
|
for record in records:
|
||||||
_parseRecordFields(record, fields, f"semanticSearch {table}")
|
parseRecordFields(record, fields, f"semanticSearch {table}")
|
||||||
|
|
||||||
return records
|
return records
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error(f"Error in semantic search on {table}: {e}")
|
logger.error(f"Error in semantic search on {table}: {e}")
|
||||||
return []
|
_rollbackQuietly(getattr(self, "connection", None))
|
||||||
|
raise DatabaseQueryError(table, str(e), original=e) from e
|
||||||
|
|
||||||
def close(self, forceClose: bool = False):
|
def close(self, forceClose: bool = False):
|
||||||
"""Close the database connection.
|
"""Close the database connection.
|
||||||
|
|
|
||||||
|
|
@ -58,6 +58,12 @@ class ConnectorResolver:
|
||||||
except ImportError:
|
except ImportError:
|
||||||
logger.warning("ClickupConnector not available")
|
logger.warning("ClickupConnector not available")
|
||||||
|
|
||||||
|
try:
|
||||||
|
from modules.connectors.providerInfomaniak.connectorInfomaniak import InfomaniakConnector
|
||||||
|
ConnectorResolver._providerRegistry["infomaniak"] = InfomaniakConnector
|
||||||
|
except ImportError:
|
||||||
|
logger.warning("InfomaniakConnector not available")
|
||||||
|
|
||||||
async def resolve(self, connectionId: str) -> ProviderConnector:
|
async def resolve(self, connectionId: str) -> ProviderConnector:
|
||||||
"""Resolve connectionId to a ProviderConnector with a fresh access token."""
|
"""Resolve connectionId to a ProviderConnector with a fresh access token."""
|
||||||
connection = await self._loadConnection(connectionId)
|
connection = await self._loadConnection(connectionId)
|
||||||
|
|
|
||||||
|
|
@ -210,6 +210,9 @@ class ClickupListsAdapter(ServiceAdapter):
|
||||||
data = await self._svc.getTask(task_id)
|
data = await self._svc.getTask(task_id)
|
||||||
if isinstance(data, dict) and data.get("error"):
|
if isinstance(data, dict) and data.get("error"):
|
||||||
return json.dumps(data).encode("utf-8")
|
return json.dumps(data).encode("utf-8")
|
||||||
|
returnedId = data.get("id", "") if isinstance(data, dict) else ""
|
||||||
|
if returnedId and returnedId != task_id:
|
||||||
|
logger.warning(f"ClickUp download: requested task_id={task_id} but API returned id={returnedId}")
|
||||||
payload = json.dumps(data, indent=2).encode("utf-8")
|
payload = json.dumps(data, indent=2).encode("utf-8")
|
||||||
return DownloadResult(data=payload, fileName=f"task-{task_id}.json", mimeType="application/json")
|
return DownloadResult(data=payload, fileName=f"task-{task_id}.json", mimeType="application/json")
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -14,6 +14,8 @@ logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
_DRIVE_BASE = "https://www.googleapis.com/drive/v3"
|
_DRIVE_BASE = "https://www.googleapis.com/drive/v3"
|
||||||
_GMAIL_BASE = "https://gmail.googleapis.com/gmail/v1"
|
_GMAIL_BASE = "https://gmail.googleapis.com/gmail/v1"
|
||||||
|
_CALENDAR_BASE = "https://www.googleapis.com/calendar/v3"
|
||||||
|
_PEOPLE_BASE = "https://people.googleapis.com/v1"
|
||||||
|
|
||||||
|
|
||||||
async def _googleGet(token: str, url: str) -> Dict[str, Any]:
|
async def _googleGet(token: str, url: str) -> Dict[str, Any]:
|
||||||
|
|
@ -274,12 +276,480 @@ class GmailAdapter(ServiceAdapter):
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
|
class CalendarAdapter(ServiceAdapter):
|
||||||
|
"""Google Calendar ServiceAdapter -- browse calendars, list events, .ics download.
|
||||||
|
|
||||||
|
Path conventions:
|
||||||
|
``""`` / ``"/"`` -> list calendars from ``calendarList``
|
||||||
|
``"/<calendarId>"`` -> list upcoming events in that calendar
|
||||||
|
``"/<calendarId>/<eventId>"`` -> reserved for future event detail browse
|
||||||
|
"""
|
||||||
|
|
||||||
|
_DEFAULT_EVENT_LIMIT = 100
|
||||||
|
_MAX_EVENT_LIMIT = 2500
|
||||||
|
|
||||||
|
def __init__(self, accessToken: str):
|
||||||
|
self._token = accessToken
|
||||||
|
|
||||||
|
async def browse(
|
||||||
|
self,
|
||||||
|
path: str,
|
||||||
|
filter: Optional[str] = None,
|
||||||
|
limit: Optional[int] = None,
|
||||||
|
) -> List[ExternalEntry]:
|
||||||
|
cleanPath = (path or "").strip("/")
|
||||||
|
if not cleanPath:
|
||||||
|
url = f"{_CALENDAR_BASE}/users/me/calendarList?maxResults=250"
|
||||||
|
result = await _googleGet(self._token, url)
|
||||||
|
if "error" in result:
|
||||||
|
logger.warning(f"Google Calendar list failed: {result['error']}")
|
||||||
|
return []
|
||||||
|
calendars = result.get("items", [])
|
||||||
|
if filter:
|
||||||
|
f = filter.lower()
|
||||||
|
calendars = [c for c in calendars if f in (c.get("summary") or "").lower()]
|
||||||
|
return [
|
||||||
|
ExternalEntry(
|
||||||
|
name=c.get("summaryOverride") or c.get("summary", ""),
|
||||||
|
path=f"/{c.get('id', '')}",
|
||||||
|
isFolder=True,
|
||||||
|
metadata={
|
||||||
|
"id": c.get("id"),
|
||||||
|
"primary": c.get("primary", False),
|
||||||
|
"accessRole": c.get("accessRole"),
|
||||||
|
"backgroundColor": c.get("backgroundColor"),
|
||||||
|
"timeZone": c.get("timeZone"),
|
||||||
|
},
|
||||||
|
)
|
||||||
|
for c in calendars
|
||||||
|
]
|
||||||
|
|
||||||
|
from urllib.parse import quote
|
||||||
|
calendarId = cleanPath.split("/", 1)[0]
|
||||||
|
effectiveLimit = self._DEFAULT_EVENT_LIMIT if limit is None else max(1, min(int(limit), self._MAX_EVENT_LIMIT))
|
||||||
|
url = (
|
||||||
|
f"{_CALENDAR_BASE}/calendars/{quote(calendarId, safe='')}/events"
|
||||||
|
f"?maxResults={effectiveLimit}&orderBy=startTime&singleEvents=true"
|
||||||
|
)
|
||||||
|
result = await _googleGet(self._token, url)
|
||||||
|
if "error" in result:
|
||||||
|
logger.warning(f"Google Calendar events failed: {result['error']}")
|
||||||
|
return []
|
||||||
|
events = result.get("items", [])
|
||||||
|
return [
|
||||||
|
ExternalEntry(
|
||||||
|
name=ev.get("summary", "(no title)"),
|
||||||
|
path=f"/{calendarId}/{ev.get('id', '')}",
|
||||||
|
isFolder=False,
|
||||||
|
mimeType="text/calendar",
|
||||||
|
metadata={
|
||||||
|
"id": ev.get("id"),
|
||||||
|
"start": (ev.get("start") or {}).get("dateTime") or (ev.get("start") or {}).get("date"),
|
||||||
|
"end": (ev.get("end") or {}).get("dateTime") or (ev.get("end") or {}).get("date"),
|
||||||
|
"location": ev.get("location"),
|
||||||
|
"organizer": (ev.get("organizer") or {}).get("email"),
|
||||||
|
"htmlLink": ev.get("htmlLink"),
|
||||||
|
"status": ev.get("status"),
|
||||||
|
},
|
||||||
|
)
|
||||||
|
for ev in events
|
||||||
|
]
|
||||||
|
|
||||||
|
async def download(self, path: str) -> DownloadResult:
|
||||||
|
from urllib.parse import quote
|
||||||
|
cleanPath = (path or "").strip("/")
|
||||||
|
if "/" not in cleanPath:
|
||||||
|
return DownloadResult()
|
||||||
|
calendarId, eventId = cleanPath.split("/", 1)
|
||||||
|
url = f"{_CALENDAR_BASE}/calendars/{quote(calendarId, safe='')}/events/{quote(eventId, safe='')}"
|
||||||
|
ev = await _googleGet(self._token, url)
|
||||||
|
if "error" in ev:
|
||||||
|
logger.warning(f"Google Calendar event fetch failed: {ev['error']}")
|
||||||
|
return DownloadResult()
|
||||||
|
icsBytes = _googleEventToIcs(ev)
|
||||||
|
summary = ev.get("summary") or eventId
|
||||||
|
safeName = _googleSafeFileName(summary) or "event"
|
||||||
|
return DownloadResult(
|
||||||
|
data=icsBytes,
|
||||||
|
fileName=f"{safeName}.ics",
|
||||||
|
mimeType="text/calendar",
|
||||||
|
)
|
||||||
|
|
||||||
|
async def upload(self, path: str, data: bytes, fileName: str) -> dict:
|
||||||
|
return {"error": "Google Calendar upload not supported"}
|
||||||
|
|
||||||
|
async def search(
|
||||||
|
self,
|
||||||
|
query: str,
|
||||||
|
path: Optional[str] = None,
|
||||||
|
limit: Optional[int] = None,
|
||||||
|
) -> List[ExternalEntry]:
|
||||||
|
from urllib.parse import quote
|
||||||
|
calendarId = (path or "").strip("/").split("/", 1)[0] or "primary"
|
||||||
|
effectiveLimit = self._DEFAULT_EVENT_LIMIT if limit is None else max(1, min(int(limit), self._MAX_EVENT_LIMIT))
|
||||||
|
url = (
|
||||||
|
f"{_CALENDAR_BASE}/calendars/{quote(calendarId, safe='')}/events"
|
||||||
|
f"?q={quote(query, safe='')}&maxResults={effectiveLimit}&singleEvents=true"
|
||||||
|
)
|
||||||
|
result = await _googleGet(self._token, url)
|
||||||
|
if "error" in result:
|
||||||
|
return []
|
||||||
|
return [
|
||||||
|
ExternalEntry(
|
||||||
|
name=ev.get("summary", "(no title)"),
|
||||||
|
path=f"/{calendarId}/{ev.get('id', '')}",
|
||||||
|
isFolder=False,
|
||||||
|
mimeType="text/calendar",
|
||||||
|
metadata={
|
||||||
|
"id": ev.get("id"),
|
||||||
|
"start": (ev.get("start") or {}).get("dateTime") or (ev.get("start") or {}).get("date"),
|
||||||
|
"end": (ev.get("end") or {}).get("dateTime") or (ev.get("end") or {}).get("date"),
|
||||||
|
},
|
||||||
|
)
|
||||||
|
for ev in result.get("items", [])
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
class ContactsAdapter(ServiceAdapter):
|
||||||
|
"""Google Contacts ServiceAdapter -- People API (read-only).
|
||||||
|
|
||||||
|
Path conventions:
|
||||||
|
``""`` / ``"/"`` -> list contact groups (incl. virtual ``all`` for the user's connections)
|
||||||
|
``"/all"`` -> list all ``people/me/connections``
|
||||||
|
``"/<groupResourceName>"`` -> list members of that contact group (e.g. ``contactGroups/myFriends``)
|
||||||
|
``"/<group>/<personId>"`` -> reserved for future detail browse;
|
||||||
|
``personId`` is the suffix after ``people/``
|
||||||
|
"""
|
||||||
|
|
||||||
|
_DEFAULT_CONTACT_LIMIT = 200
|
||||||
|
_MAX_CONTACT_LIMIT = 1000
|
||||||
|
_PERSON_FIELDS = (
|
||||||
|
"names,emailAddresses,phoneNumbers,organizations,addresses,biographies,memberships"
|
||||||
|
)
|
||||||
|
|
||||||
|
def __init__(self, accessToken: str):
|
||||||
|
self._token = accessToken
|
||||||
|
|
||||||
|
async def browse(
|
||||||
|
self,
|
||||||
|
path: str,
|
||||||
|
filter: Optional[str] = None,
|
||||||
|
limit: Optional[int] = None,
|
||||||
|
) -> List[ExternalEntry]:
|
||||||
|
cleanPath = (path or "").strip("/")
|
||||||
|
if not cleanPath:
|
||||||
|
entries: List[ExternalEntry] = [
|
||||||
|
ExternalEntry(
|
||||||
|
name="Alle Kontakte",
|
||||||
|
path="/all",
|
||||||
|
isFolder=True,
|
||||||
|
metadata={"id": "all", "isVirtual": True},
|
||||||
|
),
|
||||||
|
]
|
||||||
|
url = f"{_PEOPLE_BASE}/contactGroups?pageSize=200"
|
||||||
|
result = await _googleGet(self._token, url)
|
||||||
|
if "error" not in result:
|
||||||
|
for grp in result.get("contactGroups", []):
|
||||||
|
name = grp.get("formattedName") or grp.get("name") or ""
|
||||||
|
if not name:
|
||||||
|
continue
|
||||||
|
entries.append(
|
||||||
|
ExternalEntry(
|
||||||
|
name=name,
|
||||||
|
path=f"/{grp.get('resourceName', '')}",
|
||||||
|
isFolder=True,
|
||||||
|
metadata={
|
||||||
|
"id": grp.get("resourceName"),
|
||||||
|
"memberCount": grp.get("memberCount", 0),
|
||||||
|
"groupType": grp.get("groupType"),
|
||||||
|
},
|
||||||
|
)
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
logger.warning(f"Google contactGroups list failed: {result['error']}")
|
||||||
|
return entries
|
||||||
|
|
||||||
|
from urllib.parse import quote
|
||||||
|
effectiveLimit = self._DEFAULT_CONTACT_LIMIT if limit is None else max(1, min(int(limit), self._MAX_CONTACT_LIMIT))
|
||||||
|
groupRef = cleanPath.split("/", 1)[0]
|
||||||
|
if groupRef == "all":
|
||||||
|
url = (
|
||||||
|
f"{_PEOPLE_BASE}/people/me/connections"
|
||||||
|
f"?pageSize={min(effectiveLimit, 1000)}&personFields={self._PERSON_FIELDS}"
|
||||||
|
)
|
||||||
|
result = await _googleGet(self._token, url)
|
||||||
|
if "error" in result:
|
||||||
|
logger.warning(f"Google People connections failed: {result['error']}")
|
||||||
|
return []
|
||||||
|
people = result.get("connections", [])
|
||||||
|
else:
|
||||||
|
groupResource = groupRef
|
||||||
|
grpUrl = (
|
||||||
|
f"{_PEOPLE_BASE}/{quote(groupResource, safe='/')}"
|
||||||
|
f"?maxMembers={min(effectiveLimit, 1000)}"
|
||||||
|
)
|
||||||
|
grpResult = await _googleGet(self._token, grpUrl)
|
||||||
|
if "error" in grpResult:
|
||||||
|
logger.warning(f"Google contactGroup detail failed: {grpResult['error']}")
|
||||||
|
return []
|
||||||
|
memberResourceNames = grpResult.get("memberResourceNames") or []
|
||||||
|
if not memberResourceNames:
|
||||||
|
return []
|
||||||
|
chunkSize = 200
|
||||||
|
people: List[Dict[str, Any]] = []
|
||||||
|
for i in range(0, min(len(memberResourceNames), effectiveLimit), chunkSize):
|
||||||
|
chunk = memberResourceNames[i : i + chunkSize]
|
||||||
|
params = "&".join(f"resourceNames={quote(rn, safe='/')}" for rn in chunk)
|
||||||
|
batchUrl = f"{_PEOPLE_BASE}/people:batchGet?{params}&personFields={self._PERSON_FIELDS}"
|
||||||
|
batchResult = await _googleGet(self._token, batchUrl)
|
||||||
|
if "error" in batchResult:
|
||||||
|
logger.warning(f"Google People batchGet failed: {batchResult['error']}")
|
||||||
|
continue
|
||||||
|
for resp in batchResult.get("responses", []):
|
||||||
|
person = resp.get("person")
|
||||||
|
if person:
|
||||||
|
people.append(person)
|
||||||
|
if len(people) >= effectiveLimit:
|
||||||
|
break
|
||||||
|
|
||||||
|
return [
|
||||||
|
ExternalEntry(
|
||||||
|
name=_googlePersonLabel(p) or "(no name)",
|
||||||
|
path=f"/{groupRef}/{(p.get('resourceName', '') or '').split('/')[-1]}",
|
||||||
|
isFolder=False,
|
||||||
|
mimeType="text/vcard",
|
||||||
|
metadata={
|
||||||
|
"id": p.get("resourceName"),
|
||||||
|
"emails": [e.get("value") for e in (p.get("emailAddresses") or []) if e.get("value")],
|
||||||
|
"phones": [pn.get("value") for pn in (p.get("phoneNumbers") or []) if pn.get("value")],
|
||||||
|
"organization": (p.get("organizations") or [{}])[0].get("name") if p.get("organizations") else None,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
for p in people[:effectiveLimit]
|
||||||
|
]
|
||||||
|
|
||||||
|
async def download(self, path: str) -> DownloadResult:
|
||||||
|
from urllib.parse import quote
|
||||||
|
cleanPath = (path or "").strip("/")
|
||||||
|
if "/" not in cleanPath:
|
||||||
|
return DownloadResult()
|
||||||
|
personSuffix = cleanPath.split("/")[-1]
|
||||||
|
if not personSuffix:
|
||||||
|
return DownloadResult()
|
||||||
|
url = f"{_PEOPLE_BASE}/people/{quote(personSuffix, safe='')}?personFields={self._PERSON_FIELDS}"
|
||||||
|
person = await _googleGet(self._token, url)
|
||||||
|
if "error" in person:
|
||||||
|
logger.warning(f"Google People fetch failed: {person['error']}")
|
||||||
|
return DownloadResult()
|
||||||
|
vcfBytes = _googlePersonToVcard(person)
|
||||||
|
label = _googlePersonLabel(person) or personSuffix
|
||||||
|
safeName = _googleSafeFileName(label) or "contact"
|
||||||
|
return DownloadResult(
|
||||||
|
data=vcfBytes,
|
||||||
|
fileName=f"{safeName}.vcf",
|
||||||
|
mimeType="text/vcard",
|
||||||
|
)
|
||||||
|
|
||||||
|
async def upload(self, path: str, data: bytes, fileName: str) -> dict:
|
||||||
|
return {"error": "Google Contacts upload not supported"}
|
||||||
|
|
||||||
|
async def search(
|
||||||
|
self,
|
||||||
|
query: str,
|
||||||
|
path: Optional[str] = None,
|
||||||
|
limit: Optional[int] = None,
|
||||||
|
) -> List[ExternalEntry]:
|
||||||
|
from urllib.parse import quote
|
||||||
|
effectiveLimit = self._DEFAULT_CONTACT_LIMIT if limit is None else max(1, min(int(limit), self._MAX_CONTACT_LIMIT))
|
||||||
|
url = (
|
||||||
|
f"{_PEOPLE_BASE}/people:searchContacts"
|
||||||
|
f"?query={quote(query, safe='')}&pageSize={min(effectiveLimit, 30)}"
|
||||||
|
f"&readMask={self._PERSON_FIELDS}"
|
||||||
|
)
|
||||||
|
result = await _googleGet(self._token, url)
|
||||||
|
if "error" in result:
|
||||||
|
return []
|
||||||
|
entries: List[ExternalEntry] = []
|
||||||
|
for r in result.get("results", []):
|
||||||
|
p = r.get("person") or {}
|
||||||
|
entries.append(
|
||||||
|
ExternalEntry(
|
||||||
|
name=_googlePersonLabel(p) or "(no name)",
|
||||||
|
path=f"/search/{(p.get('resourceName', '') or '').split('/')[-1]}",
|
||||||
|
isFolder=False,
|
||||||
|
mimeType="text/vcard",
|
||||||
|
metadata={
|
||||||
|
"id": p.get("resourceName"),
|
||||||
|
"emails": [e.get("value") for e in (p.get("emailAddresses") or []) if e.get("value")],
|
||||||
|
},
|
||||||
|
)
|
||||||
|
)
|
||||||
|
return entries
|
||||||
|
|
||||||
|
|
||||||
|
def _googleSafeFileName(name: str) -> str:
|
||||||
|
import re
|
||||||
|
return re.sub(r'[<>:"/\\|?*\x00-\x1f]', "_", name or "")[:80].strip(". ")
|
||||||
|
|
||||||
|
|
||||||
|
def _googleIcsEscape(value: str) -> str:
|
||||||
|
if value is None:
|
||||||
|
return ""
|
||||||
|
return (
|
||||||
|
value.replace("\\", "\\\\")
|
||||||
|
.replace(";", "\\;")
|
||||||
|
.replace(",", "\\,")
|
||||||
|
.replace("\r\n", "\\n")
|
||||||
|
.replace("\n", "\\n")
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _googleIcsDateTime(value: Optional[str]) -> Optional[str]:
|
||||||
|
"""Convert a Google Calendar dateTime/date string to RFC 5545 format (UTC)."""
|
||||||
|
if not value:
|
||||||
|
return None
|
||||||
|
from datetime import datetime, timezone
|
||||||
|
try:
|
||||||
|
if "T" not in value:
|
||||||
|
dt = datetime.strptime(value, "%Y-%m-%d")
|
||||||
|
return dt.strftime("%Y%m%d")
|
||||||
|
normalized = value.replace("Z", "+00:00") if value.endswith("Z") else value
|
||||||
|
dt = datetime.fromisoformat(normalized)
|
||||||
|
if dt.tzinfo is None:
|
||||||
|
dt = dt.replace(tzinfo=timezone.utc)
|
||||||
|
return dt.astimezone(timezone.utc).strftime("%Y%m%dT%H%M%SZ")
|
||||||
|
except (TypeError, ValueError):
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def _googleEventToIcs(event: Dict[str, Any]) -> bytes:
|
||||||
|
"""Build a minimal RFC 5545 VCALENDAR/VEVENT for a Google Calendar event."""
|
||||||
|
from datetime import datetime, timezone
|
||||||
|
uid = event.get("iCalUID") or event.get("id") or "unknown@poweron"
|
||||||
|
summary = _googleIcsEscape(event.get("summary") or "")
|
||||||
|
location = _googleIcsEscape(event.get("location") or "")
|
||||||
|
description = _googleIcsEscape(event.get("description") or "")
|
||||||
|
rawStart = (event.get("start") or {}).get("dateTime") or (event.get("start") or {}).get("date")
|
||||||
|
rawEnd = (event.get("end") or {}).get("dateTime") or (event.get("end") or {}).get("date")
|
||||||
|
isAllDay = bool((event.get("start") or {}).get("date") and not (event.get("start") or {}).get("dateTime"))
|
||||||
|
dtstart = _googleIcsDateTime(rawStart)
|
||||||
|
dtend = _googleIcsDateTime(rawEnd)
|
||||||
|
dtstamp = _googleIcsDateTime(event.get("updated")) or datetime.now(timezone.utc).strftime("%Y%m%dT%H%M%SZ")
|
||||||
|
|
||||||
|
lines = [
|
||||||
|
"BEGIN:VCALENDAR",
|
||||||
|
"VERSION:2.0",
|
||||||
|
"PRODID:-//PowerOn//Google-Calendar-Adapter//EN",
|
||||||
|
"CALSCALE:GREGORIAN",
|
||||||
|
"BEGIN:VEVENT",
|
||||||
|
f"UID:{uid}",
|
||||||
|
f"DTSTAMP:{dtstamp}",
|
||||||
|
]
|
||||||
|
if dtstart:
|
||||||
|
lines.append(f"DTSTART;VALUE=DATE:{dtstart}" if isAllDay else f"DTSTART:{dtstart}")
|
||||||
|
if dtend:
|
||||||
|
lines.append(f"DTEND;VALUE=DATE:{dtend}" if isAllDay else f"DTEND:{dtend}")
|
||||||
|
if summary:
|
||||||
|
lines.append(f"SUMMARY:{summary}")
|
||||||
|
if location:
|
||||||
|
lines.append(f"LOCATION:{location}")
|
||||||
|
if description:
|
||||||
|
lines.append(f"DESCRIPTION:{description}")
|
||||||
|
organizer = (event.get("organizer") or {}).get("email")
|
||||||
|
if organizer:
|
||||||
|
lines.append(f"ORGANIZER:mailto:{organizer}")
|
||||||
|
for att in (event.get("attendees") or []):
|
||||||
|
addr = att.get("email")
|
||||||
|
if addr:
|
||||||
|
lines.append(f"ATTENDEE:mailto:{addr}")
|
||||||
|
lines.append("END:VEVENT")
|
||||||
|
lines.append("END:VCALENDAR")
|
||||||
|
return ("\r\n".join(lines) + "\r\n").encode("utf-8")
|
||||||
|
|
||||||
|
|
||||||
|
def _googlePersonLabel(person: Dict[str, Any]) -> str:
|
||||||
|
names = person.get("names") or []
|
||||||
|
if names:
|
||||||
|
primary = names[0]
|
||||||
|
display = primary.get("displayName") or ""
|
||||||
|
if display:
|
||||||
|
return display
|
||||||
|
given = primary.get("givenName") or ""
|
||||||
|
family = primary.get("familyName") or ""
|
||||||
|
full = f"{given} {family}".strip()
|
||||||
|
if full:
|
||||||
|
return full
|
||||||
|
orgs = person.get("organizations") or []
|
||||||
|
if orgs and orgs[0].get("name"):
|
||||||
|
return orgs[0]["name"]
|
||||||
|
emails = person.get("emailAddresses") or []
|
||||||
|
if emails and emails[0].get("value"):
|
||||||
|
return emails[0]["value"]
|
||||||
|
return ""
|
||||||
|
|
||||||
|
|
||||||
|
def _googlePersonToVcard(person: Dict[str, Any]) -> bytes:
|
||||||
|
"""Build a vCard 3.0 from a Google People API person payload."""
|
||||||
|
names = person.get("names") or []
|
||||||
|
primaryName = names[0] if names else {}
|
||||||
|
given = primaryName.get("givenName") or ""
|
||||||
|
family = primaryName.get("familyName") or ""
|
||||||
|
middle = primaryName.get("middleName") or ""
|
||||||
|
fn = primaryName.get("displayName") or _googlePersonLabel(person) or ""
|
||||||
|
|
||||||
|
lines = [
|
||||||
|
"BEGIN:VCARD",
|
||||||
|
"VERSION:3.0",
|
||||||
|
f"N:{family};{given};{middle};;",
|
||||||
|
f"FN:{fn}",
|
||||||
|
]
|
||||||
|
orgs = person.get("organizations") or []
|
||||||
|
if orgs:
|
||||||
|
org = orgs[0]
|
||||||
|
orgVal = org.get("name") or ""
|
||||||
|
if org.get("department"):
|
||||||
|
orgVal = f"{orgVal};{org['department']}"
|
||||||
|
if orgVal:
|
||||||
|
lines.append(f"ORG:{orgVal}")
|
||||||
|
if org.get("title"):
|
||||||
|
lines.append(f"TITLE:{org['title']}")
|
||||||
|
for em in (person.get("emailAddresses") or []):
|
||||||
|
addr = em.get("value")
|
||||||
|
if not addr:
|
||||||
|
continue
|
||||||
|
emailType = (em.get("type") or "INTERNET").upper()
|
||||||
|
lines.append(f"EMAIL;TYPE={emailType}:{addr}")
|
||||||
|
for ph in (person.get("phoneNumbers") or []):
|
||||||
|
val = ph.get("value")
|
||||||
|
if not val:
|
||||||
|
continue
|
||||||
|
phType = (ph.get("type") or "VOICE").upper()
|
||||||
|
lines.append(f"TEL;TYPE={phType}:{val}")
|
||||||
|
for addr in (person.get("addresses") or []):
|
||||||
|
street = addr.get("streetAddress") or ""
|
||||||
|
city = addr.get("city") or ""
|
||||||
|
region = addr.get("region") or ""
|
||||||
|
postal = addr.get("postalCode") or ""
|
||||||
|
country = addr.get("country") or ""
|
||||||
|
if any([street, city, region, postal, country]):
|
||||||
|
adrType = (addr.get("type") or "OTHER").upper()
|
||||||
|
lines.append(f"ADR;TYPE={adrType}:;;{street};{city};{region};{postal};{country}")
|
||||||
|
bios = person.get("biographies") or []
|
||||||
|
if bios and bios[0].get("value"):
|
||||||
|
lines.append(f"NOTE:{_googleIcsEscape(bios[0]['value'])}")
|
||||||
|
lines.append(f"UID:{person.get('resourceName', '')}")
|
||||||
|
lines.append("END:VCARD")
|
||||||
|
return ("\r\n".join(lines) + "\r\n").encode("utf-8")
|
||||||
|
|
||||||
|
|
||||||
class GoogleConnector(ProviderConnector):
|
class GoogleConnector(ProviderConnector):
|
||||||
"""Google ProviderConnector -- 1 connection -> Drive + Gmail."""
|
"""Google ProviderConnector -- 1 connection -> Drive + Gmail + Calendar + Contacts."""
|
||||||
|
|
||||||
_SERVICE_MAP = {
|
_SERVICE_MAP = {
|
||||||
"drive": DriveAdapter,
|
"drive": DriveAdapter,
|
||||||
"gmail": GmailAdapter,
|
"gmail": GmailAdapter,
|
||||||
|
"calendar": CalendarAdapter,
|
||||||
|
"contact": ContactsAdapter,
|
||||||
}
|
}
|
||||||
|
|
||||||
def getAvailableServices(self) -> List[str]:
|
def getAvailableServices(self) -> List[str]:
|
||||||
|
|
|
||||||
3
modules/connectors/providerInfomaniak/__init__.py
Normal file
3
modules/connectors/providerInfomaniak/__init__.py
Normal file
|
|
@ -0,0 +1,3 @@
|
||||||
|
# Copyright (c) 2025 Patrick Motsch
|
||||||
|
# All rights reserved.
|
||||||
|
"""Infomaniak Provider Connector -- 1 Connection : n Services (kDrive, Mail)."""
|
||||||
961
modules/connectors/providerInfomaniak/connectorInfomaniak.py
Normal file
961
modules/connectors/providerInfomaniak/connectorInfomaniak.py
Normal file
|
|
@ -0,0 +1,961 @@
|
||||||
|
# Copyright (c) 2025 Patrick Motsch
|
||||||
|
# All rights reserved.
|
||||||
|
"""Infomaniak ProviderConnector -- kDrive + Calendar + Contacts via PAT.
|
||||||
|
|
||||||
|
The PAT carries one or more of these scopes:
|
||||||
|
|
||||||
|
- ``drive`` -> kDrive (active here)
|
||||||
|
- ``workspace:calendar`` -> Calendar (active here)
|
||||||
|
- ``workspace:contact`` -> Contacts (active here)
|
||||||
|
- ``workspace:mail`` -> Mail (no public PAT-friendly endpoint yet)
|
||||||
|
|
||||||
|
Mail is intentionally NOT in ``_SERVICE_MAP`` until we find a
|
||||||
|
PAT-authenticated endpoint -- the public ``/1/mail`` and
|
||||||
|
``mail.infomaniak.com/api/pim/mail*`` routes either don't exist (404
|
||||||
|
nginx) or 302 to OAuth, so wiring a stub adapter would only confuse
|
||||||
|
users.
|
||||||
|
|
||||||
|
Path conventions (leading slash, ``ServiceAdapter`` paths always start with
|
||||||
|
``/``):
|
||||||
|
kDrive (api.infomaniak.com, requires ``account_id`` query arg):
|
||||||
|
/ -- list drives in the user's account
|
||||||
|
/{driveId} -- root folder of a drive
|
||||||
|
/{driveId}/{fileId} -- folder children OR file (download)
|
||||||
|
Calendar (calendar.infomaniak.com PIM):
|
||||||
|
/ -- list calendars accessible to the user
|
||||||
|
/{calendarId} -- events of one calendar
|
||||||
|
/{calendarId}/{eventId} -- single event (.ics download)
|
||||||
|
Contacts (contacts.infomaniak.com PIM):
|
||||||
|
/ -- list address books
|
||||||
|
/{addressBookId} -- contacts in that address book
|
||||||
|
/{addressBookId}/{contactId} -- single contact (.vcf download)
|
||||||
|
"""
|
||||||
|
|
||||||
|
import logging
|
||||||
|
import re
|
||||||
|
from datetime import datetime, timedelta, timezone
|
||||||
|
from typing import Any, Dict, List, Optional, TypedDict
|
||||||
|
from urllib.parse import quote
|
||||||
|
|
||||||
|
import aiohttp
|
||||||
|
|
||||||
|
from modules.connectors.connectorProviderBase import (
|
||||||
|
ProviderConnector,
|
||||||
|
ServiceAdapter,
|
||||||
|
DownloadResult,
|
||||||
|
)
|
||||||
|
from modules.datamodels.datamodelDataSource import ExternalEntry
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)

# Host serving the kDrive (v2) REST API; requires the ``drive`` PAT scope.
_API_BASE = "https://api.infomaniak.com"
# PIM hosts for the workspace Calendar and Contacts endpoints.
_CALENDAR_BASE = "https://calendar.infomaniak.com"
_CONTACTS_BASE = "https://contacts.infomaniak.com"
# Path prefix shared by every PIM endpoint on the two hosts above.
_PIM_PREFIX = "/api/pim"
|
||||||
|
|
||||||
|
|
||||||
|
class InfomaniakOwnerIdentity(TypedDict):
    """Minimal identity payload for the PAT owner.

    ``accountId`` is the only field the kDrive adapter needs at runtime.
    ``displayName`` is harvested for the connection UI; both fields come
    from the same PIM Owner record.
    """

    # kSuite account id, copied from the owner record's ``account_id``.
    accountId: int
    # Human-readable label for the connection UI; ``None`` when the
    # owner record carries no ``name``.
    displayName: Optional[str]
|
||||||
|
|
||||||
|
|
||||||
|
class InfomaniakIdentityError(RuntimeError):
    """Raised when no owner identity can be derived from a PAT."""
|
||||||
|
|
||||||
|
|
||||||
|
async def _infomaniakGet(
    token: str,
    endpoint: str,
    baseUrl: str = _API_BASE,
) -> Dict[str, Any]:
    """Issue one authenticated GET and return the parsed JSON body.

    ``endpoint`` is joined onto ``baseUrl`` (slashes normalized at the
    seam). Any non-2xx status or network failure is reported as
    ``{'error': ...}`` rather than raised, so callers can branch on the
    ``error`` key.
    """
    url = "/".join((baseUrl.rstrip("/"), endpoint.lstrip("/")))
    authHeaders = {
        "Authorization": f"Bearer {token}",
        "Accept": "application/json",
    }
    try:
        async with aiohttp.ClientSession(
            timeout=aiohttp.ClientTimeout(total=20)
        ) as session:
            # No redirects here: JSON endpoints that 302 are treated as
            # errors (typically an OAuth bounce, i.e. a bad/underscoped PAT).
            async with session.get(
                url, headers=authHeaders, allow_redirects=False
            ) as resp:
                if resp.status in (200, 201):
                    return await resp.json()
                errorText = await resp.text()
                logger.warning(f"Infomaniak GET {url} -> {resp.status}: {errorText[:300]}")
                return {"error": f"{resp.status}: {errorText[:200]}"}
    except Exception as e:
        logger.error(f"Infomaniak GET {url} crashed: {e}")
        return {"error": str(e)}
|
||||||
|
|
||||||
|
|
||||||
|
async def _infomaniakDownload(
    token: str,
    endpoint: str,
    baseUrl: str = _API_BASE,
) -> Optional[bytes]:
    """Fetch raw bytes from an Infomaniak host; ``None`` on any failure.

    Redirects are followed on purpose -- unlike :func:`_infomaniakGet`:
    kDrive's ``/2/drive/{driveId}/files/{fileId}/download`` answers with
    ``302 -> presigned CDN URL`` (the usual pattern for bandwidth-heavy
    transfers), and the Calendar/Contacts export endpoints behave the
    same. The Authorization header survives the hop because the target
    stays on the same Infomaniak property.
    """
    url = "/".join((baseUrl.rstrip("/"), endpoint.lstrip("/")))
    try:
        async with aiohttp.ClientSession(
            timeout=aiohttp.ClientTimeout(total=120)
        ) as session:
            async with session.get(
                url,
                headers={"Authorization": f"Bearer {token}"},
                allow_redirects=True,
            ) as resp:
                if resp.status == 200:
                    return await resp.read()
                logger.warning(
                    f"Infomaniak download {url} -> {resp.status}: "
                    f"{(await resp.text())[:300]}"
                )
                return None
    except Exception as e:
        logger.error(f"Infomaniak download {url} crashed: {e}")
        return None
|
||||||
|
|
||||||
|
|
||||||
|
def _unwrapData(payload: Any) -> Any:
|
||||||
|
"""Infomaniak wraps successful responses as ``{result: 'success', data: ...}``."""
|
||||||
|
if isinstance(payload, dict) and "data" in payload and "result" in payload:
|
||||||
|
return payload.get("data")
|
||||||
|
return payload
|
||||||
|
|
||||||
|
|
||||||
|
def _firstOwnerRecord(payload: Any, listKey: str) -> Optional[Dict[str, Any]]:
    """Return the first record the PAT user actually owns, or ``None``.

    PIM Calendar (``calendars``) and PIM Contacts (``addressbooks``) both
    answer ``{result, data: {<listKey>: [...]}}``. Owned records carry a
    positive integer ``user_id`` plus an integer ``account_id``; shared
    or public entries (e.g. holiday calendars) have ``user_id = -1`` and
    a null ``account_id`` and are skipped.
    """
    data = _unwrapData(payload) if payload else None
    if not isinstance(data, dict):
        return None
    records = data.get(listKey)
    if not isinstance(records, list):
        return None

    def _isOwned(rec: Any) -> bool:
        # Ownership test: positive numeric user_id AND integer account_id.
        if not isinstance(rec, dict):
            return False
        uid = rec.get("user_id")
        acc = rec.get("account_id")
        return isinstance(uid, int) and uid > 0 and isinstance(acc, int)

    return next((rec for rec in records if _isOwned(rec)), None)
|
||||||
|
|
||||||
|
|
||||||
|
async def resolveOwnerIdentity(token: str) -> InfomaniakOwnerIdentity:
    """Resolve the PAT owner's account id and display name for the UI.

    Used purely for display on the connection (``externalUsername`` /
    ``externalId``). The PIM Calendar and Contacts listings both embed
    the kSuite ``account_id`` and the user's display name in their owner
    records; Calendar is probed first because it is the more universally
    provisioned PIM service, Contacts is the equivalent fallback.

    Raises :class:`InfomaniakIdentityError` when neither service yields
    an owner record.
    """
    probeOrder = [
        (_CALENDAR_BASE, f"{_PIM_PREFIX}/calendar", "calendars"),
        (_CONTACTS_BASE, f"{_PIM_PREFIX}/addressbook", "addressbooks"),
    ]
    for baseUrl, endpoint, listKey in probeOrder:
        payload = await _infomaniakGet(token, endpoint, baseUrl=baseUrl)
        if isinstance(payload, dict) and payload.get("error"):
            continue
        ownerRecord = _firstOwnerRecord(payload, listKey)
        if ownerRecord is not None:
            return InfomaniakOwnerIdentity(
                accountId=int(ownerRecord["account_id"]),
                displayName=ownerRecord.get("name") or None,
            )
    raise InfomaniakIdentityError(
        "Could not resolve Infomaniak owner identity from PIM Calendar or "
        "Contacts. The PAT must carry 'workspace:calendar' or "
        "'workspace:contact' so we can label the connection."
    )
|
||||||
|
|
||||||
|
|
||||||
|
async def listAccessibleDrives(token: str) -> List[Dict[str, Any]]:
    """Return every kDrive the PAT can reach, whatever its role.

    Hits ``GET /2/drive/init?with=drives`` -- the only PAT-friendly route
    that enumerates a user's drives independently of the Drive-Manager
    admin role. The plain ``/2/drive?account_id=...`` listing is filtered
    to drives the caller administers and therefore comes back empty for
    everyone with ``role: user``, even though the same user can read and
    write the drive's files via ``/2/drive/{driveId}/...``.

    Only the ``drive`` PAT scope is needed (no ``accounts``, no
    ``user_info``, no admin permission). Entries match the shape
    documented for ``GET /2/drive/{drive_id}`` (``id``, ``name``,
    ``account_id``, ``role``, ...).

    Raises :class:`InfomaniakIdentityError` when the scope is missing or
    the response is malformed.
    """
    payload = await _infomaniakGet(token, "/2/drive/init?with=drives")
    if isinstance(payload, dict) and payload.get("error"):
        raise InfomaniakIdentityError(
            "Could not list Infomaniak kDrives. The PAT must carry the "
            f"'drive' scope (/2/drive/init said: {payload['error']})."
        )

    data = _unwrapData(payload)
    if not isinstance(data, dict):
        raise InfomaniakIdentityError(
            "Unexpected /2/drive/init response shape (expected an object)."
        )

    drives = data.get("drives") or []
    if not isinstance(drives, list):
        raise InfomaniakIdentityError(
            "Unexpected /2/drive/init response: 'drives' is not a list."
        )

    # Keep only well-formed drive records that actually carry an id.
    return [entry for entry in drives if isinstance(entry, dict) and entry.get("id")]
|
||||||
|
|
||||||
|
|
||||||
|
def _lastNumericSegment(segments: List[str]) -> Optional[str]:
|
||||||
|
"""Return the last all-digit segment (kDrive file/folder IDs are int).
|
||||||
|
|
||||||
|
The agent sometimes appends the human-readable filename to a path,
|
||||||
|
e.g. ``/2980592/12/platform-overview.html``. The kDrive API does
|
||||||
|
not accept names -- only numeric IDs -- so we strip trailing
|
||||||
|
non-numeric segments and pick the last integer ID.
|
||||||
|
Returns ``None`` if no numeric segment exists.
|
||||||
|
"""
|
||||||
|
for seg in reversed(segments):
|
||||||
|
if seg.isdigit():
|
||||||
|
return seg
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
class KdriveAdapter(ServiceAdapter):
    """kDrive ServiceAdapter -- browse drives, folders, files.

    Drive enumeration goes through :func:`listAccessibleDrives` which
    calls ``/2/drive/init?with=drives``. That endpoint returns every
    drive the PAT can read regardless of the Drive-Manager admin role
    -- unlike the documented ``/2/drive?account_id=...`` listing which
    silently returns an empty array for users with ``role: 'user'``
    (the most common case for kSuite members).

    The drive list is cached on the adapter instance so each browse
    pays for one ``/2/drive/init`` call at most.

    File-vs-folder handling: a DataSource may point at a single file
    (e.g. ``/{driveId}/{fileId}`` where ``fileId`` is a regular file).
    Calling ``/files/{fileId}/files`` on a file answers
    ``400 destination_not_a_directory`` -- so :meth:`browse` first
    fetches the item's metadata and, if ``type=file``, returns a
    one-element list describing the file itself instead of pretending
    the directory is empty.
    """

    def __init__(self, accessToken: str):
        # PAT carrying (at least) the ``drive`` scope.
        self._token = accessToken
        # Lazily filled by _ensureDrives(); None means "not fetched yet".
        self._drives: Optional[List[Dict[str, Any]]] = None

    async def _ensureDrives(self) -> List[Dict[str, Any]]:
        """Return the cached drive list, fetching it on first use."""
        if self._drives is not None:
            return self._drives
        self._drives = await listAccessibleDrives(self._token)
        return self._drives

    async def _fetchItemMeta(self, driveId: str, fileId: str) -> Optional[Dict[str, Any]]:
        """Return the kDrive file/folder metadata dict, or ``None`` on error."""
        meta = await _infomaniakGet(self._token, f"/2/drive/{driveId}/files/{fileId}")
        if not isinstance(meta, dict) or meta.get("error"):
            return None
        data = _unwrapData(meta)
        return data if isinstance(data, dict) else None

    async def browse(
        self,
        path: str,
        filter: Optional[str] = None,
        limit: Optional[int] = None,
    ) -> List[ExternalEntry]:
        """Browse ``/`` (drives), ``/{driveId}`` (root) or ``/{driveId}/{fileId}``.

        When the addressed item turns out to be a regular file, returns a
        one-element list describing the file itself (see class docstring).
        """
        cleanPath = (path or "").strip("/")
        segments = [s for s in cleanPath.split("/") if s]

        if not segments:
            return await self._listDrives()

        driveId = segments[0]
        if len(segments) == 1:
            return await self._listChildren(driveId, fileId=None, limit=limit)

        # The agent may append a filename; only the numeric id is usable.
        fileId = _lastNumericSegment(segments[1:])
        if fileId is None:
            return []

        meta = await self._fetchItemMeta(driveId, fileId)
        if meta is not None and meta.get("type") == "file":
            return [ExternalEntry(
                name=meta.get("name") or fileId,
                path=f"/{driveId}/{fileId}",
                isFolder=False,
                size=meta.get("size"),
                mimeType=meta.get("mime_type"),
                lastModified=meta.get("last_modified_at"),
                metadata={"id": fileId, "kind": "file"},
            )]
        return await self._listChildren(driveId, fileId=fileId, limit=limit)

    async def _listDrives(self) -> List[ExternalEntry]:
        """List every reachable drive as a folder entry at the tree root."""
        drives = await self._ensureDrives()
        entries: List[ExternalEntry] = []
        for drive in drives:
            driveId = str(drive.get("id", ""))
            if not driveId:
                continue
            entries.append(ExternalEntry(
                name=drive.get("name") or driveId,
                path=f"/{driveId}",
                isFolder=True,
                metadata={
                    "id": driveId,
                    "kind": "drive",
                    "accountId": drive.get("account_id"),
                    "role": drive.get("role"),
                },
            ))
        return entries

    async def _listChildren(
        self,
        driveId: str,
        fileId: Optional[str],
        limit: Optional[int],
    ) -> List[ExternalEntry]:
        """List children of a drive root (``fileId=None``) or of a folder."""
        if fileId is None:
            endpoint = f"/2/drive/{driveId}/files"
        else:
            endpoint = f"/2/drive/{driveId}/files/{fileId}/files"

        # Single page only; clamp to the vendor maximum of 1000 items.
        pageSize = max(1, min(int(limit or 200), 1000))
        endpoint = f"{endpoint}?per_page={pageSize}"

        result = await _infomaniakGet(self._token, endpoint)
        if isinstance(result, dict) and result.get("error"):
            logger.warning(
                f"kDrive list-children {driveId}/{fileId or 'root'} failed: {result['error']}"
            )
            return []
        data = _unwrapData(result)
        items = data if isinstance(data, list) else data.get("items", []) if isinstance(data, dict) else []

        entries: List[ExternalEntry] = []
        for item in items:
            itemId = str(item.get("id", ""))
            if not itemId:
                continue
            isFolder = item.get("type") == "dir"
            entries.append(ExternalEntry(
                name=item.get("name", itemId),
                path=f"/{driveId}/{itemId}",
                isFolder=isFolder,
                size=item.get("size") if not isFolder else None,
                mimeType=item.get("mime_type") if not isFolder else None,
                lastModified=item.get("last_modified_at"),
                metadata={"id": itemId, "kind": item.get("type", "")},
            ))
        return entries

    async def download(self, path: str) -> DownloadResult:
        """Download the file at ``/{driveId}/{fileId}[/{name}]``.

        Returns an empty :class:`DownloadResult` on any failure.
        """
        segments = [s for s in (path or "").strip("/").split("/") if s]
        if len(segments) < 2:
            return DownloadResult()
        driveId = segments[0]
        # Agent may append the filename: ``/{driveId}/{fileId}/{name}``.
        # Pull the last numeric segment instead of trusting segments[-1].
        fileId = _lastNumericSegment(segments[1:])
        if fileId is None:
            return DownloadResult()

        meta = await self._fetchItemMeta(driveId, fileId)
        fileName = (meta or {}).get("name") or fileId
        mimeType = (meta or {}).get("mime_type") or "application/octet-stream"

        # 302s to a presigned CDN URL; _infomaniakDownload follows it.
        content = await _infomaniakDownload(
            self._token, f"/2/drive/{driveId}/files/{fileId}/download"
        )
        if content is None:
            return DownloadResult()
        return DownloadResult(data=content, fileName=fileName, mimeType=mimeType)

    async def upload(self, path: str, data: bytes, fileName: str) -> dict:
        """Not implemented yet; always answers with an ``error`` payload."""
        return {"error": "kDrive upload not yet implemented"}

    async def search(
        self,
        query: str,
        path: Optional[str] = None,
        limit: Optional[int] = None,
    ) -> List[ExternalEntry]:
        """Full-text search within one drive (first drive when no path given)."""
        segments = [s for s in (path or "").strip("/").split("/") if s]
        if not segments:
            drives = await self._listDrives()
            if not drives:
                return []
            driveId = (drives[0].metadata or {}).get("id") or drives[0].path.strip("/")
        else:
            driveId = segments[0]

        pageSize = max(1, min(int(limit or 50), 200))
        # BUGFIX: percent-encode the user query -- raw interpolation broke
        # the query string for terms containing spaces, '&' or '#'.
        endpoint = f"/2/drive/{driveId}/files/search?query={quote(query)}&per_page={pageSize}"
        result = await _infomaniakGet(self._token, endpoint)
        if isinstance(result, dict) and result.get("error"):
            return []
        data = _unwrapData(result)
        items = data if isinstance(data, list) else data.get("items", []) if isinstance(data, dict) else []

        entries: List[ExternalEntry] = []
        for item in items:
            itemId = str(item.get("id", ""))
            if not itemId:
                continue
            isFolder = item.get("type") == "dir"
            entries.append(ExternalEntry(
                name=item.get("name", itemId),
                path=f"/{driveId}/{itemId}",
                isFolder=isFolder,
                size=item.get("size") if not isFolder else None,
                mimeType=item.get("mime_type") if not isFolder else None,
                metadata={"id": itemId},
            ))
        return entries
|
||||||
|
|
||||||
|
|
||||||
|
def _safeFileName(label: str, fallback: str) -> str:
|
||||||
|
"""Sanitize a string for use as a filename. Trims and caps at 80 chars."""
|
||||||
|
cleaned = re.sub(r'[<>:"/\\|?*\x00-\x1f]', "_", str(label or "")).strip(". ")
|
||||||
|
return cleaned[:80] or fallback
|
||||||
|
|
||||||
|
|
||||||
|
class CalendarAdapter(ServiceAdapter):
    """Infomaniak Calendar adapter -- browse calendars + events, .ics download.

    Uses the public PIM endpoints at ``calendar.infomaniak.com/api/pim``,
    which authenticate with the PAT scope ``workspace:calendar``.

    Path layout:
        ``/``                        -> list calendars
        ``/{calendarId}``            -> list events of that calendar
        ``/{calendarId}/{eventId}``  -> single event (download as .ics)

    Endpoint particulars:
        Listing events runs against ``/api/pim/event`` with the calendar
        id as a query arg (the per-calendar nested route
        ``/calendar/{id}/event`` is **not** PAT-friendly -- it 302s to the
        OAuth login page). Infomaniak enforces a hard ``from``/``to``
        window of less than 3 months, so this adapter queries a fixed
        90-day window centered on today (30 days back, 60 days forward),
        which covers typical UDB browsing. Date format is ``Y-m-d H:i:s``.
        Event detail and ``.ics`` export are addressed by event id alone
        (``/api/pim/event/{eventId}`` and ``.../export``); the calendar
        id from the path is kept only for tree-navigation continuity.
    """

    # Vendor enforces ``Range must be lower than 3 months``. We stay
    # comfortably below to keep one call per browse.
    _PAST_DAYS = 30
    _FUTURE_DAYS = 60

    def __init__(self, accessToken: str):
        # PAT carrying the ``workspace:calendar`` scope.
        self._token = accessToken

    async def browse(
        self,
        path: str,
        filter: Optional[str] = None,
        limit: Optional[int] = None,
    ) -> List[ExternalEntry]:
        """Dispatch on path depth: root -> calendars, one segment -> events.

        Deeper paths address a single event, which is not browsable --
        an empty list is returned (the event is fetched via download()).
        """
        segments = [s for s in (path or "").strip("/").split("/") if s]
        if not segments:
            return await self._listCalendars()
        if len(segments) == 1:
            return await self._listEvents(segments[0], limit=limit)
        return []

    async def _listCalendars(self) -> List[ExternalEntry]:
        """List all calendars visible to the PAT as folder entries."""
        result = await _infomaniakGet(
            self._token, f"{_PIM_PREFIX}/calendar", baseUrl=_CALENDAR_BASE
        )
        if isinstance(result, dict) and result.get("error"):
            logger.warning(f"Calendar list-calendars failed: {result['error']}")
            return []
        data = _unwrapData(result)
        calendars = data.get("calendars", []) if isinstance(data, dict) else []
        entries: List[ExternalEntry] = []
        for cal in calendars:
            calId = str(cal.get("id", ""))
            if not calId:
                continue
            # Shared/public calendars carry user_id <= 0 or a null account_id.
            isShared = (cal.get("user_id") or 0) <= 0 or cal.get("account_id") is None
            entries.append(ExternalEntry(
                name=cal.get("name") or calId,
                path=f"/{calId}",
                isFolder=True,
                metadata={
                    "id": calId,
                    "kind": "calendar",
                    "color": cal.get("color"),
                    "shared": isShared,
                    "default": bool(cal.get("default")),
                },
            ))
        return entries

    def _eventWindow(self) -> tuple:
        """Return the (from, to) query window as ``Y-m-d H:i:s`` strings.

        30 days back / 60 days forward from now (UTC) -- under the
        vendor's hard 3-month limit (see class docstring).
        """
        now = datetime.now(timezone.utc)
        fromStr = (now - timedelta(days=self._PAST_DAYS)).strftime("%Y-%m-%d %H:%M:%S")
        toStr = (now + timedelta(days=self._FUTURE_DAYS)).strftime("%Y-%m-%d %H:%M:%S")
        return fromStr, toStr

    async def _listEvents(
        self,
        calendarId: str,
        limit: Optional[int],
    ) -> List[ExternalEntry]:
        """List events of one calendar within the default 90-day window."""
        fromStr, toStr = self._eventWindow()
        # Flat /event route with calendar_id query arg -- the nested
        # per-calendar route is not PAT-friendly (see class docstring).
        endpoint = (
            f"{_PIM_PREFIX}/event"
            f"?calendar_id={calendarId}"
            f"&from={quote(fromStr)}"
            f"&to={quote(toStr)}"
        )
        result = await _infomaniakGet(self._token, endpoint, baseUrl=_CALENDAR_BASE)
        if isinstance(result, dict) and result.get("error"):
            logger.warning(f"Calendar list-events {calendarId} failed: {result['error']}")
            return []
        data = _unwrapData(result)
        events = data if isinstance(data, list) else data.get("events", []) if isinstance(data, dict) else []
        entries: List[ExternalEntry] = []
        for ev in events:
            evId = str(ev.get("id") or ev.get("uid") or "")
            if not evId:
                continue
            title = ev.get("title") or ev.get("summary") or "(no title)"
            entries.append(ExternalEntry(
                name=title,
                path=f"/{calendarId}/{evId}",
                isFolder=False,
                metadata={
                    "id": evId,
                    "kind": "event",
                    "start": ev.get("start"),
                    "end": ev.get("end"),
                    "location": ev.get("location"),
                    "updated": ev.get("updated_at"),
                },
            ))
        if limit is not None:
            return entries[: int(limit)]
        return entries

    async def download(self, path: str) -> DownloadResult:
        """Export one event (``/{calendarId}/{eventId}``) as an .ics file.

        The export is addressed by event id alone; a second, best-effort
        metadata call supplies a human-readable filename (falls back to
        the event id when the detail fetch fails).
        """
        segments = [s for s in (path or "").strip("/").split("/") if s]
        if len(segments) < 2:
            return DownloadResult()
        eventId = segments[1]

        content = await _infomaniakDownload(
            self._token,
            f"{_PIM_PREFIX}/event/{eventId}/export",
            baseUrl=_CALENDAR_BASE,
        )
        if content is None:
            return DownloadResult()

        # Best-effort title lookup for the filename only.
        title = eventId
        meta = await _infomaniakGet(
            self._token,
            f"{_PIM_PREFIX}/event/{eventId}",
            baseUrl=_CALENDAR_BASE,
        )
        if isinstance(meta, dict) and not meta.get("error"):
            unwrapped = _unwrapData(meta)
            if isinstance(unwrapped, dict):
                # Detail payload sometimes nests the record under "event".
                event = unwrapped.get("event") if "event" in unwrapped else unwrapped
                if isinstance(event, dict):
                    title = event.get("title") or event.get("summary") or eventId
        return DownloadResult(
            data=content,
            fileName=f"{_safeFileName(title, 'event')}.ics",
            mimeType="text/calendar",
        )

    async def upload(self, path: str, data: bytes, fileName: str) -> dict:
        """Not implemented yet; always answers with an ``error`` payload."""
        return {"error": "Calendar upload not yet implemented"}

    async def search(
        self,
        query: str,
        path: Optional[str] = None,
        limit: Optional[int] = None,
    ) -> List[ExternalEntry]:
        """Case-insensitive client-side filter over listed events.

        The PIM Calendar API has no public search endpoint we can rely
        on, so this lists events of the current calendar (or all of
        them) within the default window and matches on title/location.
        """
        calendars = (
            await self._listCalendars()
            if not path
            else [ExternalEntry(name="", path=path, isFolder=True)]
        )
        if not calendars:
            return []
        needle = (query or "").strip().lower()
        results: List[ExternalEntry] = []
        for cal in calendars:
            calId = (cal.metadata or {}).get("id") or cal.path.strip("/")
            for ev in await self._listEvents(calId, limit=limit):
                hay = " ".join(
                    str(v) for v in (
                        ev.name,
                        (ev.metadata or {}).get("location") or "",
                    )
                ).lower()
                if not needle or needle in hay:
                    results.append(ev)
                if limit is not None and len(results) >= int(limit):
                    break
        return results[: int(limit)] if limit is not None else results
|
||||||
|
|
||||||
|
|
||||||
|
def _vcardEscape(value: Any) -> str:
|
||||||
|
"""Escape a value for vCard 3.0 -- backslash, comma, semicolon, newline."""
|
||||||
|
text = "" if value is None else str(value)
|
||||||
|
return (
|
||||||
|
text.replace("\\", "\\\\")
|
||||||
|
.replace(";", "\\;")
|
||||||
|
.replace(",", "\\,")
|
||||||
|
.replace("\r\n", "\\n")
|
||||||
|
.replace("\n", "\\n")
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _renderInfomaniakVcard(record: Dict[str, Any]) -> str:
    """Synthesize a vCard 3.0 string from an Infomaniak contact record.

    The Contacts PIM ``/contact/{id}/export`` endpoint is not PAT-friendly
    (302s to the OAuth login page), and ``/contact/{id}`` returns 500 with
    a PAT, so the canonical .vcf cannot be retrieved from Infomaniak.
    Instead the vCard is built locally from the listing record fetched
    with ``with=emails,phones,addresses,details``.

    vCard 3.0 is chosen as the common-denominator format accepted by
    Outlook, Google Contacts, Apple Contacts and Thunderbird (4.0 still
    imports poorly into Outlook).
    """
    firstname = record.get("firstname") or ""
    lastname = record.get("lastname") or ""
    displayName = (
        record.get("name")
        or " ".join(part for part in (firstname, lastname) if part).strip()
        or "Contact"
    )

    card: List[str] = [
        "BEGIN:VCARD",
        "VERSION:3.0",
        # N: Last;First;Middle;Prefix;Suffix
        f"N:{_vcardEscape(lastname)};{_vcardEscape(firstname)};;;",
        f"FN:{_vcardEscape(displayName)}",
    ]

    organization = record.get("organization") or ""
    if organization:
        card.append(f"ORG:{_vcardEscape(organization)}")

    # Emails/phones/websites arrive either as bare strings or as dicts;
    # normalize each entry to its scalar value and skip blanks.
    for email in record.get("emails") or []:
        if isinstance(email, str):
            address = email
        elif isinstance(email, dict):
            address = email.get("address")
        else:
            address = None
        if address:
            card.append(f"EMAIL;TYPE=INTERNET:{_vcardEscape(address)}")

    for phone in record.get("phones") or []:
        if isinstance(phone, str):
            number = phone
        elif isinstance(phone, dict):
            number = phone.get("number")
        else:
            number = None
        if number:
            card.append(f"TEL:{_vcardEscape(number)}")

    for addr in record.get("addresses") or []:
        if isinstance(addr, dict):
            # ADR: PO-Box;Extended;Street;City;Region;Postal;Country
            card.append(
                "ADR:;;"
                f"{_vcardEscape(addr.get('street'))};"
                f"{_vcardEscape(addr.get('city'))};"
                f"{_vcardEscape(addr.get('region'))};"
                f"{_vcardEscape(addr.get('zip') or addr.get('postal_code'))};"
                f"{_vcardEscape(addr.get('country'))}"
            )

    for site in record.get("websites") or []:
        if isinstance(site, str):
            url = site
        elif isinstance(site, dict):
            url = site.get("url")
        else:
            url = None
        if url:
            card.append(f"URL:{_vcardEscape(url)}")

    note = record.get("note") or ""
    if note:
        card.append(f"NOTE:{_vcardEscape(note)}")
    card.append("END:VCARD")
    return "\r\n".join(card) + "\r\n"
|
||||||
|
|
||||||
|
|
||||||
|
class ContactAdapter(ServiceAdapter):
    """Infomaniak Contacts adapter -- browse address books + contacts, .vcf download.

    Uses the public PIM endpoint at ``contacts.infomaniak.com/api/pim``,
    which authenticates with the PAT scope ``workspace:contact``.

    Path layout:
        ``/``                              -> list address books
        ``/{addressBookId}``               -> list contacts in that book
        ``/{addressBookId}/{contactId}``   -> single contact (download as .vcf)

    Endpoint particulars:
        Listing both address books and contacts is PAT-friendly. The
        contact-listing call uses ``with=emails,phones,addresses,details``
        so each record arrives with all the fields needed for vCard
        synthesis -- Infomaniak skips them by default. Detail and export
        endpoints (``/contact/{id}``, ``/contact/{id}/export``) are **not**
        PAT-friendly (the former 500s, the latter 302s to OAuth), so the
        ``download`` path re-fetches the listing and renders the vCard
        ourselves via :func:`_renderInfomaniakVcard`.
    """

    # Page size used when the caller passes no limit, and the hard upper bound
    # any caller-supplied limit is clamped to.
    _DEFAULT_CONTACT_LIMIT = 200
    _MAX_CONTACT_LIMIT = 1000
    # ``with=`` expansions needed for vCard synthesis; the API omits these
    # fields unless explicitly requested.
    _CONTACT_FIELDS = "emails,phones,addresses,details"

    def __init__(self, accessToken: str):
        # Personal Access Token carrying the ``workspace:contact`` scope.
        self._token = accessToken

    async def browse(
        self,
        path: str,
        filter: Optional[str] = None,
        limit: Optional[int] = None,
    ) -> List[ExternalEntry]:
        """Route ``path`` to the matching listing (see class docstring for layout).

        ``filter`` is accepted for interface parity but not applied here.
        """
        segments = [s for s in (path or "").strip("/").split("/") if s]
        if not segments:
            return await self._listAddressBooks()
        if len(segments) == 1:
            return await self._listContacts(segments[0], limit=limit)
        # Deeper paths address a single contact -- downloadable, not browsable.
        return []

    async def _listAddressBooks(self) -> List[ExternalEntry]:
        """List the user's address books as folder entries (empty list on API error)."""
        result = await _infomaniakGet(
            self._token, f"{_PIM_PREFIX}/addressbook", baseUrl=_CONTACTS_BASE
        )
        if isinstance(result, dict) and result.get("error"):
            logger.warning(f"Contacts list-addressbooks failed: {result['error']}")
            return []
        data = _unwrapData(result)
        books = data.get("addressbooks", []) if isinstance(data, dict) else []
        entries: List[ExternalEntry] = []
        for book in books:
            bookId = str(book.get("id", ""))
            if not bookId:
                continue
            # A book is considered shared when flagged so, or when it has no
            # positive owning user id.
            isShared = bool(book.get("is_shared")) or (book.get("user_id") or 0) <= 0
            # The shared organisation directory has an empty name -- give it a
            # human label so the UDB tree is not blank.
            name = book.get("name") or (
                "Organisation" if book.get("is_dynamic_organisation_member_directory") else bookId
            )
            entries.append(ExternalEntry(
                name=name,
                path=f"/{bookId}",
                isFolder=True,
                metadata={
                    "id": bookId,
                    "kind": "addressbook",
                    "color": book.get("color"),
                    "shared": isShared,
                    "default": bool(book.get("default")),
                },
            ))
        return entries

    async def _fetchContacts(
        self,
        addressBookId: str,
        perPage: int,
    ) -> List[Dict[str, Any]]:
        """Raw listing call -- shared by browse and download.

        Returns the raw contact dicts (with vCard-relevant ``with=`` fields
        expanded); an empty list on API error or unexpected payload shape.
        """
        endpoint = (
            f"{_PIM_PREFIX}/addressbook/{addressBookId}/contact"
            f"?per_page={perPage}&with={self._CONTACT_FIELDS}"
        )
        result = await _infomaniakGet(self._token, endpoint, baseUrl=_CONTACTS_BASE)
        if isinstance(result, dict) and result.get("error"):
            logger.warning(
                f"Contacts list-contacts {addressBookId} failed: {result['error']}"
            )
            return []
        data = _unwrapData(result)
        # The unwrapped payload is either a bare list of contacts or a dict
        # with a "contacts" key -- tolerate both.
        if isinstance(data, list):
            return [c for c in data if isinstance(c, dict)]
        if isinstance(data, dict):
            contacts = data.get("contacts", [])
            return [c for c in contacts if isinstance(c, dict)]
        return []

    async def _listContacts(
        self,
        addressBookId: str,
        limit: Optional[int],
    ) -> List[ExternalEntry]:
        """List contacts of one address book as file-like entries."""
        # Clamp caller-supplied limits to [1, _MAX_CONTACT_LIMIT].
        effectiveLimit = self._DEFAULT_CONTACT_LIMIT if limit is None else max(
            1, min(int(limit), self._MAX_CONTACT_LIMIT),
        )
        contacts = await self._fetchContacts(addressBookId, perPage=effectiveLimit)
        entries: List[ExternalEntry] = []
        for c in contacts:
            cId = str(c.get("id") or c.get("uid") or "")
            if not cId:
                continue
            firstName = c.get("firstname")
            lastName = c.get("lastname")
            # Display-name fallbacks: explicit name -> "first last" ->
            # first email -> raw id.
            displayName = (
                c.get("name")
                or " ".join(p for p in (firstName, lastName) if p).strip()
                or (c.get("emails") or [None])[0]
                or cId
            )
            firstEmail = (c.get("emails") or [None])[0]
            firstPhone = (c.get("phones") or [None])[0]
            entries.append(ExternalEntry(
                name=str(displayName),
                path=f"/{addressBookId}/{cId}",
                isFolder=False,
                metadata={
                    "id": cId,
                    "kind": "contact",
                    "email": firstEmail,
                    "phone": firstPhone,
                    "organization": c.get("organization"),
                },
            ))
        return entries

    async def download(self, path: str) -> DownloadResult:
        """Synthesize and return a ``.vcf`` for ``/{addressBookId}/{contactId}``.

        Returns an empty :class:`DownloadResult` for malformed paths or when
        the contact cannot be found in the (re-fetched) listing.
        """
        segments = [s for s in (path or "").strip("/").split("/") if s]
        if len(segments) < 2:
            return DownloadResult()
        addressBookId, contactId = segments[0], segments[1]

        # The PIM contact-detail endpoint (``/contact/{id}``) returns 500
        # against a PAT, and ``/contact/{id}/export`` 302s to OAuth. We
        # therefore re-fetch the listing (which IS PAT-friendly) with all
        # vCard-relevant fields, then synthesize the .vcf ourselves.
        contacts = await self._fetchContacts(
            addressBookId, perPage=self._MAX_CONTACT_LIMIT
        )
        record = next((c for c in contacts if str(c.get("id")) == contactId), None)
        if record is None:
            logger.warning(
                f"Contacts download: contact {contactId} not found in book "
                f"{addressBookId}"
            )
            return DownloadResult()

        firstName = record.get("firstname") or ""
        lastName = record.get("lastname") or ""
        displayName = (
            record.get("name")
            or " ".join(p for p in (firstName, lastName) if p).strip()
            or contactId
        )
        vcardText = _renderInfomaniakVcard(record)
        return DownloadResult(
            data=vcardText.encode("utf-8"),
            fileName=f"{_safeFileName(displayName, 'contact')}.vcf",
            mimeType="text/vcard",
        )

    async def upload(self, path: str, data: bytes, fileName: str) -> dict:
        # Write access is not implemented for the Contacts service.
        return {"error": "Contacts upload not yet implemented"}

    async def search(
        self,
        query: str,
        path: Optional[str] = None,
        limit: Optional[int] = None,
    ) -> List[ExternalEntry]:
        """Client-side contact search across one or all address books."""
        # No public search endpoint -- list contacts of the current (or all)
        # address books and filter client-side on display name / email.
        books = (
            await self._listAddressBooks()
            if not path
            else [ExternalEntry(name="", path=path, isFolder=True)]
        )
        if not books:
            return []
        needle = (query or "").strip().lower()
        results: List[ExternalEntry] = []
        for book in books:
            bookId = (book.metadata or {}).get("id") or book.path.strip("/")
            for c in await self._listContacts(bookId, limit=limit):
                hay = " ".join(
                    str(v) for v in (
                        c.name,
                        (c.metadata or {}).get("email") or "",
                        (c.metadata or {}).get("organization") or "",
                    )
                ).lower()
                if not needle or needle in hay:
                    results.append(c)
                # NOTE(review): this break exits only the inner loop; further
                # books are still scanned. The final slice enforces the limit.
                if limit is not None and len(results) >= int(limit):
                    break
        return results[: int(limit)] if limit is not None else results
|
||||||
|
|
||||||
|
|
||||||
|
class InfomaniakConnector(ProviderConnector):
    """Infomaniak ProviderConnector -- kDrive + Calendar + Contacts today.

    Mail is reserved on the PAT (scope ``workspace:mail``) but not wired
    up here yet -- Infomaniak has no public PAT-friendly Mail endpoint
    today (the PIM Mail routes 302 to OAuth, the legacy ``/api/mail`` route
    301-redirects to an internal Cyrus port). Once a working endpoint is
    found, the corresponding adapter can be slotted into ``_SERVICE_MAP``
    without any token rotation on the user side.
    """

    # Service-name -> adapter-class dispatch table.
    _SERVICE_MAP = {
        "kdrive": KdriveAdapter,
        "calendar": CalendarAdapter,
        "contact": ContactAdapter,
    }

    def getAvailableServices(self) -> List[str]:
        """Names of the services this connector can hand out adapters for."""
        return [*self._SERVICE_MAP]

    def getServiceAdapter(self, service: str) -> ServiceAdapter:
        """Instantiate the adapter for ``service``, sharing this connector's token.

        Raises ``ValueError`` for unknown service names.
        """
        if service not in self._SERVICE_MAP:
            raise ValueError(
                f"Unknown Infomaniak service: {service}. "
                f"Available: {list(self._SERVICE_MAP.keys())}"
            )
        return self._SERVICE_MAP[service](self.accessToken)
|
||||||
|
|
@ -126,6 +126,11 @@ def _stripGraphBase(url: str) -> str:
|
||||||
|
|
||||||
def _graphItemToExternalEntry(item: Dict[str, Any], basePath: str = "") -> ExternalEntry:
|
def _graphItemToExternalEntry(item: Dict[str, Any], basePath: str = "") -> ExternalEntry:
|
||||||
isFolder = "folder" in item
|
isFolder = "folder" in item
|
||||||
|
# Graph exposes the driveItem content hash as ``eTag`` (quoted) or
|
||||||
|
# ``cTag``; we normalise to a "revision" string so callers can use it as a
|
||||||
|
# stable ``contentVersion`` for idempotent ingestion without re-downloading
|
||||||
|
# file bytes.
|
||||||
|
revision = item.get("eTag") or item.get("cTag")
|
||||||
return ExternalEntry(
|
return ExternalEntry(
|
||||||
name=item.get("name", ""),
|
name=item.get("name", ""),
|
||||||
path=f"{basePath}/{item.get('name', '')}" if basePath else item.get("name", ""),
|
path=f"{basePath}/{item.get('name', '')}" if basePath else item.get("name", ""),
|
||||||
|
|
@ -137,6 +142,9 @@ def _graphItemToExternalEntry(item: Dict[str, Any], basePath: str = "") -> Exter
|
||||||
"id": item.get("id"),
|
"id": item.get("id"),
|
||||||
"webUrl": item.get("webUrl"),
|
"webUrl": item.get("webUrl"),
|
||||||
"childCount": item.get("folder", {}).get("childCount") if isFolder else None,
|
"childCount": item.get("folder", {}).get("childCount") if isFolder else None,
|
||||||
|
"revision": revision,
|
||||||
|
"lastModifiedDateTime": item.get("lastModifiedDateTime"),
|
||||||
|
"parentReference": item.get("parentReference", {}),
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
@ -167,21 +175,36 @@ class SharepointAdapter(_GraphApiMixin, ServiceAdapter):
|
||||||
return await self._discoverSites()
|
return await self._discoverSites()
|
||||||
|
|
||||||
if not folderPath or folderPath == "/":
|
if not folderPath or folderPath == "/":
|
||||||
endpoint = f"sites/{siteId}/drive/root/children"
|
endpoint: Optional[str] = f"sites/{siteId}/drive/root/children?$top=200"
|
||||||
else:
|
else:
|
||||||
cleanPath = folderPath.lstrip("/")
|
cleanPath = folderPath.lstrip("/")
|
||||||
endpoint = f"sites/{siteId}/drive/root:/{cleanPath}:/children"
|
endpoint = f"sites/{siteId}/drive/root:/{cleanPath}:/children?$top=200"
|
||||||
|
|
||||||
result = await self._graphGet(endpoint)
|
# Follow @odata.nextLink until a hard cap is reached so large libraries
|
||||||
if "error" in result:
|
# are fully enumerated (required for bootstrap). Per-page size uses
|
||||||
logger.warning(f"SharePoint browse failed: {result['error']}")
|
# Graph's max supported value to minimise round-trips.
|
||||||
return []
|
effectiveLimit = int(limit) if limit is not None else None
|
||||||
|
items: List[Dict[str, Any]] = []
|
||||||
|
hardCap = 5000
|
||||||
|
while endpoint and len(items) < hardCap:
|
||||||
|
result = await self._graphGet(endpoint)
|
||||||
|
if "error" in result:
|
||||||
|
logger.warning(f"SharePoint browse failed: {result['error']}")
|
||||||
|
break
|
||||||
|
for raw in result.get("value", []) or []:
|
||||||
|
items.append(raw)
|
||||||
|
if effectiveLimit is not None and len(items) >= effectiveLimit:
|
||||||
|
break
|
||||||
|
if effectiveLimit is not None and len(items) >= effectiveLimit:
|
||||||
|
break
|
||||||
|
nextLink = result.get("@odata.nextLink")
|
||||||
|
endpoint = _stripGraphBase(nextLink) if nextLink else None
|
||||||
|
|
||||||
entries = [_graphItemToExternalEntry(item, path) for item in result.get("value", [])]
|
entries = [_graphItemToExternalEntry(item, path) for item in items]
|
||||||
if filter:
|
if filter:
|
||||||
entries = [e for e in entries if _matchFilter(e, filter)]
|
entries = [e for e in entries if _matchFilter(e, filter)]
|
||||||
if limit is not None:
|
if effectiveLimit is not None:
|
||||||
entries = entries[: max(1, int(limit))]
|
entries = entries[: max(1, effectiveLimit)]
|
||||||
return entries
|
return entries
|
||||||
|
|
||||||
async def _discoverSites(self) -> List[ExternalEntry]:
|
async def _discoverSites(self) -> List[ExternalEntry]:
|
||||||
|
|
@ -841,6 +864,285 @@ class OneDriveAdapter(_GraphApiMixin, ServiceAdapter):
|
||||||
return entries
|
return entries
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Calendar Adapter
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
class CalendarAdapter(_GraphApiMixin, ServiceAdapter):
    """ServiceAdapter for Outlook Calendar via Microsoft Graph.

    Path conventions:
        ``""`` / ``"/"``              -> list user calendars
        ``"/<calendarId>"``           -> list events in that calendar
        ``"/<calendarId>/<eventId>"`` -> reserved for future event detail browse

    Downloads return a synthesised ``.ics`` (VCALENDAR/VEVENT) since Microsoft
    Graph does not expose a ``/$value`` endpoint for events.
    """

    # Event count used when the caller passes no limit, the clamp ceiling,
    # and the per-request Graph page size.
    _DEFAULT_EVENT_LIMIT = 100
    _MAX_EVENT_LIMIT = 1000
    _PAGE_SIZE = 100

    async def browse(
        self,
        path: str,
        filter: Optional[str] = None,
        limit: Optional[int] = None,
    ) -> List[ExternalEntry]:
        """List calendars (root path) or the events of one calendar.

        ``filter`` is applied as a case-insensitive substring match on
        calendar names only (not events).
        """
        cleanPath = (path or "").strip("/")
        if not cleanPath:
            result = await self._graphGet("me/calendars?$top=100")
            if "error" in result:
                logger.warning(f"MSFT Calendar list failed: {result['error']}")
                return []
            calendars = result.get("value", [])
            if filter:
                calendars = [c for c in calendars if filter.lower() in (c.get("name") or "").lower()]
            return [
                ExternalEntry(
                    name=c.get("name", ""),
                    path=f"/{c.get('id', '')}",
                    isFolder=True,
                    metadata={
                        "id": c.get("id"),
                        "color": c.get("color"),
                        "owner": (c.get("owner") or {}).get("address"),
                        "isDefaultCalendar": c.get("isDefaultCalendar", False),
                        "canEdit": c.get("canEdit", False),
                    },
                )
                for c in calendars
            ]

        calendarId = cleanPath.split("/", 1)[0]
        # Clamp caller-supplied limits to [1, _MAX_EVENT_LIMIT].
        effectiveLimit = self._DEFAULT_EVENT_LIMIT if limit is None else max(1, min(int(limit), self._MAX_EVENT_LIMIT))
        pageSize = min(self._PAGE_SIZE, effectiveLimit)
        # Newest events first; pagination below follows @odata.nextLink.
        endpoint: Optional[str] = (
            f"me/calendars/{calendarId}/events"
            f"?$top={pageSize}&$orderby=start/dateTime desc"
        )
        events: List[Dict[str, Any]] = []
        while endpoint and len(events) < effectiveLimit:
            result = await self._graphGet(endpoint)
            if "error" in result:
                logger.warning(f"MSFT Calendar events failed: {result['error']}")
                break
            for ev in result.get("value", []):
                events.append(ev)
                if len(events) >= effectiveLimit:
                    break
            nextLink = result.get("@odata.nextLink")
            # nextLink is absolute; strip the Graph base so _graphGet can reuse it.
            endpoint = _stripGraphBase(nextLink) if nextLink else None

        return [
            ExternalEntry(
                name=ev.get("subject", "(no subject)"),
                path=f"/{calendarId}/{ev.get('id', '')}",
                isFolder=False,
                mimeType="text/calendar",
                metadata={
                    "id": ev.get("id"),
                    "start": (ev.get("start") or {}).get("dateTime"),
                    "end": (ev.get("end") or {}).get("dateTime"),
                    "location": (ev.get("location") or {}).get("displayName"),
                    "organizer": (ev.get("organizer") or {}).get("emailAddress", {}).get("address"),
                    "isAllDay": ev.get("isAllDay", False),
                    "webLink": ev.get("webLink"),
                },
            )
            for ev in events
        ]

    async def download(self, path: str) -> DownloadResult:
        """Fetch one event and return it as a synthesised ``.ics`` file.

        Returns an empty :class:`DownloadResult` for non-event paths or
        Graph errors.
        """
        cleanPath = (path or "").strip("/")
        if "/" not in cleanPath:
            return DownloadResult()
        eventId = cleanPath.split("/")[-1]
        ev = await self._graphGet(f"me/events/{eventId}")
        if "error" in ev:
            logger.warning(f"MSFT Calendar event fetch failed: {ev['error']}")
            return DownloadResult()
        icsBytes = _eventToIcs(ev)
        subject = ev.get("subject") or eventId
        safeName = _safeFileName(subject) or "event"
        return DownloadResult(
            data=icsBytes,
            fileName=f"{safeName}.ics",
            mimeType="text/calendar",
        )

    async def upload(self, path: str, data: bytes, fileName: str) -> dict:
        # Write access is not supported for the Calendar service.
        return {"error": "Calendar upload not supported"}

    async def search(
        self,
        query: str,
        path: Optional[str] = None,
        limit: Optional[int] = None,
    ) -> List[ExternalEntry]:
        """Server-side event search via Graph ``$search`` (``path`` is ignored)."""
        # NOTE(review): this escapes single quotes but not embedded double
        # quotes, which delimit the $search term -- confirm behavior for
        # queries containing '"'.
        safeQuery = query.replace("'", "''")
        effectiveLimit = self._DEFAULT_EVENT_LIMIT if limit is None else max(1, min(int(limit), self._MAX_EVENT_LIMIT))
        endpoint = f"me/events?$search=\"{safeQuery}\"&$top={effectiveLimit}"
        result = await self._graphGet(endpoint)
        if "error" in result:
            return []
        return [
            ExternalEntry(
                name=ev.get("subject", "(no subject)"),
                path=f"/search/{ev.get('id', '')}",
                isFolder=False,
                mimeType="text/calendar",
                metadata={
                    "id": ev.get("id"),
                    "start": (ev.get("start") or {}).get("dateTime"),
                    "end": (ev.get("end") or {}).get("dateTime"),
                },
            )
            for ev in result.get("value", [])
        ]
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Contacts Adapter
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
class ContactsAdapter(_GraphApiMixin, ServiceAdapter):
    """ServiceAdapter for Outlook Contacts via Microsoft Graph.

    Path conventions:
        ``""``                         -> list contact folders (default + custom)
        ``"/<folderId>"``              -> list contacts in that folder; the
                                          virtual id ``default`` maps to
                                          ``/me/contacts`` (the user's primary
                                          contact list)
        ``"/<folderId>/<contactId>"``  -> reserved for future detail browse

    Downloads return a synthesised vCard 3.0 (.vcf) since Microsoft Graph
    does not expose a ``/$value`` endpoint for contacts.
    """

    # Contact count used when the caller passes no limit, the clamp ceiling,
    # and the per-request Graph page size.
    _DEFAULT_CONTACT_LIMIT = 200
    _MAX_CONTACT_LIMIT = 1000
    _PAGE_SIZE = 100
    # Virtual folder id mapping to /me/contacts (Graph has no folder id for it).
    _DEFAULT_FOLDER_ID = "default"

    async def browse(
        self,
        path: str,
        filter: Optional[str] = None,
        limit: Optional[int] = None,
    ) -> List[ExternalEntry]:
        """List contact folders (root path) or the contacts of one folder.

        ``filter`` is accepted for interface parity but not applied here.
        """
        cleanPath = (path or "").strip("/")
        if not cleanPath:
            # Always present the virtual default folder first, even if the
            # custom-folder listing fails.
            folders: List[ExternalEntry] = [
                ExternalEntry(
                    name="Kontakte",
                    path=f"/{self._DEFAULT_FOLDER_ID}",
                    isFolder=True,
                    metadata={"id": self._DEFAULT_FOLDER_ID, "isDefault": True},
                ),
            ]
            result = await self._graphGet("me/contactFolders?$top=100")
            if "error" not in result:
                for f in result.get("value", []):
                    folders.append(
                        ExternalEntry(
                            name=f.get("displayName", ""),
                            path=f"/{f.get('id', '')}",
                            isFolder=True,
                            metadata={"id": f.get("id"), "parentFolderId": f.get("parentFolderId")},
                        )
                    )
            else:
                logger.warning(f"MSFT contactFolders list failed: {result['error']}")
            return folders

        folderId = cleanPath.split("/", 1)[0]
        # Clamp caller-supplied limits to [1, _MAX_CONTACT_LIMIT].
        effectiveLimit = self._DEFAULT_CONTACT_LIMIT if limit is None else max(1, min(int(limit), self._MAX_CONTACT_LIMIT))
        pageSize = min(self._PAGE_SIZE, effectiveLimit)
        if folderId == self._DEFAULT_FOLDER_ID:
            endpoint: Optional[str] = f"me/contacts?$top={pageSize}&$orderby=displayName"
        else:
            endpoint = f"me/contactFolders/{folderId}/contacts?$top={pageSize}&$orderby=displayName"

        contacts: List[Dict[str, Any]] = []
        while endpoint and len(contacts) < effectiveLimit:
            result = await self._graphGet(endpoint)
            if "error" in result:
                logger.warning(f"MSFT contacts list failed: {result['error']}")
                break
            for c in result.get("value", []):
                contacts.append(c)
                if len(contacts) >= effectiveLimit:
                    break
            nextLink = result.get("@odata.nextLink")
            # nextLink is absolute; strip the Graph base so _graphGet can reuse it.
            endpoint = _stripGraphBase(nextLink) if nextLink else None

        return [
            ExternalEntry(
                name=c.get("displayName") or _personLabel(c) or "(no name)",
                path=f"/{folderId}/{c.get('id', '')}",
                isFolder=False,
                mimeType="text/vcard",
                metadata={
                    "id": c.get("id"),
                    "givenName": c.get("givenName"),
                    "surname": c.get("surname"),
                    "companyName": c.get("companyName"),
                    "emailAddresses": [e.get("address") for e in (c.get("emailAddresses") or []) if e.get("address")],
                    "businessPhones": c.get("businessPhones") or [],
                    "mobilePhone": c.get("mobilePhone"),
                },
            )
            for c in contacts
        ]

    async def download(self, path: str) -> DownloadResult:
        """Fetch one contact and return it as a synthesised ``.vcf`` file.

        Returns an empty :class:`DownloadResult` for non-contact paths or
        Graph errors.
        """
        cleanPath = (path or "").strip("/")
        if "/" not in cleanPath:
            return DownloadResult()
        contactId = cleanPath.split("/")[-1]
        c = await self._graphGet(f"me/contacts/{contactId}")
        if "error" in c:
            logger.warning(f"MSFT contact fetch failed: {c['error']}")
            return DownloadResult()
        vcfBytes = _contactToVcard(c)
        label = c.get("displayName") or _personLabel(c) or contactId
        safeName = _safeFileName(label) or "contact"
        return DownloadResult(
            data=vcfBytes,
            fileName=f"{safeName}.vcf",
            mimeType="text/vcard",
        )

    async def upload(self, path: str, data: bytes, fileName: str) -> dict:
        # Write access is not supported for the Contacts service.
        return {"error": "Contacts upload not supported"}

    async def search(
        self,
        query: str,
        path: Optional[str] = None,
        limit: Optional[int] = None,
    ) -> List[ExternalEntry]:
        """Server-side contact search via Graph ``$search`` (``path`` is ignored)."""
        # NOTE(review): escapes single quotes but not embedded double quotes,
        # which delimit the $search term -- confirm behavior for queries
        # containing '"'.
        safeQuery = query.replace("'", "''")
        effectiveLimit = self._DEFAULT_CONTACT_LIMIT if limit is None else max(1, min(int(limit), self._MAX_CONTACT_LIMIT))
        endpoint = f"me/contacts?$search=\"{safeQuery}\"&$top={effectiveLimit}"
        result = await self._graphGet(endpoint)
        if "error" in result:
            return []
        return [
            ExternalEntry(
                name=c.get("displayName") or _personLabel(c) or "(no name)",
                path=f"/search/{c.get('id', '')}",
                isFolder=False,
                mimeType="text/vcard",
                metadata={"id": c.get("id")},
            )
            for c in result.get("value", [])
        ]
|
||||||
|
|
||||||
|
|
||||||
# ---------------------------------------------------------------------------
|
# ---------------------------------------------------------------------------
|
||||||
# MsftConnector (1:n)
|
# MsftConnector (1:n)
|
||||||
# ---------------------------------------------------------------------------
|
# ---------------------------------------------------------------------------
|
||||||
|
|
@ -853,6 +1155,8 @@ class MsftConnector(ProviderConnector):
|
||||||
"outlook": OutlookAdapter,
|
"outlook": OutlookAdapter,
|
||||||
"teams": TeamsAdapter,
|
"teams": TeamsAdapter,
|
||||||
"onedrive": OneDriveAdapter,
|
"onedrive": OneDriveAdapter,
|
||||||
|
"calendar": CalendarAdapter,
|
||||||
|
"contact": ContactsAdapter,
|
||||||
}
|
}
|
||||||
|
|
||||||
def getAvailableServices(self) -> List[str]:
|
def getAvailableServices(self) -> List[str]:
|
||||||
|
|
@ -891,3 +1195,143 @@ def _matchFilter(entry: ExternalEntry, pattern: str) -> bool:
|
||||||
"""Simple glob-like filter (supports * wildcard)."""
|
"""Simple glob-like filter (supports * wildcard)."""
|
||||||
import fnmatch
|
import fnmatch
|
||||||
return fnmatch.fnmatch(entry.name.lower(), pattern.lower())
|
return fnmatch.fnmatch(entry.name.lower(), pattern.lower())
|
||||||
|
|
||||||
|
|
||||||
|
def _safeFileName(name: str) -> str:
|
||||||
|
"""Strip path-unsafe characters and trim length so the result is a usable file name."""
|
||||||
|
import re
|
||||||
|
return re.sub(r'[<>:"/\\|?*\x00-\x1f]', "_", name or "")[:80].strip(". ")
|
||||||
|
|
||||||
|
|
||||||
|
def _personLabel(contact: Dict[str, Any]) -> str:
|
||||||
|
given = (contact.get("givenName") or "").strip()
|
||||||
|
surname = (contact.get("surname") or "").strip()
|
||||||
|
if given or surname:
|
||||||
|
return f"{given} {surname}".strip()
|
||||||
|
company = (contact.get("companyName") or "").strip()
|
||||||
|
return company
|
||||||
|
|
||||||
|
|
||||||
|
def _icsEscape(value: str) -> str:
|
||||||
|
"""Escape RFC 5545 reserved characters in TEXT properties."""
|
||||||
|
if value is None:
|
||||||
|
return ""
|
||||||
|
return (
|
||||||
|
value.replace("\\", "\\\\")
|
||||||
|
.replace(";", "\\;")
|
||||||
|
.replace(",", "\\,")
|
||||||
|
.replace("\r\n", "\\n")
|
||||||
|
.replace("\n", "\\n")
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _icsDateTime(value: Optional[str]) -> Optional[str]:
|
||||||
|
"""Convert an ISO datetime string to an RFC 5545 DATE-TIME value (UTC)."""
|
||||||
|
if not value:
|
||||||
|
return None
|
||||||
|
from datetime import datetime, timezone
|
||||||
|
try:
|
||||||
|
normalized = value.replace("Z", "+00:00") if value.endswith("Z") else value
|
||||||
|
dt = datetime.fromisoformat(normalized)
|
||||||
|
if dt.tzinfo is None:
|
||||||
|
dt = dt.replace(tzinfo=timezone.utc)
|
||||||
|
return dt.astimezone(timezone.utc).strftime("%Y%m%dT%H%M%SZ")
|
||||||
|
except (TypeError, ValueError):
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def _eventToIcs(event: Dict[str, Any]) -> bytes:
    """Build a minimal RFC 5545 VCALENDAR/VEVENT for a Graph event payload.

    Emits UID and DTSTAMP always (DTSTAMP falls back to "now" when the event
    has no usable ``lastModifiedDateTime``); DTSTART/DTEND/SUMMARY/LOCATION/
    DESCRIPTION/ORGANIZER/ATTENDEE lines only when the payload provides them.
    Returns CRLF-terminated UTF-8 bytes.
    """
    from datetime import datetime, timezone

    stamp = _icsDateTime(event.get("lastModifiedDateTime")) or datetime.now(timezone.utc).strftime("%Y%m%dT%H%M%SZ")
    card = [
        "BEGIN:VCALENDAR",
        "VERSION:2.0",
        "PRODID:-//PowerOn//MSFT-Calendar-Adapter//EN",
        "CALSCALE:GREGORIAN",
        "BEGIN:VEVENT",
        f"UID:{event.get('iCalUId') or event.get('id') or 'unknown@poweron'}",
        f"DTSTAMP:{stamp}",
    ]
    # Optional single-value properties, in spec-stable order.
    optionalProps = (
        ("DTSTART", _icsDateTime((event.get("start") or {}).get("dateTime"))),
        ("DTEND", _icsDateTime((event.get("end") or {}).get("dateTime"))),
        ("SUMMARY", _icsEscape(event.get("subject") or "")),
        ("LOCATION", _icsEscape((event.get("location") or {}).get("displayName") or "")),
        ("DESCRIPTION", _icsEscape((event.get("body") or {}).get("content") or "")),
    )
    for prop, rendered in optionalProps:
        if rendered:
            card.append(f"{prop}:{rendered}")
    organizerAddr = (event.get("organizer") or {}).get("emailAddress", {}).get("address")
    if organizerAddr:
        card.append(f"ORGANIZER:mailto:{organizerAddr}")
    for attendee in (event.get("attendees") or []):
        attendeeAddr = (attendee.get("emailAddress") or {}).get("address")
        if attendeeAddr:
            card.append(f"ATTENDEE:mailto:{attendeeAddr}")
    card.append("END:VEVENT")
    card.append("END:VCALENDAR")
    return ("\r\n".join(card) + "\r\n").encode("utf-8")
|
||||||
|
|
||||||
|
|
||||||
|
def _contactToVcard(contact: Dict[str, Any]) -> bytes:
|
||||||
|
"""Build a vCard 3.0 from a Graph /me/contacts payload."""
|
||||||
|
given = contact.get("givenName") or ""
|
||||||
|
surname = contact.get("surname") or ""
|
||||||
|
middle = contact.get("middleName") or ""
|
||||||
|
fn = contact.get("displayName") or _personLabel(contact) or contact.get("companyName") or ""
|
||||||
|
|
||||||
|
lines = [
|
||||||
|
"BEGIN:VCARD",
|
||||||
|
"VERSION:3.0",
|
||||||
|
f"N:{surname};{given};{middle};;",
|
||||||
|
f"FN:{fn}",
|
||||||
|
]
|
||||||
|
if contact.get("companyName"):
|
||||||
|
org = contact["companyName"]
|
||||||
|
if contact.get("department"):
|
||||||
|
org = f"{org};{contact['department']}"
|
||||||
|
lines.append(f"ORG:{org}")
|
||||||
|
if contact.get("jobTitle"):
|
||||||
|
lines.append(f"TITLE:{contact['jobTitle']}")
|
||||||
|
for em in (contact.get("emailAddresses") or []):
|
||||||
|
addr = em.get("address")
|
||||||
|
if addr:
|
||||||
|
lines.append(f"EMAIL;TYPE=INTERNET:{addr}")
|
||||||
|
for phone in (contact.get("businessPhones") or []):
|
||||||
|
if phone:
|
||||||
|
lines.append(f"TEL;TYPE=WORK,VOICE:{phone}")
|
||||||
|
if contact.get("mobilePhone"):
|
||||||
|
lines.append(f"TEL;TYPE=CELL,VOICE:{contact['mobilePhone']}")
|
||||||
|
for phone in (contact.get("homePhones") or []):
|
||||||
|
if phone:
|
||||||
|
lines.append(f"TEL;TYPE=HOME,VOICE:{phone}")
|
||||||
|
|
||||||
|
def _appendAddress(addr: Dict[str, Any], typ: str) -> None:
|
||||||
|
if not addr:
|
||||||
|
return
|
||||||
|
street = addr.get("street") or ""
|
||||||
|
city = addr.get("city") or ""
|
||||||
|
state = addr.get("state") or ""
|
||||||
|
postal = addr.get("postalCode") or ""
|
||||||
|
country = addr.get("countryOrRegion") or ""
|
||||||
|
if any([street, city, state, postal, country]):
|
||||||
|
lines.append(f"ADR;TYPE={typ}:;;{street};{city};{state};{postal};{country}")
|
||||||
|
|
||||||
|
_appendAddress(contact.get("businessAddress") or {}, "WORK")
|
||||||
|
_appendAddress(contact.get("homeAddress") or {}, "HOME")
|
||||||
|
_appendAddress(contact.get("otherAddress") or {}, "OTHER")
|
||||||
|
if contact.get("personalNotes"):
|
||||||
|
lines.append(f"NOTE:{_icsEscape(contact['personalNotes'])}")
|
||||||
|
lines.append(f"UID:{contact.get('id', '')}")
|
||||||
|
lines.append("END:VCARD")
|
||||||
|
return ("\r\n".join(lines) + "\r\n").encode("utf-8")
|
||||||
|
|
|
||||||
|
|
@ -125,7 +125,7 @@ class AiModel(BaseModel):
|
||||||
|
|
||||||
# Metadata
|
# Metadata
|
||||||
version: Optional[str] = Field(default=None, description="Model version")
|
version: Optional[str] = Field(default=None, description="Model version")
|
||||||
lastUpdated: Optional[str] = Field(default=None, description="Last update timestamp")
|
lastUpdated: Optional[float] = Field(default=None, description="Last update timestamp (UTC unix)", json_schema_extra={"frontend_type": "timestamp"})
|
||||||
|
|
||||||
model_config = ConfigDict(arbitrary_types_allowed=True) # Allow Callable type
|
model_config = ConfigDict(arbitrary_types_allowed=True) # Allow Callable type
|
||||||
|
|
||||||
|
|
@ -162,6 +162,7 @@ class AiCallOptions(BaseModel):
|
||||||
|
|
||||||
# Provider filtering (from UI multiselect or automation config)
|
# Provider filtering (from UI multiselect or automation config)
|
||||||
allowedProviders: Optional[List[str]] = Field(default=None, description="List of allowed AI providers to use (empty = all RBAC-permitted)")
|
allowedProviders: Optional[List[str]] = Field(default=None, description="List of allowed AI providers to use (empty = all RBAC-permitted)")
|
||||||
|
allowedModels: Optional[List[str]] = Field(default=None, description="Whitelist of allowed model names (AND-filter with allowedProviders). None/empty = all allowed.")
|
||||||
|
|
||||||
|
|
||||||
class AiCallRequest(BaseModel):
|
class AiCallRequest(BaseModel):
|
||||||
|
|
|
||||||
|
|
@ -34,7 +34,7 @@ class AiAuditLogEntry(BaseModel):
|
||||||
|
|
||||||
userId: str = Field(
|
userId: str = Field(
|
||||||
description="ID of the user who triggered the AI call",
|
description="ID of the user who triggered the AI call",
|
||||||
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "User"}},
|
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"}},
|
||||||
)
|
)
|
||||||
username: Optional[str] = Field(
|
username: Optional[str] = Field(
|
||||||
default=None,
|
default=None,
|
||||||
|
|
@ -43,17 +43,17 @@ class AiAuditLogEntry(BaseModel):
|
||||||
)
|
)
|
||||||
mandateId: str = Field(
|
mandateId: str = Field(
|
||||||
description="Mandate context of the call",
|
description="Mandate context of the call",
|
||||||
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate"}},
|
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}},
|
||||||
)
|
)
|
||||||
featureInstanceId: Optional[str] = Field(
|
featureInstanceId: Optional[str] = Field(
|
||||||
default=None,
|
default=None,
|
||||||
description="Feature instance context",
|
description="Feature instance context",
|
||||||
json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
|
json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}},
|
||||||
)
|
)
|
||||||
featureCode: Optional[str] = Field(
|
featureCode: Optional[str] = Field(
|
||||||
default=None,
|
default=None,
|
||||||
description="Feature code (e.g. workspace, trustee)",
|
description="Feature code (e.g. workspace, trustee)",
|
||||||
json_schema_extra={"label": "Feature", "fk_target": {"db": "poweron_app", "table": "Feature", "column": "code"}},
|
json_schema_extra={"label": "Feature", "fk_target": {"db": "poweron_app", "table": "Feature", "column": "code", "labelField": "code"}},
|
||||||
)
|
)
|
||||||
instanceLabel: Optional[str] = Field(
|
instanceLabel: Optional[str] = Field(
|
||||||
default=None,
|
default=None,
|
||||||
|
|
|
||||||
|
|
@ -100,7 +100,7 @@ class AuditLogEntry(BaseModel):
|
||||||
timestamp: float = Field(
|
timestamp: float = Field(
|
||||||
default_factory=getUtcTimestamp,
|
default_factory=getUtcTimestamp,
|
||||||
description="UTC timestamp when the event occurred",
|
description="UTC timestamp when the event occurred",
|
||||||
json_schema_extra={"label": "Zeitstempel", "frontend_type": "datetime", "frontend_readonly": True, "frontend_required": True}
|
json_schema_extra={"label": "Zeitstempel", "frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": True}
|
||||||
)
|
)
|
||||||
|
|
||||||
# Actor identification
|
# Actor identification
|
||||||
|
|
@ -111,7 +111,7 @@ class AuditLogEntry(BaseModel):
|
||||||
"frontend_type": "text",
|
"frontend_type": "text",
|
||||||
"frontend_readonly": True,
|
"frontend_readonly": True,
|
||||||
"frontend_required": True,
|
"frontend_required": True,
|
||||||
"fk_target": {"db": "poweron_app", "table": "User"},
|
"fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
@ -130,7 +130,7 @@ class AuditLogEntry(BaseModel):
|
||||||
"frontend_type": "text",
|
"frontend_type": "text",
|
||||||
"frontend_readonly": True,
|
"frontend_readonly": True,
|
||||||
"frontend_required": False,
|
"frontend_required": False,
|
||||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
@ -142,7 +142,7 @@ class AuditLogEntry(BaseModel):
|
||||||
"frontend_type": "text",
|
"frontend_type": "text",
|
||||||
"frontend_readonly": True,
|
"frontend_readonly": True,
|
||||||
"frontend_required": False,
|
"frontend_required": False,
|
||||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
|
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
@ -193,7 +193,13 @@ class AuditLogEntry(BaseModel):
|
||||||
success: bool = Field(
|
success: bool = Field(
|
||||||
default=True,
|
default=True,
|
||||||
description="Whether the action was successful",
|
description="Whether the action was successful",
|
||||||
json_schema_extra={"label": "Erfolgreich", "frontend_type": "checkbox", "frontend_readonly": True, "frontend_required": True}
|
json_schema_extra={
|
||||||
|
"label": "Erfolgreich",
|
||||||
|
"frontend_type": "checkbox",
|
||||||
|
"frontend_readonly": True,
|
||||||
|
"frontend_required": True,
|
||||||
|
"frontend_format_labels": ["OK", "-", "Fehler"],
|
||||||
|
},
|
||||||
)
|
)
|
||||||
|
|
||||||
errorMessage: Optional[str] = Field(
|
errorMessage: Optional[str] = Field(
|
||||||
|
|
|
||||||
|
|
@ -64,7 +64,7 @@ class BackgroundJob(PowerOnModel):
|
||||||
description="Mandate scope (used for access checks). None for system-wide jobs.",
|
description="Mandate scope (used for access checks). None for system-wide jobs.",
|
||||||
json_schema_extra={
|
json_schema_extra={
|
||||||
"label": "Mandanten-ID",
|
"label": "Mandanten-ID",
|
||||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
featureInstanceId: Optional[str] = Field(
|
featureInstanceId: Optional[str] = Field(
|
||||||
|
|
@ -72,7 +72,7 @@ class BackgroundJob(PowerOnModel):
|
||||||
description="Feature instance scope (optional)",
|
description="Feature instance scope (optional)",
|
||||||
json_schema_extra={
|
json_schema_extra={
|
||||||
"label": "Feature-Instanz",
|
"label": "Feature-Instanz",
|
||||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
|
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
triggeredBy: Optional[str] = Field(
|
triggeredBy: Optional[str] = Field(
|
||||||
|
|
@ -113,18 +113,18 @@ class BackgroundJob(PowerOnModel):
|
||||||
json_schema_extra={"label": "Fehler"},
|
json_schema_extra={"label": "Fehler"},
|
||||||
)
|
)
|
||||||
|
|
||||||
createdAt: datetime = Field(
|
createdAt: float = Field(
|
||||||
default_factory=lambda: datetime.now(timezone.utc),
|
default_factory=lambda: datetime.now(timezone.utc).timestamp(),
|
||||||
description="When the job was submitted",
|
description="When the job was submitted (UTC unix)",
|
||||||
json_schema_extra={"label": "Eingereicht"},
|
json_schema_extra={"label": "Eingereicht", "frontend_type": "timestamp"},
|
||||||
)
|
)
|
||||||
startedAt: Optional[datetime] = Field(
|
startedAt: Optional[float] = Field(
|
||||||
None,
|
None,
|
||||||
description="When the handler began running",
|
description="When the handler began running (UTC unix)",
|
||||||
json_schema_extra={"label": "Gestartet"},
|
json_schema_extra={"label": "Gestartet", "frontend_type": "timestamp"},
|
||||||
)
|
)
|
||||||
finishedAt: Optional[datetime] = Field(
|
finishedAt: Optional[float] = Field(
|
||||||
None,
|
None,
|
||||||
description="When the handler reached a terminal status",
|
description="When the handler reached a terminal status (UTC unix)",
|
||||||
json_schema_extra={"label": "Beendet"},
|
json_schema_extra={"label": "Beendet", "frontend_type": "timestamp"},
|
||||||
)
|
)
|
||||||
|
|
|
||||||
|
|
@ -8,12 +8,12 @@ from pydantic import BaseModel, Field
|
||||||
|
|
||||||
from modules.shared.i18nRegistry import i18nModel
|
from modules.shared.i18nRegistry import i18nModel
|
||||||
|
|
||||||
_MODEL_REGISTRY: Dict[str, Type["PowerOnModel"]] = {}
|
MODEL_REGISTRY: Dict[str, Type["PowerOnModel"]] = {}
|
||||||
|
|
||||||
|
|
||||||
def _getModelByTableName(tableName: str) -> Optional[Type["PowerOnModel"]]:
|
def _getModelByTableName(tableName: str) -> Optional[Type["PowerOnModel"]]:
|
||||||
"""Look up a PowerOnModel subclass by its table name (= class name)."""
|
"""Look up a PowerOnModel subclass by its table name (= class name)."""
|
||||||
return _MODEL_REGISTRY.get(tableName)
|
return MODEL_REGISTRY.get(tableName)
|
||||||
|
|
||||||
|
|
||||||
@i18nModel("Basisdatensatz")
|
@i18nModel("Basisdatensatz")
|
||||||
|
|
@ -22,7 +22,7 @@ class PowerOnModel(BaseModel):
|
||||||
|
|
||||||
def __init_subclass__(cls, **kwargs):
|
def __init_subclass__(cls, **kwargs):
|
||||||
super().__init_subclass__(**kwargs)
|
super().__init_subclass__(**kwargs)
|
||||||
_MODEL_REGISTRY[cls.__name__] = cls
|
MODEL_REGISTRY[cls.__name__] = cls
|
||||||
|
|
||||||
sysCreatedAt: Optional[float] = Field(
|
sysCreatedAt: Optional[float] = Field(
|
||||||
default=None,
|
default=None,
|
||||||
|
|
@ -46,6 +46,7 @@ class PowerOnModel(BaseModel):
|
||||||
"frontend_required": False,
|
"frontend_required": False,
|
||||||
"frontend_visible": False,
|
"frontend_visible": False,
|
||||||
"system": True,
|
"system": True,
|
||||||
|
"fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
sysModifiedAt: Optional[float] = Field(
|
sysModifiedAt: Optional[float] = Field(
|
||||||
|
|
@ -70,5 +71,6 @@ class PowerOnModel(BaseModel):
|
||||||
"frontend_required": False,
|
"frontend_required": False,
|
||||||
"frontend_visible": False,
|
"frontend_visible": False,
|
||||||
"system": True,
|
"system": True,
|
||||||
|
"fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
|
|
|
||||||
|
|
@ -49,12 +49,12 @@ class BillingAccount(PowerOnModel):
|
||||||
mandateId: str = Field(
|
mandateId: str = Field(
|
||||||
...,
|
...,
|
||||||
description="Foreign key to Mandate",
|
description="Foreign key to Mandate",
|
||||||
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate"}},
|
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}},
|
||||||
)
|
)
|
||||||
userId: Optional[str] = Field(
|
userId: Optional[str] = Field(
|
||||||
None,
|
None,
|
||||||
description="Foreign key to User (None = mandate pool account, set = user audit account)",
|
description="Foreign key to User (None = mandate pool account, set = user audit account)",
|
||||||
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "User"}},
|
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"}},
|
||||||
)
|
)
|
||||||
balance: float = Field(default=0.0, description="Current balance in CHF", json_schema_extra={"label": "Guthaben (CHF)"})
|
balance: float = Field(default=0.0, description="Current balance in CHF", json_schema_extra={"label": "Guthaben (CHF)"})
|
||||||
warningThreshold: float = Field(
|
warningThreshold: float = Field(
|
||||||
|
|
@ -62,10 +62,10 @@ class BillingAccount(PowerOnModel):
|
||||||
description="Warning threshold in CHF",
|
description="Warning threshold in CHF",
|
||||||
json_schema_extra={"label": "Warnschwelle (CHF)"},
|
json_schema_extra={"label": "Warnschwelle (CHF)"},
|
||||||
)
|
)
|
||||||
lastWarningAt: Optional[datetime] = Field(
|
lastWarningAt: Optional[float] = Field(
|
||||||
None,
|
None,
|
||||||
description="Last warning sent timestamp",
|
description="Last warning sent timestamp (UTC unix)",
|
||||||
json_schema_extra={"label": "Letzte Warnung"},
|
json_schema_extra={"label": "Letzte Warnung", "frontend_type": "timestamp"},
|
||||||
)
|
)
|
||||||
enabled: bool = Field(default=True, description="Account is active", json_schema_extra={"label": "Aktiv"})
|
enabled: bool = Field(default=True, description="Account is active", json_schema_extra={"label": "Aktiv"})
|
||||||
|
|
||||||
|
|
@ -81,7 +81,7 @@ class BillingTransaction(PowerOnModel):
|
||||||
accountId: str = Field(
|
accountId: str = Field(
|
||||||
...,
|
...,
|
||||||
description="Foreign key to BillingAccount",
|
description="Foreign key to BillingAccount",
|
||||||
json_schema_extra={"label": "Konto-ID", "fk_target": {"db": "poweron_billing", "table": "BillingAccount"}},
|
json_schema_extra={"label": "Konto-ID", "fk_target": {"db": "poweron_billing", "table": "BillingAccount", "labelField": None}},
|
||||||
)
|
)
|
||||||
transactionType: TransactionTypeEnum = Field(..., description="Transaction type", json_schema_extra={"label": "Typ"})
|
transactionType: TransactionTypeEnum = Field(..., description="Transaction type", json_schema_extra={"label": "Typ"})
|
||||||
amount: float = Field(..., description="Amount in CHF (always positive)", json_schema_extra={"label": "Betrag (CHF)"})
|
amount: float = Field(..., description="Amount in CHF (always positive)", json_schema_extra={"label": "Betrag (CHF)"})
|
||||||
|
|
@ -100,19 +100,19 @@ class BillingTransaction(PowerOnModel):
|
||||||
featureInstanceId: Optional[str] = Field(
|
featureInstanceId: Optional[str] = Field(
|
||||||
None,
|
None,
|
||||||
description="Feature instance ID",
|
description="Feature instance ID",
|
||||||
json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
|
json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}},
|
||||||
)
|
)
|
||||||
featureCode: Optional[str] = Field(
|
featureCode: Optional[str] = Field(
|
||||||
None,
|
None,
|
||||||
description="Feature code (e.g., automation)",
|
description="Feature code (e.g., automation)",
|
||||||
json_schema_extra={"label": "Feature-Code", "fk_target": {"db": "poweron_app", "table": "Feature", "column": "code"}},
|
json_schema_extra={"label": "Feature-Code", "fk_target": {"db": "poweron_app", "table": "Feature", "column": "code", "labelField": "code"}},
|
||||||
)
|
)
|
||||||
aicoreProvider: Optional[str] = Field(None, description="AICore provider (anthropic, openai, etc.)", json_schema_extra={"label": "AI-Anbieter"})
|
aicoreProvider: Optional[str] = Field(None, description="AICore provider (anthropic, openai, etc.)", json_schema_extra={"label": "AI-Anbieter"})
|
||||||
aicoreModel: Optional[str] = Field(None, description="AICore model name (e.g., claude-4-sonnet, gpt-4o)", json_schema_extra={"label": "AI-Modell"})
|
aicoreModel: Optional[str] = Field(None, description="AICore model name (e.g., claude-4-sonnet, gpt-4o)", json_schema_extra={"label": "AI-Modell"})
|
||||||
createdByUserId: Optional[str] = Field(
|
createdByUserId: Optional[str] = Field(
|
||||||
None,
|
None,
|
||||||
description="User who created/caused this transaction",
|
description="User who created/caused this transaction",
|
||||||
json_schema_extra={"label": "Erstellt von Benutzer", "fk_target": {"db": "poweron_app", "table": "User"}},
|
json_schema_extra={"label": "Erstellt von Benutzer", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"}},
|
||||||
)
|
)
|
||||||
|
|
||||||
# AI call metadata (for per-call analytics)
|
# AI call metadata (for per-call analytics)
|
||||||
|
|
@ -133,7 +133,7 @@ class BillingSettings(BaseModel):
|
||||||
mandateId: str = Field(
|
mandateId: str = Field(
|
||||||
...,
|
...,
|
||||||
description="Foreign key to Mandate (UNIQUE)",
|
description="Foreign key to Mandate (UNIQUE)",
|
||||||
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate"}},
|
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}},
|
||||||
)
|
)
|
||||||
|
|
||||||
warningThresholdPercent: float = Field(
|
warningThresholdPercent: float = Field(
|
||||||
|
|
@ -158,7 +158,7 @@ class BillingSettings(BaseModel):
|
||||||
)
|
)
|
||||||
rechargeMaxPerMonth: int = Field(default=3, description="Max auto-recharges per month", json_schema_extra={"label": "Max. Nachladungen/Monat"})
|
rechargeMaxPerMonth: int = Field(default=3, description="Max auto-recharges per month", json_schema_extra={"label": "Max. Nachladungen/Monat"})
|
||||||
rechargesThisMonth: int = Field(default=0, description="Counter: auto-recharges used this month", json_schema_extra={"label": "Nachladungen diesen Monat"})
|
rechargesThisMonth: int = Field(default=0, description="Counter: auto-recharges used this month", json_schema_extra={"label": "Nachladungen diesen Monat"})
|
||||||
monthResetAt: Optional[datetime] = Field(None, description="When rechargesThisMonth was last reset", json_schema_extra={"label": "Monats-Reset"})
|
monthResetAt: Optional[float] = Field(None, description="When rechargesThisMonth was last reset (UTC unix)", json_schema_extra={"label": "Monats-Reset", "frontend_type": "timestamp"})
|
||||||
|
|
||||||
# Notifications
|
# Notifications
|
||||||
notifyEmails: List[str] = Field(
|
notifyEmails: List[str] = Field(
|
||||||
|
|
@ -174,10 +174,10 @@ class BillingSettings(BaseModel):
|
||||||
description="Peak indexed data volume MB this billing period",
|
description="Peak indexed data volume MB this billing period",
|
||||||
json_schema_extra={"label": "Speicher-Peak (MB)"},
|
json_schema_extra={"label": "Speicher-Peak (MB)"},
|
||||||
)
|
)
|
||||||
storagePeriodStartAt: Optional[datetime] = Field(
|
storagePeriodStartAt: Optional[float] = Field(
|
||||||
None,
|
None,
|
||||||
description="Subscription billing period start used for storage reset",
|
description="Subscription billing period start used for storage reset (UTC unix)",
|
||||||
json_schema_extra={"label": "Speicher-Periodenbeginn"},
|
json_schema_extra={"label": "Speicher-Periodenbeginn", "frontend_type": "timestamp"},
|
||||||
)
|
)
|
||||||
storageBilledUpToMB: float = Field(
|
storageBilledUpToMB: float = Field(
|
||||||
default=0.0,
|
default=0.0,
|
||||||
|
|
@ -193,9 +193,10 @@ class StripeWebhookEvent(BaseModel):
|
||||||
description="Primary key",
|
description="Primary key",
|
||||||
)
|
)
|
||||||
event_id: str = Field(..., description="Stripe event ID (evt_xxx)")
|
event_id: str = Field(..., description="Stripe event ID (evt_xxx)")
|
||||||
processed_at: datetime = Field(
|
processed_at: float = Field(
|
||||||
default_factory=lambda: datetime.now(timezone.utc),
|
default_factory=lambda: datetime.now(timezone.utc).timestamp(),
|
||||||
description="When the event was processed",
|
description="When the event was processed (UTC unix)",
|
||||||
|
json_schema_extra={"frontend_type": "timestamp"},
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -210,10 +211,14 @@ class UsageStatistics(BaseModel):
|
||||||
accountId: str = Field(
|
accountId: str = Field(
|
||||||
...,
|
...,
|
||||||
description="Foreign key to BillingAccount",
|
description="Foreign key to BillingAccount",
|
||||||
json_schema_extra={"label": "Konto-ID", "fk_target": {"db": "poweron_billing", "table": "BillingAccount"}},
|
json_schema_extra={"label": "Konto-ID", "fk_target": {"db": "poweron_billing", "table": "BillingAccount", "labelField": None}},
|
||||||
)
|
)
|
||||||
periodType: PeriodTypeEnum = Field(..., description="Period type", json_schema_extra={"label": "Periodentyp"})
|
periodType: PeriodTypeEnum = Field(..., description="Period type", json_schema_extra={"label": "Periodentyp"})
|
||||||
periodStart: date = Field(..., description="Period start date", json_schema_extra={"label": "Periodenbeginn"})
|
periodStart: date = Field(
|
||||||
|
...,
|
||||||
|
description="Period start date",
|
||||||
|
json_schema_extra={"label": "Periodenbeginn", "frontend_type": "date"},
|
||||||
|
)
|
||||||
|
|
||||||
# Aggregated values
|
# Aggregated values
|
||||||
totalCostCHF: float = Field(default=0.0, description="Total cost in CHF", json_schema_extra={"label": "Gesamtkosten (CHF)"})
|
totalCostCHF: float = Field(default=0.0, description="Total cost in CHF", json_schema_extra={"label": "Gesamtkosten (CHF)"})
|
||||||
|
|
|
||||||
|
|
@ -16,12 +16,12 @@ class ChatLog(PowerOnModel):
|
||||||
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key", json_schema_extra={"label": "ID"})
|
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key", json_schema_extra={"label": "ID"})
|
||||||
workflowId: str = Field(
|
workflowId: str = Field(
|
||||||
description="Foreign key to workflow",
|
description="Foreign key to workflow",
|
||||||
json_schema_extra={"label": "Workflow-ID", "fk_target": {"db": "poweron_chat", "table": "ChatWorkflow"}},
|
json_schema_extra={"label": "Workflow-ID", "fk_target": {"db": "poweron_chat", "table": "ChatWorkflow", "labelField": "name"}},
|
||||||
)
|
)
|
||||||
message: str = Field(description="Log message", json_schema_extra={"label": "Nachricht"})
|
message: str = Field(description="Log message", json_schema_extra={"label": "Nachricht"})
|
||||||
type: str = Field(description="Log type (info, warning, error, etc.)", json_schema_extra={"label": "Typ"})
|
type: str = Field(description="Log type (info, warning, error, etc.)", json_schema_extra={"label": "Typ"})
|
||||||
timestamp: float = Field(default_factory=getUtcTimestamp,
|
timestamp: float = Field(default_factory=getUtcTimestamp,
|
||||||
description="When the log entry was created (UTC timestamp in seconds)", json_schema_extra={"label": "Zeitstempel"})
|
description="When the log entry was created (UTC timestamp in seconds)", json_schema_extra={"label": "Zeitstempel", "frontend_type": "timestamp"})
|
||||||
status: Optional[str] = Field(None, description="Status of the log entry", json_schema_extra={"label": "Status"})
|
status: Optional[str] = Field(None, description="Status of the log entry", json_schema_extra={"label": "Status"})
|
||||||
progress: Optional[float] = Field(None, description="Progress indicator (0.0 to 1.0)", json_schema_extra={"label": "Fortschritt"})
|
progress: Optional[float] = Field(None, description="Progress indicator (0.0 to 1.0)", json_schema_extra={"label": "Fortschritt"})
|
||||||
performance: Optional[Dict[str, Any]] = Field(None, description="Performance metrics", json_schema_extra={"label": "Leistung"})
|
performance: Optional[Dict[str, Any]] = Field(None, description="Performance metrics", json_schema_extra={"label": "Leistung"})
|
||||||
|
|
@ -37,11 +37,11 @@ class ChatDocument(PowerOnModel):
|
||||||
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key", json_schema_extra={"label": "ID"})
|
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key", json_schema_extra={"label": "ID"})
|
||||||
messageId: str = Field(
|
messageId: str = Field(
|
||||||
description="Foreign key to message",
|
description="Foreign key to message",
|
||||||
json_schema_extra={"label": "Nachrichten-ID", "fk_target": {"db": "poweron_chat", "table": "ChatMessage"}},
|
json_schema_extra={"label": "Nachrichten-ID", "fk_target": {"db": "poweron_chat", "table": "ChatMessage", "labelField": None}},
|
||||||
)
|
)
|
||||||
fileId: str = Field(
|
fileId: str = Field(
|
||||||
description="Foreign key to file",
|
description="Foreign key to file",
|
||||||
json_schema_extra={"label": "Datei-ID", "fk_target": {"db": "poweron_management", "table": "FileItem"}},
|
json_schema_extra={"label": "Datei-ID", "fk_target": {"db": "poweron_management", "table": "FileItem", "labelField": "fileName"}},
|
||||||
)
|
)
|
||||||
fileName: str = Field(description="Name of the file", json_schema_extra={"label": "Dateiname"})
|
fileName: str = Field(description="Name of the file", json_schema_extra={"label": "Dateiname"})
|
||||||
fileSize: int = Field(description="Size of the file", json_schema_extra={"label": "Dateigröße"})
|
fileSize: int = Field(description="Size of the file", json_schema_extra={"label": "Dateigröße"})
|
||||||
|
|
@ -81,12 +81,12 @@ class ChatMessage(PowerOnModel):
|
||||||
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key", json_schema_extra={"label": "ID"})
|
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key", json_schema_extra={"label": "ID"})
|
||||||
workflowId: str = Field(
|
workflowId: str = Field(
|
||||||
description="Foreign key to workflow",
|
description="Foreign key to workflow",
|
||||||
json_schema_extra={"label": "Workflow-ID", "fk_target": {"db": "poweron_chat", "table": "ChatWorkflow"}},
|
json_schema_extra={"label": "Workflow-ID", "fk_target": {"db": "poweron_chat", "table": "ChatWorkflow", "labelField": "name"}},
|
||||||
)
|
)
|
||||||
parentMessageId: Optional[str] = Field(
|
parentMessageId: Optional[str] = Field(
|
||||||
None,
|
None,
|
||||||
description="Parent message ID for threading",
|
description="Parent message ID for threading",
|
||||||
json_schema_extra={"label": "Übergeordnete Nachrichten-ID", "fk_target": {"db": "poweron_chat", "table": "ChatMessage"}},
|
json_schema_extra={"label": "Übergeordnete Nachrichten-ID", "fk_target": {"db": "poweron_chat", "table": "ChatMessage", "labelField": None}},
|
||||||
)
|
)
|
||||||
documents: List[ChatDocument] = Field(default_factory=list, description="Associated documents", json_schema_extra={"label": "Dokumente"})
|
documents: List[ChatDocument] = Field(default_factory=list, description="Associated documents", json_schema_extra={"label": "Dokumente"})
|
||||||
documentsLabel: Optional[str] = Field(None, description="Label for the set of documents", json_schema_extra={"label": "Dokumenten-Label"})
|
documentsLabel: Optional[str] = Field(None, description="Label for the set of documents", json_schema_extra={"label": "Dokumenten-Label"})
|
||||||
|
|
@ -97,7 +97,7 @@ class ChatMessage(PowerOnModel):
|
||||||
sequenceNr: Optional[int] = Field(default=0,
|
sequenceNr: Optional[int] = Field(default=0,
|
||||||
description="Sequence number of the message (set automatically)", json_schema_extra={"label": "Sequenznummer"})
|
description="Sequence number of the message (set automatically)", json_schema_extra={"label": "Sequenznummer"})
|
||||||
publishedAt: Optional[float] = Field(default=None,
|
publishedAt: Optional[float] = Field(default=None,
|
||||||
description="When the message was published (UTC timestamp in seconds)", json_schema_extra={"label": "Veröffentlicht am"})
|
description="When the message was published (UTC timestamp in seconds)", json_schema_extra={"label": "Veröffentlicht am", "frontend_type": "timestamp"})
|
||||||
success: Optional[bool] = Field(None, description="Whether the message processing was successful", json_schema_extra={"label": "Erfolg"})
|
success: Optional[bool] = Field(None, description="Whether the message processing was successful", json_schema_extra={"label": "Erfolg"})
|
||||||
actionId: Optional[str] = Field(None, description="ID of the action that produced this message", json_schema_extra={"label": "Aktions-ID"})
|
actionId: Optional[str] = Field(None, description="ID of the action that produced this message", json_schema_extra={"label": "Aktions-ID"})
|
||||||
actionMethod: Optional[str] = Field(None, description="Method of the action that produced this message", json_schema_extra={"label": "Aktionsmethode"})
|
actionMethod: Optional[str] = Field(None, description="Method of the action that produced this message", json_schema_extra={"label": "Aktionsmethode"})
|
||||||
|
|
@ -125,7 +125,7 @@ class ChatWorkflow(PowerOnModel):
|
||||||
"frontend_type": "text",
|
"frontend_type": "text",
|
||||||
"frontend_readonly": True,
|
"frontend_readonly": True,
|
||||||
"frontend_required": False,
|
"frontend_required": False,
|
||||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
|
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
linkedWorkflowId: Optional[str] = Field(
|
linkedWorkflowId: Optional[str] = Field(
|
||||||
|
|
@ -219,7 +219,7 @@ class UserInputRequest(BaseModel):
|
||||||
workflowId: Optional[str] = Field(
|
workflowId: Optional[str] = Field(
|
||||||
None,
|
None,
|
||||||
description="Optional ID of the workflow to continue",
|
description="Optional ID of the workflow to continue",
|
||||||
json_schema_extra={"label": "Workflow-ID", "fk_target": {"db": "poweron_chat", "table": "ChatWorkflow"}},
|
json_schema_extra={"label": "Workflow-ID", "fk_target": {"db": "poweron_chat", "table": "ChatWorkflow", "labelField": "name"}},
|
||||||
)
|
)
|
||||||
allowedProviders: Optional[List[str]] = Field(None, description="List of allowed AI providers (multiselect)", json_schema_extra={"label": "Erlaubte Anbieter"})
|
allowedProviders: Optional[List[str]] = Field(None, description="List of allowed AI providers (multiselect)", json_schema_extra={"label": "Erlaubte Anbieter"})
|
||||||
|
|
||||||
|
|
@ -281,8 +281,8 @@ class ObservationPreview(BaseModel):
|
||||||
# Extended metadata fields
|
# Extended metadata fields
|
||||||
mimeType: Optional[str] = Field(default=None, description="MIME type", json_schema_extra={"label": "MIME-Typ"})
|
mimeType: Optional[str] = Field(default=None, description="MIME type", json_schema_extra={"label": "MIME-Typ"})
|
||||||
size: Optional[str] = Field(default=None, description="File size", json_schema_extra={"label": "Größe"})
|
size: Optional[str] = Field(default=None, description="File size", json_schema_extra={"label": "Größe"})
|
||||||
created: Optional[str] = Field(default=None, description="Creation timestamp", json_schema_extra={"label": "Erstellt"})
|
created: Optional[float] = Field(default=None, description="Creation timestamp (UTC unix)", json_schema_extra={"label": "Erstellt", "frontend_type": "timestamp"})
|
||||||
modified: Optional[str] = Field(default=None, description="Modification timestamp", json_schema_extra={"label": "Geändert"})
|
modified: Optional[float] = Field(default=None, description="Modification timestamp (UTC unix)", json_schema_extra={"label": "Geändert", "frontend_type": "timestamp"})
|
||||||
typeGroup: Optional[str] = Field(default=None, description="Document type group", json_schema_extra={"label": "Typgruppe"})
|
typeGroup: Optional[str] = Field(default=None, description="Document type group", json_schema_extra={"label": "Typgruppe"})
|
||||||
documentId: Optional[str] = Field(default=None, description="Document ID", json_schema_extra={"label": "Dokument-ID"})
|
documentId: Optional[str] = Field(default=None, description="Document ID", json_schema_extra={"label": "Dokument-ID"})
|
||||||
reference: Optional[str] = Field(default=None, description="Document reference", json_schema_extra={"label": "Referenz"})
|
reference: Optional[str] = Field(default=None, description="Document reference", json_schema_extra={"label": "Referenz"})
|
||||||
|
|
@ -332,7 +332,7 @@ class ActionItem(BaseModel):
|
||||||
retryCount: int = Field(default=0, description="Number of retries attempted", json_schema_extra={"label": "Wiederholungen"})
|
retryCount: int = Field(default=0, description="Number of retries attempted", json_schema_extra={"label": "Wiederholungen"})
|
||||||
retryMax: int = Field(default=3, description="Maximum number of retries", json_schema_extra={"label": "Max. Wiederholungen"})
|
retryMax: int = Field(default=3, description="Maximum number of retries", json_schema_extra={"label": "Max. Wiederholungen"})
|
||||||
processingTime: Optional[float] = Field(None, description="Processing time in seconds", json_schema_extra={"label": "Bearbeitungszeit"})
|
processingTime: Optional[float] = Field(None, description="Processing time in seconds", json_schema_extra={"label": "Bearbeitungszeit"})
|
||||||
timestamp: float = Field(..., description="When the action was executed (UTC timestamp in seconds)", json_schema_extra={"label": "Zeitstempel"})
|
timestamp: float = Field(..., description="When the action was executed (UTC timestamp in seconds)", json_schema_extra={"label": "Zeitstempel", "frontend_type": "timestamp"})
|
||||||
result: Optional[str] = Field(None, description="Result of the action", json_schema_extra={"label": "Ergebnis"})
|
result: Optional[str] = Field(None, description="Result of the action", json_schema_extra={"label": "Ergebnis"})
|
||||||
|
|
||||||
def setSuccess(self, result: str = None) -> None:
|
def setSuccess(self, result: str = None) -> None:
|
||||||
|
|
@ -361,13 +361,13 @@ class TaskItem(BaseModel):
|
||||||
workflowId: str = Field(
|
workflowId: str = Field(
|
||||||
...,
|
...,
|
||||||
description="Workflow ID",
|
description="Workflow ID",
|
||||||
json_schema_extra={"label": "Workflow-ID", "fk_target": {"db": "poweron_chat", "table": "ChatWorkflow"}},
|
json_schema_extra={"label": "Workflow-ID", "fk_target": {"db": "poweron_chat", "table": "ChatWorkflow", "labelField": "name"}},
|
||||||
)
|
)
|
||||||
userInput: str = Field(..., description="User input that triggered the task", json_schema_extra={"label": "Benutzereingabe"})
|
userInput: str = Field(..., description="User input that triggered the task", json_schema_extra={"label": "Benutzereingabe"})
|
||||||
status: TaskStatus = Field(default=TaskStatus.PENDING, description="Task status", json_schema_extra={"label": "Status"})
|
status: TaskStatus = Field(default=TaskStatus.PENDING, description="Task status", json_schema_extra={"label": "Status"})
|
||||||
error: Optional[str] = Field(None, description="Error message if task failed", json_schema_extra={"label": "Fehler"})
|
error: Optional[str] = Field(None, description="Error message if task failed", json_schema_extra={"label": "Fehler"})
|
||||||
startedAt: Optional[float] = Field(None, description="When the task started (UTC timestamp in seconds)", json_schema_extra={"label": "Gestartet am"})
|
startedAt: Optional[float] = Field(None, description="When the task started (UTC timestamp in seconds)", json_schema_extra={"label": "Gestartet am", "frontend_type": "timestamp"})
|
||||||
finishedAt: Optional[float] = Field(None, description="When the task finished (UTC timestamp in seconds)", json_schema_extra={"label": "Beendet am"})
|
finishedAt: Optional[float] = Field(None, description="When the task finished (UTC timestamp in seconds)", json_schema_extra={"label": "Beendet am", "frontend_type": "timestamp"})
|
||||||
actionList: List[ActionItem] = Field(default_factory=list, description="List of actions to execute", json_schema_extra={"label": "Aktionen"})
|
actionList: List[ActionItem] = Field(default_factory=list, description="List of actions to execute", json_schema_extra={"label": "Aktionen"})
|
||||||
retryCount: int = Field(default=0, description="Number of retries attempted", json_schema_extra={"label": "Wiederholungen"})
|
retryCount: int = Field(default=0, description="Number of retries attempted", json_schema_extra={"label": "Wiederholungen"})
|
||||||
retryMax: int = Field(default=3, description="Maximum number of retries", json_schema_extra={"label": "Max. Wiederholungen"})
|
retryMax: int = Field(default=3, description="Maximum number of retries", json_schema_extra={"label": "Max. Wiederholungen"})
|
||||||
|
|
@ -402,7 +402,7 @@ class TaskHandover(BaseModel):
|
||||||
improvements: List[str] = Field(default_factory=list, description="Improvement suggestions", json_schema_extra={"label": "Verbesserungen"})
|
improvements: List[str] = Field(default_factory=list, description="Improvement suggestions", json_schema_extra={"label": "Verbesserungen"})
|
||||||
workflowSummary: Optional[str] = Field(None, description="Summarized workflow context", json_schema_extra={"label": "Workflow-Zusammenfassung"})
|
workflowSummary: Optional[str] = Field(None, description="Summarized workflow context", json_schema_extra={"label": "Workflow-Zusammenfassung"})
|
||||||
messageHistory: List[str] = Field(default_factory=list, description="Key message summaries", json_schema_extra={"label": "Nachrichtenverlauf"})
|
messageHistory: List[str] = Field(default_factory=list, description="Key message summaries", json_schema_extra={"label": "Nachrichtenverlauf"})
|
||||||
timestamp: float = Field(..., description="When the handover was created (UTC timestamp in seconds)", json_schema_extra={"label": "Zeitstempel"})
|
timestamp: float = Field(..., description="When the handover was created (UTC timestamp in seconds)", json_schema_extra={"label": "Zeitstempel", "frontend_type": "timestamp"})
|
||||||
handoverType: str = Field(default="task", description="Type of handover: task, phase, or workflow", json_schema_extra={"label": "Übergabetyp"})
|
handoverType: str = Field(default="task", description="Type of handover: task, phase, or workflow", json_schema_extra={"label": "Übergabetyp"})
|
||||||
|
|
||||||
class TaskContext(BaseModel):
|
class TaskContext(BaseModel):
|
||||||
|
|
|
||||||
|
|
@ -34,7 +34,7 @@ class ContentObject(BaseModel):
|
||||||
id: str = Field(default_factory=lambda: str(uuid.uuid4()))
|
id: str = Field(default_factory=lambda: str(uuid.uuid4()))
|
||||||
fileId: str = Field(
|
fileId: str = Field(
|
||||||
description="FK to the physical file",
|
description="FK to the physical file",
|
||||||
json_schema_extra={"fk_target": {"db": "poweron_management", "table": "FileItem"}},
|
json_schema_extra={"fk_target": {"db": "poweron_management", "table": "FileItem", "labelField": "fileName"}},
|
||||||
)
|
)
|
||||||
contentType: str = Field(description="text, image, videostream, audiostream, other")
|
contentType: str = Field(description="text, image, videostream, audiostream, other")
|
||||||
data: str = Field(default="", description="Content data (text, base64, URL)")
|
data: str = Field(default="", description="Content data (text, base64, URL)")
|
||||||
|
|
|
||||||
|
|
@ -23,10 +23,15 @@ class DataSource(PowerOnModel):
|
||||||
)
|
)
|
||||||
connectionId: str = Field(
|
connectionId: str = Field(
|
||||||
description="FK to UserConnection",
|
description="FK to UserConnection",
|
||||||
json_schema_extra={"label": "Verbindungs-ID", "fk_target": {"db": "poweron_app", "table": "UserConnection"}},
|
json_schema_extra={"label": "Verbindungs-ID", "fk_target": {"db": "poweron_app", "table": "UserConnection", "labelField": "externalUsername"}},
|
||||||
)
|
)
|
||||||
sourceType: str = Field(
|
sourceType: str = Field(
|
||||||
description="sharepointFolder, googleDriveFolder, outlookFolder, ftpFolder, clickupList (path under /team/...)",
|
description=(
|
||||||
|
"sharepointFolder, onedriveFolder, googleDriveFolder, "
|
||||||
|
"outlookFolder, gmailFolder, ftpFolder, clickupList "
|
||||||
|
"(path under /team/...), kdriveFolder, calendarFolder, "
|
||||||
|
"contactFolder"
|
||||||
|
),
|
||||||
json_schema_extra={"label": "Quellentyp"},
|
json_schema_extra={"label": "Quellentyp"},
|
||||||
)
|
)
|
||||||
path: str = Field(
|
path: str = Field(
|
||||||
|
|
@ -45,17 +50,17 @@ class DataSource(PowerOnModel):
|
||||||
featureInstanceId: Optional[str] = Field(
|
featureInstanceId: Optional[str] = Field(
|
||||||
default=None,
|
default=None,
|
||||||
description="Scoped to feature instance",
|
description="Scoped to feature instance",
|
||||||
json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
|
json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}},
|
||||||
)
|
)
|
||||||
mandateId: Optional[str] = Field(
|
mandateId: Optional[str] = Field(
|
||||||
default=None,
|
default=None,
|
||||||
description="Mandate scope",
|
description="Mandate scope",
|
||||||
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate"}},
|
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}},
|
||||||
)
|
)
|
||||||
userId: str = Field(
|
userId: str = Field(
|
||||||
default="",
|
default="",
|
||||||
description="Owner user ID",
|
description="Owner user ID",
|
||||||
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "User"}},
|
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"}},
|
||||||
)
|
)
|
||||||
autoSync: bool = Field(
|
autoSync: bool = Field(
|
||||||
default=False,
|
default=False,
|
||||||
|
|
@ -65,7 +70,7 @@ class DataSource(PowerOnModel):
|
||||||
lastSynced: Optional[float] = Field(
|
lastSynced: Optional[float] = Field(
|
||||||
default=None,
|
default=None,
|
||||||
description="Last sync timestamp",
|
description="Last sync timestamp",
|
||||||
json_schema_extra={"label": "Letzter Sync"},
|
json_schema_extra={"label": "Letzter Sync", "frontend_type": "timestamp"},
|
||||||
)
|
)
|
||||||
scope: str = Field(
|
scope: str = Field(
|
||||||
default="personal",
|
default="personal",
|
||||||
|
|
@ -91,5 +96,9 @@ class ExternalEntry(BaseModel):
|
||||||
isFolder: bool = Field(default=False, description="True if directory/folder")
|
isFolder: bool = Field(default=False, description="True if directory/folder")
|
||||||
size: Optional[int] = Field(default=None, description="File size in bytes")
|
size: Optional[int] = Field(default=None, description="File size in bytes")
|
||||||
mimeType: Optional[str] = Field(default=None, description="MIME type (files only)")
|
mimeType: Optional[str] = Field(default=None, description="MIME type (files only)")
|
||||||
lastModified: Optional[float] = Field(default=None, description="Last modification timestamp")
|
lastModified: Optional[float] = Field(
|
||||||
|
default=None,
|
||||||
|
description="Last modification timestamp",
|
||||||
|
json_schema_extra={"frontend_type": "timestamp"},
|
||||||
|
)
|
||||||
metadata: Dict[str, Any] = Field(default_factory=dict, description="Provider-specific metadata")
|
metadata: Dict[str, Any] = Field(default_factory=dict, description="Provider-specific metadata")
|
||||||
|
|
|
||||||
|
|
@ -4,10 +4,13 @@
|
||||||
Document reference models for typed document references in workflows.
|
Document reference models for typed document references in workflows.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
from typing import List, Optional
|
import logging
|
||||||
|
from typing import Any, List, Optional
|
||||||
from pydantic import BaseModel, Field
|
from pydantic import BaseModel, Field
|
||||||
from modules.shared.i18nRegistry import i18nModel
|
from modules.shared.i18nRegistry import i18nModel
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class DocumentReference(BaseModel):
|
class DocumentReference(BaseModel):
|
||||||
"""Base class for document references"""
|
"""Base class for document references"""
|
||||||
|
|
@ -107,11 +110,104 @@ class DocumentReferenceList(BaseModel):
|
||||||
# docItem:documentId
|
# docItem:documentId
|
||||||
references.append(DocumentItemReference(documentId=parts[0]))
|
references.append(DocumentItemReference(documentId=parts[0]))
|
||||||
|
|
||||||
# Unknown format - skip or log warning
|
|
||||||
else:
|
else:
|
||||||
# Try to parse as simple string (backward compatibility)
|
if not refStr:
|
||||||
# Assume it's a label if it doesn't match known patterns
|
continue
|
||||||
if refStr:
|
import re
|
||||||
|
if re.match(r'^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$', refStr, re.I):
|
||||||
|
references.append(DocumentItemReference(documentId=refStr))
|
||||||
|
else:
|
||||||
references.append(DocumentListReference(label=refStr))
|
references.append(DocumentListReference(label=refStr))
|
||||||
|
|
||||||
return cls(references=references)
|
return cls(references=references)
|
||||||
|
|
||||||
|
|
||||||
|
def coerceDocumentReferenceList(value: Any) -> DocumentReferenceList:
|
||||||
|
"""Tolerant coercion of any agent/UI-supplied document list to
|
||||||
|
:class:`DocumentReferenceList`.
|
||||||
|
|
||||||
|
Accepts the canonical formats plus the dict-wrapper shapes that
|
||||||
|
LLM tool-callers tend to generate when they see a
|
||||||
|
``type=DocumentList`` parameter:
|
||||||
|
|
||||||
|
* ``None`` / ``""`` -> empty list
|
||||||
|
* :class:`DocumentReferenceList` -> as-is
|
||||||
|
* ``str`` -> single-element string list
|
||||||
|
* ``list[str]`` -> :meth:`from_string_list`
|
||||||
|
* ``list[dict]`` with ``id`` or ``documentId`` -> item references
|
||||||
|
* ``{"documents": [...]}`` / ``{"references": [...]}`` ->
|
||||||
|
recurse into the inner list (this is the shape LLMs love)
|
||||||
|
* ``{"id": "..."}`` / ``{"documentId": "..."}`` -> single
|
||||||
|
item reference
|
||||||
|
* any unrecognised input -> empty list with a WARN log; never
|
||||||
|
raises (the caller decides whether an empty list is fatal).
|
||||||
|
"""
|
||||||
|
if value is None or value == "":
|
||||||
|
return DocumentReferenceList(references=[])
|
||||||
|
if isinstance(value, DocumentReferenceList):
|
||||||
|
return value
|
||||||
|
if isinstance(value, str):
|
||||||
|
return DocumentReferenceList.from_string_list([value])
|
||||||
|
|
||||||
|
if isinstance(value, dict):
|
||||||
|
for innerKey in ("documents", "references", "items", "files"):
|
||||||
|
if innerKey in value and isinstance(value[innerKey], list):
|
||||||
|
return coerceDocumentReferenceList(value[innerKey])
|
||||||
|
docId = value.get("documentId") or value.get("id")
|
||||||
|
if docId:
|
||||||
|
docIdStr = str(docId)
|
||||||
|
if docIdStr.startswith("docItem:") or docIdStr.startswith("docList:"):
|
||||||
|
return DocumentReferenceList.from_string_list([docIdStr])
|
||||||
|
return DocumentReferenceList(references=[
|
||||||
|
DocumentItemReference(
|
||||||
|
documentId=docIdStr,
|
||||||
|
fileName=value.get("fileName") or value.get("name"),
|
||||||
|
)
|
||||||
|
])
|
||||||
|
logger.warning(
|
||||||
|
f"coerceDocumentReferenceList: unsupported dict shape "
|
||||||
|
f"(keys={list(value.keys())}); returning empty list."
|
||||||
|
)
|
||||||
|
return DocumentReferenceList(references=[])
|
||||||
|
|
||||||
|
if isinstance(value, list):
|
||||||
|
if not value:
|
||||||
|
return DocumentReferenceList(references=[])
|
||||||
|
first = value[0]
|
||||||
|
if isinstance(first, str):
|
||||||
|
return DocumentReferenceList.from_string_list(value)
|
||||||
|
if isinstance(first, dict):
|
||||||
|
references: List[DocumentReference] = []
|
||||||
|
for item in value:
|
||||||
|
if not isinstance(item, dict):
|
||||||
|
continue
|
||||||
|
docId = item.get("documentId") or item.get("id")
|
||||||
|
if docId:
|
||||||
|
docIdStr = str(docId)
|
||||||
|
if docIdStr.startswith("docItem:") or docIdStr.startswith("docList:"):
|
||||||
|
parsed = DocumentReferenceList.from_string_list([docIdStr])
|
||||||
|
references.extend(parsed.references)
|
||||||
|
else:
|
||||||
|
references.append(DocumentItemReference(
|
||||||
|
documentId=docIdStr,
|
||||||
|
fileName=item.get("fileName") or item.get("name"),
|
||||||
|
))
|
||||||
|
elif item.get("label"):
|
||||||
|
references.append(DocumentListReference(
|
||||||
|
label=str(item["label"]),
|
||||||
|
messageId=item.get("messageId"),
|
||||||
|
))
|
||||||
|
return DocumentReferenceList(references=references)
|
||||||
|
# Mixed/object list (e.g. inline ActionDocument-like): caller
|
||||||
|
# must pre-handle that case before calling this coercer.
|
||||||
|
logger.warning(
|
||||||
|
f"coerceDocumentReferenceList: list element type "
|
||||||
|
f"{type(first).__name__} not recognised; returning empty list."
|
||||||
|
)
|
||||||
|
return DocumentReferenceList(references=[])
|
||||||
|
|
||||||
|
logger.warning(
|
||||||
|
f"coerceDocumentReferenceList: unsupported value type "
|
||||||
|
f"{type(value).__name__}; returning empty list."
|
||||||
|
)
|
||||||
|
return DocumentReferenceList(references=[])
|
||||||
|
|
|
||||||
|
|
@ -95,7 +95,14 @@ class ExtractionOptions(BaseModel):
|
||||||
imageQuality: int = Field(default=85, ge=1, le=100, description="Image quality (1-100)")
|
imageQuality: int = Field(default=85, ge=1, le=100, description="Image quality (1-100)")
|
||||||
|
|
||||||
# Merging strategy
|
# Merging strategy
|
||||||
mergeStrategy: MergeStrategy = Field(default_factory=MergeStrategy, description="Strategy for merging extraction results")
|
mergeStrategy: Optional[MergeStrategy] = Field(
|
||||||
|
default_factory=MergeStrategy,
|
||||||
|
description=(
|
||||||
|
"Strategy for merging extraction results. Pass None to skip merging entirely "
|
||||||
|
"(required for per-chunk ingestion pipelines like RAG, where per-page/per-section "
|
||||||
|
"granularity must be preserved for embedding)."
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
# Optional chunking parameters (for backward compatibility)
|
# Optional chunking parameters (for backward compatibility)
|
||||||
chunkAllowed: Optional[bool] = Field(default=None, description="Whether chunking is allowed")
|
chunkAllowed: Optional[bool] = Field(default=None, description="Whether chunking is allowed")
|
||||||
|
|
|
||||||
|
|
@ -23,11 +23,11 @@ class FeatureDataSource(PowerOnModel):
|
||||||
)
|
)
|
||||||
featureInstanceId: str = Field(
|
featureInstanceId: str = Field(
|
||||||
description="FK to FeatureInstance",
|
description="FK to FeatureInstance",
|
||||||
json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
|
json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}},
|
||||||
)
|
)
|
||||||
featureCode: str = Field(
|
featureCode: str = Field(
|
||||||
description="Feature code (e.g. trustee, commcoach)",
|
description="Feature code (e.g. trustee, commcoach)",
|
||||||
json_schema_extra={"label": "Feature", "fk_target": {"db": "poweron_app", "table": "Feature", "column": "code"}},
|
json_schema_extra={"label": "Feature", "fk_target": {"db": "poweron_app", "table": "Feature", "column": "code", "labelField": "code"}},
|
||||||
)
|
)
|
||||||
tableName: str = Field(
|
tableName: str = Field(
|
||||||
description="Table name from DATA_OBJECTS meta (e.g. TrusteePosition)",
|
description="Table name from DATA_OBJECTS meta (e.g. TrusteePosition)",
|
||||||
|
|
@ -44,16 +44,16 @@ class FeatureDataSource(PowerOnModel):
|
||||||
mandateId: str = Field(
|
mandateId: str = Field(
|
||||||
default="",
|
default="",
|
||||||
description="Mandate scope",
|
description="Mandate scope",
|
||||||
json_schema_extra={"label": "Mandant", "fk_target": {"db": "poweron_app", "table": "Mandate"}},
|
json_schema_extra={"label": "Mandant", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}},
|
||||||
)
|
)
|
||||||
userId: str = Field(
|
userId: str = Field(
|
||||||
default="",
|
default="",
|
||||||
description="Owner user ID",
|
description="Owner user ID",
|
||||||
json_schema_extra={"label": "Benutzer", "fk_target": {"db": "poweron_app", "table": "User"}},
|
json_schema_extra={"label": "Benutzer", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"}},
|
||||||
)
|
)
|
||||||
workspaceInstanceId: str = Field(
|
workspaceInstanceId: str = Field(
|
||||||
description="Workspace feature instance where this source is used",
|
description="Workspace feature instance where this source is used",
|
||||||
json_schema_extra={"label": "Workspace", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
|
json_schema_extra={"label": "Workspace", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}},
|
||||||
)
|
)
|
||||||
scope: str = Field(
|
scope: str = Field(
|
||||||
default="personal",
|
default="personal",
|
||||||
|
|
|
||||||
|
|
@ -43,7 +43,7 @@ class FeatureInstance(PowerOnModel):
|
||||||
"frontend_type": "select",
|
"frontend_type": "select",
|
||||||
"frontend_readonly": True,
|
"frontend_readonly": True,
|
||||||
"frontend_required": True,
|
"frontend_required": True,
|
||||||
"fk_target": {"db": "poweron_app", "table": "Feature", "column": "code"},
|
"fk_target": {"db": "poweron_app", "table": "Feature", "column": "code", "labelField": "code"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
mandateId: str = Field(
|
mandateId: str = Field(
|
||||||
|
|
@ -53,7 +53,7 @@ class FeatureInstance(PowerOnModel):
|
||||||
"frontend_type": "text",
|
"frontend_type": "text",
|
||||||
"frontend_readonly": True,
|
"frontend_readonly": True,
|
||||||
"frontend_required": True,
|
"frontend_required": True,
|
||||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
label: str = Field(
|
label: str = Field(
|
||||||
|
|
|
||||||
|
|
@ -1,82 +0,0 @@
|
||||||
# Copyright (c) 2025 Patrick Motsch
|
|
||||||
# All rights reserved.
|
|
||||||
"""FileFolder: hierarchical folder structure for file organization."""
|
|
||||||
|
|
||||||
from typing import Optional
|
|
||||||
from pydantic import BaseModel, Field
|
|
||||||
from modules.datamodels.datamodelBase import PowerOnModel
|
|
||||||
from modules.shared.i18nRegistry import i18nModel
|
|
||||||
import uuid
|
|
||||||
|
|
||||||
|
|
||||||
@i18nModel("Dateiordner")
|
|
||||||
class FileFolder(PowerOnModel):
|
|
||||||
"""Hierarchischer Ordner fuer die Dateiverwaltung."""
|
|
||||||
id: str = Field(
|
|
||||||
default_factory=lambda: str(uuid.uuid4()),
|
|
||||||
description="Primary key",
|
|
||||||
json_schema_extra={"label": "ID", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False},
|
|
||||||
)
|
|
||||||
name: str = Field(
|
|
||||||
description="Folder name",
|
|
||||||
json_schema_extra={"label": "Name", "frontend_type": "text", "frontend_readonly": False, "frontend_required": True},
|
|
||||||
)
|
|
||||||
parentId: Optional[str] = Field(
|
|
||||||
default=None,
|
|
||||||
description="Parent folder ID (null = root)",
|
|
||||||
json_schema_extra={
|
|
||||||
"label": "Uebergeordneter Ordner",
|
|
||||||
"frontend_type": "text",
|
|
||||||
"frontend_readonly": False,
|
|
||||||
"frontend_required": False,
|
|
||||||
"fk_target": {"db": "poweron_management", "table": "FileFolder"},
|
|
||||||
},
|
|
||||||
)
|
|
||||||
mandateId: Optional[str] = Field(
|
|
||||||
default=None,
|
|
||||||
description="Mandate context",
|
|
||||||
json_schema_extra={
|
|
||||||
"label": "Mandanten-ID",
|
|
||||||
"frontend_type": "text",
|
|
||||||
"frontend_readonly": True,
|
|
||||||
"frontend_required": False,
|
|
||||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
|
||||||
},
|
|
||||||
)
|
|
||||||
featureInstanceId: Optional[str] = Field(
|
|
||||||
default=None,
|
|
||||||
description="Feature instance context",
|
|
||||||
json_schema_extra={
|
|
||||||
"label": "Feature-Instanz-ID",
|
|
||||||
"frontend_type": "text",
|
|
||||||
"frontend_readonly": True,
|
|
||||||
"frontend_required": False,
|
|
||||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
|
|
||||||
},
|
|
||||||
)
|
|
||||||
scope: str = Field(
|
|
||||||
default="personal",
|
|
||||||
description="Data visibility scope: personal, featureInstance, mandate, global. Inherited by files in this folder.",
|
|
||||||
json_schema_extra={
|
|
||||||
"label": "Sichtbarkeit",
|
|
||||||
"frontend_type": "select",
|
|
||||||
"frontend_readonly": False,
|
|
||||||
"frontend_required": False,
|
|
||||||
"frontend_options": [
|
|
||||||
{"value": "personal", "label": "Persönlich"},
|
|
||||||
{"value": "featureInstance", "label": "Feature-Instanz"},
|
|
||||||
{"value": "mandate", "label": "Mandant"},
|
|
||||||
{"value": "global", "label": "Global"},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
)
|
|
||||||
neutralize: bool = Field(
|
|
||||||
default=False,
|
|
||||||
description="Whether files in this folder should be neutralized before AI processing. Inherited by new/moved files.",
|
|
||||||
json_schema_extra={
|
|
||||||
"label": "Neutralisieren",
|
|
||||||
"frontend_type": "checkbox",
|
|
||||||
"frontend_readonly": False,
|
|
||||||
"frontend_required": False,
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
@ -10,6 +10,69 @@ import uuid
|
||||||
import base64
|
import base64
|
||||||
|
|
||||||
|
|
||||||
|
@i18nModel("Ordner")
|
||||||
|
class FileFolder(PowerOnModel):
|
||||||
|
"""Persistenter Datei-Ordner im Management-DB-Kontext (RBAC wie FileItem)."""
|
||||||
|
|
||||||
|
id: str = Field(
|
||||||
|
default_factory=lambda: str(uuid.uuid4()),
|
||||||
|
description="Primary key",
|
||||||
|
json_schema_extra={"label": "ID", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False},
|
||||||
|
)
|
||||||
|
name: str = Field(
|
||||||
|
description="Display name of the folder",
|
||||||
|
json_schema_extra={"label": "Name", "frontend_type": "text", "frontend_readonly": False, "frontend_required": True},
|
||||||
|
)
|
||||||
|
parentId: Optional[str] = Field(
|
||||||
|
default=None,
|
||||||
|
description="Parent folder id; empty or None for root",
|
||||||
|
json_schema_extra={
|
||||||
|
"label": "Uebergeordneter Ordner",
|
||||||
|
"frontend_type": "text",
|
||||||
|
"frontend_readonly": False,
|
||||||
|
"frontend_required": False,
|
||||||
|
"fk_target": {"db": "poweron_management", "table": "FileFolder", "labelField": "name"},
|
||||||
|
},
|
||||||
|
)
|
||||||
|
mandateId: Optional[str] = Field(
|
||||||
|
default="",
|
||||||
|
description="ID of the mandate this folder belongs to",
|
||||||
|
json_schema_extra={
|
||||||
|
"label": "Mandant",
|
||||||
|
"frontend_type": "text",
|
||||||
|
"frontend_readonly": True,
|
||||||
|
"frontend_required": False,
|
||||||
|
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||||
|
},
|
||||||
|
)
|
||||||
|
featureInstanceId: Optional[str] = Field(
|
||||||
|
default="",
|
||||||
|
description="ID of the feature instance this folder belongs to",
|
||||||
|
json_schema_extra={
|
||||||
|
"label": "Feature-Instanz",
|
||||||
|
"frontend_type": "text",
|
||||||
|
"frontend_readonly": True,
|
||||||
|
"frontend_required": False,
|
||||||
|
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||||
|
},
|
||||||
|
)
|
||||||
|
scope: str = Field(
|
||||||
|
default="personal",
|
||||||
|
description="Data visibility scope: personal, featureInstance, mandate, global",
|
||||||
|
json_schema_extra={"label": "Sichtbarkeit", "frontend_type": "select", "frontend_readonly": False, "frontend_required": False, "frontend_options": [
|
||||||
|
{"value": "personal", "label": "Persönlich"},
|
||||||
|
{"value": "featureInstance", "label": "Feature-Instanz"},
|
||||||
|
{"value": "mandate", "label": "Mandant"},
|
||||||
|
{"value": "global", "label": "Global"},
|
||||||
|
]},
|
||||||
|
)
|
||||||
|
neutralize: bool = Field(
|
||||||
|
default=False,
|
||||||
|
description="Whether files in this folder should be neutralized before AI processing",
|
||||||
|
json_schema_extra={"label": "Neutralisieren", "frontend_type": "checkbox", "frontend_readonly": False, "frontend_required": False},
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
@i18nModel("Datei")
|
@i18nModel("Datei")
|
||||||
class FileItem(PowerOnModel):
|
class FileItem(PowerOnModel):
|
||||||
"""Metadaten einer gespeicherten Datei."""
|
"""Metadaten einer gespeicherten Datei."""
|
||||||
|
|
@ -30,10 +93,7 @@ class FileItem(PowerOnModel):
|
||||||
"frontend_type": "text",
|
"frontend_type": "text",
|
||||||
"frontend_readonly": True,
|
"frontend_readonly": True,
|
||||||
"frontend_required": False,
|
"frontend_required": False,
|
||||||
"frontend_fk_source": "/api/mandates/",
|
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||||
"frontend_fk_display_field": "label",
|
|
||||||
"fk_model": "Mandate",
|
|
||||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
featureInstanceId: Optional[str] = Field(
|
featureInstanceId: Optional[str] = Field(
|
||||||
|
|
@ -44,10 +104,18 @@ class FileItem(PowerOnModel):
|
||||||
"frontend_type": "text",
|
"frontend_type": "text",
|
||||||
"frontend_readonly": True,
|
"frontend_readonly": True,
|
||||||
"frontend_required": False,
|
"frontend_required": False,
|
||||||
"frontend_fk_source": "/api/features/instances",
|
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||||
"frontend_fk_display_field": "label",
|
},
|
||||||
"fk_model": "FeatureInstance",
|
)
|
||||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
|
folderId: Optional[str] = Field(
|
||||||
|
default=None,
|
||||||
|
description="ID of the folder containing this file (if any)",
|
||||||
|
json_schema_extra={
|
||||||
|
"label": "Ordner",
|
||||||
|
"frontend_type": "text",
|
||||||
|
"frontend_readonly": False,
|
||||||
|
"frontend_required": False,
|
||||||
|
"fk_target": {"db": "poweron_management", "table": "FileFolder", "labelField": "name"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
mimeType: str = Field(
|
mimeType: str = Field(
|
||||||
|
|
@ -74,17 +142,6 @@ class FileItem(PowerOnModel):
|
||||||
description="Tags for categorization and search",
|
description="Tags for categorization and search",
|
||||||
json_schema_extra={"label": "Tags", "frontend_type": "tags", "frontend_readonly": False, "frontend_required": False},
|
json_schema_extra={"label": "Tags", "frontend_type": "tags", "frontend_readonly": False, "frontend_required": False},
|
||||||
)
|
)
|
||||||
folderId: Optional[str] = Field(
|
|
||||||
default=None,
|
|
||||||
description="ID of the parent folder",
|
|
||||||
json_schema_extra={
|
|
||||||
"label": "Ordner-ID",
|
|
||||||
"frontend_type": "text",
|
|
||||||
"frontend_readonly": False,
|
|
||||||
"frontend_required": False,
|
|
||||||
"fk_target": {"db": "poweron_management", "table": "FileFolder"},
|
|
||||||
},
|
|
||||||
)
|
|
||||||
description: Optional[str] = Field(
|
description: Optional[str] = Field(
|
||||||
default=None,
|
default=None,
|
||||||
description="User-provided description of the file",
|
description="User-provided description of the file",
|
||||||
|
|
|
||||||
|
|
@ -5,10 +5,11 @@ Invitation model for self-service onboarding.
|
||||||
Token-basierte Einladungen für neue User zu Mandanten/Features.
|
Token-basierte Einladungen für neue User zu Mandanten/Features.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
import time
|
||||||
import uuid
|
import uuid
|
||||||
import secrets
|
import secrets
|
||||||
from typing import Optional, List
|
from typing import Optional, List
|
||||||
from pydantic import BaseModel, Field
|
from pydantic import BaseModel, Field, computed_field
|
||||||
from modules.datamodels.datamodelBase import PowerOnModel
|
from modules.datamodels.datamodelBase import PowerOnModel
|
||||||
from modules.shared.i18nRegistry import i18nModel
|
from modules.shared.i18nRegistry import i18nModel
|
||||||
|
|
||||||
|
|
@ -37,7 +38,7 @@ class Invitation(PowerOnModel):
|
||||||
"frontend_type": "text",
|
"frontend_type": "text",
|
||||||
"frontend_readonly": True,
|
"frontend_readonly": True,
|
||||||
"frontend_required": True,
|
"frontend_required": True,
|
||||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
featureInstanceId: Optional[str] = Field(
|
featureInstanceId: Optional[str] = Field(
|
||||||
|
|
@ -48,7 +49,7 @@ class Invitation(PowerOnModel):
|
||||||
"frontend_type": "text",
|
"frontend_type": "text",
|
||||||
"frontend_readonly": True,
|
"frontend_readonly": True,
|
||||||
"frontend_required": False,
|
"frontend_required": False,
|
||||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
|
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
roleIds: List[str] = Field(
|
roleIds: List[str] = Field(
|
||||||
|
|
@ -80,7 +81,7 @@ class Invitation(PowerOnModel):
|
||||||
"frontend_type": "text",
|
"frontend_type": "text",
|
||||||
"frontend_readonly": True,
|
"frontend_readonly": True,
|
||||||
"frontend_required": False,
|
"frontend_required": False,
|
||||||
"fk_target": {"db": "poweron_app", "table": "User"},
|
"fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
usedAt: Optional[float] = Field(
|
usedAt: Optional[float] = Field(
|
||||||
|
|
@ -94,10 +95,26 @@ class Invitation(PowerOnModel):
|
||||||
json_schema_extra={"label": "Widerrufen am", "frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False}
|
json_schema_extra={"label": "Widerrufen am", "frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False}
|
||||||
)
|
)
|
||||||
|
|
||||||
emailSent: Optional[bool] = Field(
|
emailSentFlag: Optional[bool] = Field(
|
||||||
default=False,
|
default=False,
|
||||||
description="Whether the invitation email was successfully sent",
|
description="Whether the invitation email was successfully sent",
|
||||||
json_schema_extra={"label": "E-Mail gesendet", "frontend_type": "checkbox", "frontend_readonly": True, "frontend_required": False}
|
json_schema_extra={
|
||||||
|
"label": "E-Mail gesendet",
|
||||||
|
"frontend_type": "checkbox",
|
||||||
|
"frontend_readonly": True,
|
||||||
|
"frontend_required": False,
|
||||||
|
"frontend_format_labels": ["Ja", "-", "Nein"],
|
||||||
|
},
|
||||||
|
)
|
||||||
|
emailSentAt: Optional[float] = Field(
|
||||||
|
default=None,
|
||||||
|
description="Timestamp when the invitation email was sent (UTC, seconds)",
|
||||||
|
json_schema_extra={
|
||||||
|
"label": "E-Mail gesendet am",
|
||||||
|
"frontend_type": "timestamp",
|
||||||
|
"frontend_readonly": True,
|
||||||
|
"frontend_required": False,
|
||||||
|
},
|
||||||
)
|
)
|
||||||
|
|
||||||
maxUses: int = Field(
|
maxUses: int = Field(
|
||||||
|
|
@ -113,3 +130,33 @@ class Invitation(PowerOnModel):
|
||||||
description="Current number of times this invitation has been used",
|
description="Current number of times this invitation has been used",
|
||||||
json_schema_extra={"label": "Aktuelle Verwendungen", "frontend_type": "number", "frontend_readonly": True, "frontend_required": False}
|
json_schema_extra={"label": "Aktuelle Verwendungen", "frontend_type": "number", "frontend_readonly": True, "frontend_required": False}
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@computed_field( # type: ignore[prop-decorator]
|
||||||
|
json_schema_extra={
|
||||||
|
"label": "Abgelaufen",
|
||||||
|
"frontend_type": "checkbox",
|
||||||
|
"frontend_readonly": True,
|
||||||
|
"frontend_required": False,
|
||||||
|
"frontend_format_labels": ["Ja", "-", "Nein"],
|
||||||
|
},
|
||||||
|
)
|
||||||
|
@property
|
||||||
|
def expiredFlag(self) -> bool:
|
||||||
|
"""True iff `expiresAt` lies in the past (UTC)."""
|
||||||
|
if self.expiresAt is None:
|
||||||
|
return False
|
||||||
|
return float(self.expiresAt) < time.time()
|
||||||
|
|
||||||
|
@computed_field( # type: ignore[prop-decorator]
|
||||||
|
json_schema_extra={
|
||||||
|
"label": "Verbraucht",
|
||||||
|
"frontend_type": "checkbox",
|
||||||
|
"frontend_readonly": True,
|
||||||
|
"frontend_required": False,
|
||||||
|
"frontend_format_labels": ["Ja", "-", "Nein"],
|
||||||
|
},
|
||||||
|
)
|
||||||
|
@property
|
||||||
|
def usedUpFlag(self) -> bool:
|
||||||
|
"""True iff `currentUses >= maxUses`."""
|
||||||
|
return (self.currentUses or 0) >= (self.maxUses or 1)
|
||||||
|
|
|
||||||
|
|
@ -6,7 +6,7 @@ Unified JSON document schema and helpers used by both generation prompts and ren
|
||||||
This defines a single canonical template and the supported section types.
|
This defines a single canonical template and the supported section types.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
from typing import List
|
from typing import List, Literal, TypedDict
|
||||||
|
|
||||||
# Canonical list of supported section types across the system
|
# Canonical list of supported section types across the system
|
||||||
supportedSectionTypes: List[str] = [
|
supportedSectionTypes: List[str] = [
|
||||||
|
|
@ -18,6 +18,21 @@ supportedSectionTypes: List[str] = [
|
||||||
"image",
|
"image",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
class InlineRun(TypedDict, total=False):
|
||||||
|
"""Single inline content run. Every paragraph/cell/list-item is a List[InlineRun]."""
|
||||||
|
type: Literal["text", "image", "link", "bold", "italic", "code"]
|
||||||
|
value: str # text content (for text/bold/italic/code/link-label)
|
||||||
|
fileId: str # for type=image: reference to FileItem
|
||||||
|
base64Data: str # for type=image: resolved base64 (post-processing)
|
||||||
|
mimeType: str # for type=image: e.g. "image/png"
|
||||||
|
widthPt: int # for type=image: optional render width
|
||||||
|
href: str # for type=link: URL target
|
||||||
|
|
||||||
|
supportedInlineRunTypes: List[str] = [
|
||||||
|
"text", "image", "link", "bold", "italic", "code",
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
# Canonical JSON template used for AI generation (documents array + sections)
|
# Canonical JSON template used for AI generation (documents array + sections)
|
||||||
# This template is used for STRUCTURE generation - sections have empty elements arrays.
|
# This template is used for STRUCTURE generation - sections have empty elements arrays.
|
||||||
# For content generation, elements arrays will be populated later.
|
# For content generation, elements arrays will be populated later.
|
||||||
|
|
|
||||||
|
|
@ -30,17 +30,17 @@ class FileContentIndex(PowerOnModel):
|
||||||
)
|
)
|
||||||
userId: str = Field(
|
userId: str = Field(
|
||||||
description="Owner user ID",
|
description="Owner user ID",
|
||||||
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "User"}},
|
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"}},
|
||||||
)
|
)
|
||||||
featureInstanceId: str = Field(
|
featureInstanceId: str = Field(
|
||||||
default="",
|
default="",
|
||||||
description="Feature instance scope",
|
description="Feature instance scope",
|
||||||
json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
|
json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}},
|
||||||
)
|
)
|
||||||
mandateId: str = Field(
|
mandateId: str = Field(
|
||||||
default="",
|
default="",
|
||||||
description="Mandate scope",
|
description="Mandate scope",
|
||||||
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate"}},
|
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}},
|
||||||
)
|
)
|
||||||
fileName: str = Field(
|
fileName: str = Field(
|
||||||
description="Original file name",
|
description="Original file name",
|
||||||
|
|
@ -78,7 +78,7 @@ class FileContentIndex(PowerOnModel):
|
||||||
extractedAt: float = Field(
|
extractedAt: float = Field(
|
||||||
default_factory=getUtcTimestamp,
|
default_factory=getUtcTimestamp,
|
||||||
description="Extraction timestamp",
|
description="Extraction timestamp",
|
||||||
json_schema_extra={"label": "Extrahiert am"},
|
json_schema_extra={"label": "Extrahiert am", "frontend_type": "timestamp"},
|
||||||
)
|
)
|
||||||
status: str = Field(
|
status: str = Field(
|
||||||
default="pending",
|
default="pending",
|
||||||
|
|
@ -90,6 +90,16 @@ class FileContentIndex(PowerOnModel):
|
||||||
description="Data visibility scope: personal, featureInstance, mandate, global",
|
description="Data visibility scope: personal, featureInstance, mandate, global",
|
||||||
json_schema_extra={"label": "Sichtbarkeit"},
|
json_schema_extra={"label": "Sichtbarkeit"},
|
||||||
)
|
)
|
||||||
|
sourceKind: str = Field(
|
||||||
|
default="file",
|
||||||
|
description="Origin of the indexed content: file, sharepoint_item, outlook_message, outlook_attachment, ...",
|
||||||
|
json_schema_extra={"label": "Quellenart"},
|
||||||
|
)
|
||||||
|
connectionId: Optional[str] = Field(
|
||||||
|
default=None,
|
||||||
|
description="UserConnection ID if this index entry originates from an external connector",
|
||||||
|
json_schema_extra={"label": "Connection-ID"},
|
||||||
|
)
|
||||||
neutralizationStatus: Optional[str] = Field(
|
neutralizationStatus: Optional[str] = Field(
|
||||||
default=None,
|
default=None,
|
||||||
description="Neutralization status: completed, failed, skipped, None = not required",
|
description="Neutralization status: completed, failed, skipped, None = not required",
|
||||||
|
|
@ -116,16 +126,16 @@ class ContentChunk(PowerOnModel):
|
||||||
)
|
)
|
||||||
fileId: str = Field(
|
fileId: str = Field(
|
||||||
description="FK to the source file",
|
description="FK to the source file",
|
||||||
json_schema_extra={"label": "Datei-ID", "fk_target": {"db": "poweron_management", "table": "FileItem"}},
|
json_schema_extra={"label": "Datei-ID", "fk_target": {"db": "poweron_management", "table": "FileItem", "labelField": "fileName"}},
|
||||||
)
|
)
|
||||||
userId: str = Field(
|
userId: str = Field(
|
||||||
description="Owner user ID",
|
description="Owner user ID",
|
||||||
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "User"}},
|
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"}},
|
||||||
)
|
)
|
||||||
featureInstanceId: str = Field(
|
featureInstanceId: str = Field(
|
||||||
default="",
|
default="",
|
||||||
description="Feature instance scope",
|
description="Feature instance scope",
|
||||||
json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
|
json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}},
|
||||||
)
|
)
|
||||||
contentType: str = Field(
|
contentType: str = Field(
|
||||||
description="Content type: text, image, videostream, audiostream, other",
|
description="Content type: text, image, videostream, audiostream, other",
|
||||||
|
|
@ -214,16 +224,16 @@ class WorkflowMemory(PowerOnModel):
|
||||||
)
|
)
|
||||||
workflowId: str = Field(
|
workflowId: str = Field(
|
||||||
description="FK to the workflow",
|
description="FK to the workflow",
|
||||||
json_schema_extra={"label": "Workflow-ID", "fk_target": {"db": "poweron_chat", "table": "ChatWorkflow"}},
|
json_schema_extra={"label": "Workflow-ID", "fk_target": {"db": "poweron_chat", "table": "ChatWorkflow", "labelField": "name"}},
|
||||||
)
|
)
|
||||||
userId: str = Field(
|
userId: str = Field(
|
||||||
description="Owner user ID",
|
description="Owner user ID",
|
||||||
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "User"}},
|
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"}},
|
||||||
)
|
)
|
||||||
featureInstanceId: str = Field(
|
featureInstanceId: str = Field(
|
||||||
default="",
|
default="",
|
||||||
description="Feature instance scope",
|
description="Feature instance scope",
|
||||||
json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
|
json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}},
|
||||||
)
|
)
|
||||||
key: str = Field(
|
key: str = Field(
|
||||||
description="Key identifier (e.g. 'entity:companyName')",
|
description="Key identifier (e.g. 'entity:companyName')",
|
||||||
|
|
|
||||||
|
|
@ -31,10 +31,7 @@ class UserMandate(PowerOnModel):
|
||||||
"frontend_type": "select",
|
"frontend_type": "select",
|
||||||
"frontend_readonly": False,
|
"frontend_readonly": False,
|
||||||
"frontend_required": True,
|
"frontend_required": True,
|
||||||
"frontend_fk_source": "/api/users/",
|
"fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
|
||||||
"frontend_fk_display_field": "username",
|
|
||||||
"fk_model": "User",
|
|
||||||
"fk_target": {"db": "poweron_app", "table": "User"},
|
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
mandateId: str = Field(
|
mandateId: str = Field(
|
||||||
|
|
@ -44,10 +41,7 @@ class UserMandate(PowerOnModel):
|
||||||
"frontend_type": "select",
|
"frontend_type": "select",
|
||||||
"frontend_readonly": False,
|
"frontend_readonly": False,
|
||||||
"frontend_required": True,
|
"frontend_required": True,
|
||||||
"frontend_fk_source": "/api/mandates/",
|
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||||
"frontend_fk_display_field": "label",
|
|
||||||
"fk_model": "Mandate",
|
|
||||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
enabled: bool = Field(
|
enabled: bool = Field(
|
||||||
|
|
@ -75,9 +69,7 @@ class FeatureAccess(PowerOnModel):
|
||||||
"frontend_type": "select",
|
"frontend_type": "select",
|
||||||
"frontend_readonly": False,
|
"frontend_readonly": False,
|
||||||
"frontend_required": True,
|
"frontend_required": True,
|
||||||
"frontend_fk_source": "/api/users/",
|
"fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
|
||||||
"frontend_fk_display_field": "username",
|
|
||||||
"fk_target": {"db": "poweron_app", "table": "User"},
|
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
featureInstanceId: str = Field(
|
featureInstanceId: str = Field(
|
||||||
|
|
@ -87,9 +79,7 @@ class FeatureAccess(PowerOnModel):
|
||||||
"frontend_type": "select",
|
"frontend_type": "select",
|
||||||
"frontend_readonly": False,
|
"frontend_readonly": False,
|
||||||
"frontend_required": True,
|
"frontend_required": True,
|
||||||
"frontend_fk_source": "/api/features/instances",
|
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||||
"frontend_fk_display_field": "label",
|
|
||||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
|
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
enabled: bool = Field(
|
enabled: bool = Field(
|
||||||
|
|
@ -117,7 +107,7 @@ class UserMandateRole(PowerOnModel):
|
||||||
"frontend_type": "text",
|
"frontend_type": "text",
|
||||||
"frontend_readonly": True,
|
"frontend_readonly": True,
|
||||||
"frontend_required": True,
|
"frontend_required": True,
|
||||||
"fk_target": {"db": "poweron_app", "table": "UserMandate"},
|
"fk_target": {"db": "poweron_app", "table": "UserMandate", "labelField": None},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
roleId: str = Field(
|
roleId: str = Field(
|
||||||
|
|
@ -127,9 +117,7 @@ class UserMandateRole(PowerOnModel):
|
||||||
"frontend_type": "select",
|
"frontend_type": "select",
|
||||||
"frontend_readonly": False,
|
"frontend_readonly": False,
|
||||||
"frontend_required": True,
|
"frontend_required": True,
|
||||||
"frontend_fk_source": "/api/rbac/roles",
|
"fk_target": {"db": "poweron_app", "table": "Role", "labelField": "roleLabel"},
|
||||||
"frontend_fk_display_field": "roleLabel",
|
|
||||||
"fk_target": {"db": "poweron_app", "table": "Role"},
|
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
@ -152,7 +140,7 @@ class FeatureAccessRole(PowerOnModel):
|
||||||
"frontend_type": "text",
|
"frontend_type": "text",
|
||||||
"frontend_readonly": True,
|
"frontend_readonly": True,
|
||||||
"frontend_required": True,
|
"frontend_required": True,
|
||||||
"fk_target": {"db": "poweron_app", "table": "FeatureAccess"},
|
"fk_target": {"db": "poweron_app", "table": "FeatureAccess", "labelField": None},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
roleId: str = Field(
|
roleId: str = Field(
|
||||||
|
|
@ -162,8 +150,6 @@ class FeatureAccessRole(PowerOnModel):
|
||||||
"frontend_type": "select",
|
"frontend_type": "select",
|
||||||
"frontend_readonly": False,
|
"frontend_readonly": False,
|
||||||
"frontend_required": True,
|
"frontend_required": True,
|
||||||
"frontend_fk_source": "/api/rbac/roles",
|
"fk_target": {"db": "poweron_app", "table": "Role", "labelField": "roleLabel"},
|
||||||
"frontend_fk_display_field": "roleLabel",
|
|
||||||
"fk_target": {"db": "poweron_app", "table": "Role"},
|
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
|
|
|
||||||
|
|
@ -64,7 +64,7 @@ class MessagingSubscription(PowerOnModel):
|
||||||
"frontend_readonly": True,
|
"frontend_readonly": True,
|
||||||
"frontend_required": False,
|
"frontend_required": False,
|
||||||
"label": "Mandanten-ID",
|
"label": "Mandanten-ID",
|
||||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
featureInstanceId: str = Field(
|
featureInstanceId: str = Field(
|
||||||
|
|
@ -74,7 +74,7 @@ class MessagingSubscription(PowerOnModel):
|
||||||
"frontend_readonly": True,
|
"frontend_readonly": True,
|
||||||
"frontend_required": False,
|
"frontend_required": False,
|
||||||
"label": "Feature-Instanz-ID",
|
"label": "Feature-Instanz-ID",
|
||||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
|
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
description: Optional[str] = Field(
|
description: Optional[str] = Field(
|
||||||
|
|
@ -131,7 +131,7 @@ class MessagingSubscriptionRegistration(BaseModel):
|
||||||
"frontend_readonly": True,
|
"frontend_readonly": True,
|
||||||
"frontend_required": False,
|
"frontend_required": False,
|
||||||
"label": "Mandanten-ID",
|
"label": "Mandanten-ID",
|
||||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
featureInstanceId: str = Field(
|
featureInstanceId: str = Field(
|
||||||
|
|
@ -141,7 +141,7 @@ class MessagingSubscriptionRegistration(BaseModel):
|
||||||
"frontend_readonly": True,
|
"frontend_readonly": True,
|
||||||
"frontend_required": False,
|
"frontend_required": False,
|
||||||
"label": "Feature-Instanz-ID",
|
"label": "Feature-Instanz-ID",
|
||||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
|
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
subscriptionId: str = Field(
|
subscriptionId: str = Field(
|
||||||
|
|
@ -160,7 +160,7 @@ class MessagingSubscriptionRegistration(BaseModel):
|
||||||
"frontend_readonly": True,
|
"frontend_readonly": True,
|
||||||
"frontend_required": False,
|
"frontend_required": False,
|
||||||
"label": "Benutzer-ID",
|
"label": "Benutzer-ID",
|
||||||
"fk_target": {"db": "poweron_app", "table": "User"},
|
"fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
channel: MessagingChannel = Field(
|
channel: MessagingChannel = Field(
|
||||||
|
|
@ -249,7 +249,7 @@ class MessagingDelivery(BaseModel):
|
||||||
"frontend_readonly": True,
|
"frontend_readonly": True,
|
||||||
"frontend_required": False,
|
"frontend_required": False,
|
||||||
"label": "Benutzer-ID",
|
"label": "Benutzer-ID",
|
||||||
"fk_target": {"db": "poweron_app", "table": "User"},
|
"fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
channel: MessagingChannel = Field(
|
channel: MessagingChannel = Field(
|
||||||
|
|
@ -296,7 +296,7 @@ class MessagingDelivery(BaseModel):
|
||||||
default=None,
|
default=None,
|
||||||
description="When the delivery was sent (UTC timestamp in seconds)",
|
description="When the delivery was sent (UTC timestamp in seconds)",
|
||||||
json_schema_extra={
|
json_schema_extra={
|
||||||
"frontend_type": "datetime",
|
"frontend_type": "timestamp",
|
||||||
"frontend_readonly": True,
|
"frontend_readonly": True,
|
||||||
"frontend_required": False,
|
"frontend_required": False,
|
||||||
"label": "Gesendet am",
|
"label": "Gesendet am",
|
||||||
|
|
|
||||||
|
|
@ -65,7 +65,7 @@ class UserNotification(PowerOnModel):
|
||||||
"frontend_type": "text",
|
"frontend_type": "text",
|
||||||
"frontend_readonly": True,
|
"frontend_readonly": True,
|
||||||
"frontend_required": True,
|
"frontend_required": True,
|
||||||
"fk_target": {"db": "poweron_app", "table": "User"},
|
"fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -9,14 +9,95 @@ All models use camelStyle naming convention for consistency with frontend.
|
||||||
from typing import List, Dict, Any, Optional, Generic, TypeVar
|
from typing import List, Dict, Any, Optional, Generic, TypeVar
|
||||||
from pydantic import BaseModel, Field, ConfigDict
|
from pydantic import BaseModel, Field, ConfigDict
|
||||||
import math
|
import math
|
||||||
|
import uuid
|
||||||
|
|
||||||
T = TypeVar('T')
|
T = TypeVar('T')
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Group layout models (Strategy B — derived from Views, purely presentational)
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
class GroupByLevel(BaseModel):
|
||||||
|
"""One level of a multi-level grouping definition, stored inside a TableListView config."""
|
||||||
|
field: str = Field(..., description="Field key to group by")
|
||||||
|
nullLabel: str = Field(default="—", description="Display label for null/empty values")
|
||||||
|
direction: str = Field(
|
||||||
|
default="asc",
|
||||||
|
description="Order of group bands at this level: 'asc' or 'desc'",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class GroupBand(BaseModel):
|
||||||
|
"""
|
||||||
|
A contiguous block of rows that share the same group path, intersecting the current page.
|
||||||
|
|
||||||
|
startRowIndex and rowCount are 0-based indices relative to the current page's items[].
|
||||||
|
"""
|
||||||
|
path: List[str] = Field(..., description="Hierarchical group key (one entry per level)")
|
||||||
|
label: str = Field(..., description="Display label for this band (last path element)")
|
||||||
|
startRowIndex: int = Field(..., description="0-based start index within items[] on this page")
|
||||||
|
rowCount: int = Field(..., description="Number of items in this band on this page")
|
||||||
|
|
||||||
|
|
||||||
|
class GroupLayout(BaseModel):
|
||||||
|
"""
|
||||||
|
Grouping structure for the current response page.
|
||||||
|
Included only when the effective view has groupByLevels configured.
|
||||||
|
The frontend renders group header rows by iterating bands and inserting
|
||||||
|
headers before each startRowIndex.
|
||||||
|
"""
|
||||||
|
levels: List[str] = Field(..., description="Ordered field keys that define the grouping hierarchy")
|
||||||
|
bands: List[GroupBand] = Field(..., description="Bands intersecting the current page, in order")
|
||||||
|
|
||||||
|
|
||||||
|
class AppliedViewMeta(BaseModel):
|
||||||
|
"""Minimal metadata about the view that was applied to this response."""
|
||||||
|
viewKey: Optional[str] = None
|
||||||
|
displayName: Optional[str] = None
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Persisted view model
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
class TableListView(BaseModel):
|
||||||
|
"""
|
||||||
|
A saved table view for one (userId, contextKey) pair.
|
||||||
|
|
||||||
|
config schema (schemaVersion=1):
|
||||||
|
{
|
||||||
|
"schemaVersion": 1,
|
||||||
|
"filters": {}, # same structure as PaginationParams.filters
|
||||||
|
"sort": [], # same structure as PaginationParams.sort
|
||||||
|
"groupByLevels": [ # ordered grouping levels
|
||||||
|
{"field": "scope", "nullLabel": "—", "direction": "asc"}
|
||||||
|
],
|
||||||
|
"collapsedSectionKeys": [], # optional: section UI (stable group keys)
|
||||||
|
"collapsedGroupKeys": [], # optional: inline group bands (path.join('///'))
|
||||||
|
}
|
||||||
|
|
||||||
|
contextKey convention: API path without /api/ prefix and without trailing slash.
|
||||||
|
Examples: "connections", "prompts", "admin/users", "files/list"
|
||||||
|
|
||||||
|
viewKey is a user-defined slug, unique per (userId, mandateId, contextKey).
|
||||||
|
"""
|
||||||
|
id: str = Field(default_factory=lambda: str(uuid.uuid4()))
|
||||||
|
userId: str
|
||||||
|
mandateId: Optional[str] = None
|
||||||
|
contextKey: str
|
||||||
|
viewKey: str
|
||||||
|
displayName: str
|
||||||
|
config: Dict[str, Any] = Field(default_factory=dict)
|
||||||
|
updatedAt: Optional[float] = None
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Sort and pagination models
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
class SortField(BaseModel):
|
class SortField(BaseModel):
|
||||||
"""
|
"""Single sort field configuration."""
|
||||||
Single sort field configuration.
|
|
||||||
"""
|
|
||||||
field: str = Field(..., description="Field name to sort by")
|
field: str = Field(..., description="Field name to sort by")
|
||||||
direction: str = Field(..., description="Sort direction: 'asc' or 'desc'")
|
direction: str = Field(..., description="Sort direction: 'asc' or 'desc'")
|
||||||
|
|
||||||
|
|
@ -24,6 +105,14 @@ class SortField(BaseModel):
|
||||||
class PaginationParams(BaseModel):
|
class PaginationParams(BaseModel):
|
||||||
"""
|
"""
|
||||||
Complete pagination state including page, sorting, and filters.
|
Complete pagination state including page, sorting, and filters.
|
||||||
|
|
||||||
|
View extension (optional):
|
||||||
|
viewKey — Slug of a saved TableListView for this (user, contextKey) pair.
|
||||||
|
The server loads the view, merges its filters/sort/groupByLevels
|
||||||
|
into the effective query (request fields take priority over view
|
||||||
|
defaults for explicitly provided fields), and returns groupLayout
|
||||||
|
in the response when groupByLevels is non-empty.
|
||||||
|
Omit or set to None for the default (ungrouped) view.
|
||||||
"""
|
"""
|
||||||
page: int = Field(ge=1, description="Current page number (1-based)")
|
page: int = Field(ge=1, description="Current page number (1-based)")
|
||||||
pageSize: int = Field(ge=1, le=1000, description="Number of items per page")
|
pageSize: int = Field(ge=1, le=1000, description="Number of items per page")
|
||||||
|
|
@ -38,6 +127,17 @@ class PaginationParams(BaseModel):
|
||||||
- Supported operators: equals/eq, contains, startsWith, endsWith, gt, gte, lt, lte, in, notIn
|
- Supported operators: equals/eq, contains, startsWith, endsWith, gt, gte, lt, lte, in, notIn
|
||||||
- Multiple filters are combined with AND logic"""
|
- Multiple filters are combined with AND logic"""
|
||||||
)
|
)
|
||||||
|
viewKey: Optional[str] = Field(
|
||||||
|
default=None,
|
||||||
|
description="Slug of a saved view to load; server merges view config into effective query",
|
||||||
|
)
|
||||||
|
groupByLevels: Optional[List[GroupByLevel]] = Field(
|
||||||
|
default=None,
|
||||||
|
description=(
|
||||||
|
"When set (including an empty list), replaces the saved view's groupByLevels for this request. "
|
||||||
|
"Omit entirely to use grouping from the view only."
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
class PaginationRequest(BaseModel):
|
class PaginationRequest(BaseModel):
|
||||||
|
|
@ -74,9 +174,24 @@ class PaginationMetadata(BaseModel):
|
||||||
class PaginatedResponse(BaseModel, Generic[T]):
|
class PaginatedResponse(BaseModel, Generic[T]):
|
||||||
"""
|
"""
|
||||||
Response containing paginated data and metadata.
|
Response containing paginated data and metadata.
|
||||||
|
|
||||||
|
groupLayout is included when the effective view has groupByLevels configured.
|
||||||
|
It describes how to render group header rows in the current page's items[].
|
||||||
|
Omitted (None) when no grouping is active.
|
||||||
|
|
||||||
|
appliedView describes which saved view was merged into this response,
|
||||||
|
allowing the frontend to synchronise its view selector.
|
||||||
"""
|
"""
|
||||||
items: List[T] = Field(..., description="Array of items for current page")
|
items: List[T] = Field(..., description="Array of items for current page")
|
||||||
pagination: Optional[PaginationMetadata] = Field(..., description="Pagination metadata (None if pagination not applied)")
|
pagination: Optional[PaginationMetadata] = Field(..., description="Pagination metadata (None if pagination not applied)")
|
||||||
|
groupLayout: Optional[GroupLayout] = Field(
|
||||||
|
default=None,
|
||||||
|
description="Group band structure for this page (None if no grouping active)",
|
||||||
|
)
|
||||||
|
appliedView: Optional[AppliedViewMeta] = Field(
|
||||||
|
default=None,
|
||||||
|
description="Metadata about the view applied to this response",
|
||||||
|
)
|
||||||
|
|
||||||
model_config = ConfigDict(arbitrary_types_allowed=True)
|
model_config = ConfigDict(arbitrary_types_allowed=True)
|
||||||
|
|
||||||
|
|
@ -84,30 +199,30 @@ class PaginatedResponse(BaseModel, Generic[T]):
|
||||||
def normalize_pagination_dict(pagination_dict: Dict[str, Any]) -> Dict[str, Any]:
|
def normalize_pagination_dict(pagination_dict: Dict[str, Any]) -> Dict[str, Any]:
|
||||||
"""
|
"""
|
||||||
Normalize pagination dictionary to handle frontend variations.
|
Normalize pagination dictionary to handle frontend variations.
|
||||||
Moves top-level "search" field into filters if present.
|
|
||||||
|
|
||||||
Args:
|
- Moves top-level "search" field into filters if present.
|
||||||
pagination_dict: Raw pagination dictionary from frontend
|
- Silently drops legacy fields (groupId, saveGroupTree) that were part of the
|
||||||
|
old tree-grouping implementation so old clients do not cause validation errors.
|
||||||
Returns:
|
- Passes viewKey through unchanged.
|
||||||
Normalized pagination dictionary ready for PaginationParams parsing
|
|
||||||
"""
|
"""
|
||||||
if not pagination_dict:
|
if not pagination_dict:
|
||||||
return pagination_dict
|
return pagination_dict
|
||||||
|
|
||||||
# Create a copy to avoid modifying the original
|
|
||||||
normalized = dict(pagination_dict)
|
normalized = dict(pagination_dict)
|
||||||
|
|
||||||
# Ensure required fields have sensible defaults
|
|
||||||
if "page" not in normalized:
|
if "page" not in normalized:
|
||||||
normalized["page"] = 1
|
normalized["page"] = 1
|
||||||
if "pageSize" not in normalized:
|
if "pageSize" not in normalized:
|
||||||
normalized["pageSize"] = 25
|
normalized["pageSize"] = 25
|
||||||
|
|
||||||
# Move top-level "search" into filters if present
|
# Move top-level "search" into filters
|
||||||
if "search" in normalized:
|
if "search" in normalized:
|
||||||
if "filters" not in normalized or normalized["filters"] is None:
|
if "filters" not in normalized or normalized["filters"] is None:
|
||||||
normalized["filters"] = {}
|
normalized["filters"] = {}
|
||||||
normalized["filters"]["search"] = normalized.pop("search")
|
normalized["filters"]["search"] = normalized.pop("search")
|
||||||
|
|
||||||
|
# Drop legacy tree-grouping fields — harmless if already absent
|
||||||
|
normalized.pop("groupId", None)
|
||||||
|
normalized.pop("saveGroupTree", None)
|
||||||
|
|
||||||
return normalized
|
return normalized
|
||||||
|
|
|
||||||
|
|
@ -63,9 +63,7 @@ class Role(PowerOnModel):
|
||||||
"frontend_readonly": True,
|
"frontend_readonly": True,
|
||||||
"frontend_visible": True,
|
"frontend_visible": True,
|
||||||
"frontend_required": False,
|
"frontend_required": False,
|
||||||
"frontend_fk_source": "/api/mandates/",
|
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||||
"frontend_fk_display_field": "label",
|
|
||||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
featureInstanceId: Optional[str] = Field(
|
featureInstanceId: Optional[str] = Field(
|
||||||
|
|
@ -77,9 +75,7 @@ class Role(PowerOnModel):
|
||||||
"frontend_readonly": True,
|
"frontend_readonly": True,
|
||||||
"frontend_visible": True,
|
"frontend_visible": True,
|
||||||
"frontend_required": False,
|
"frontend_required": False,
|
||||||
"frontend_fk_source": "/api/features/instances",
|
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||||
"frontend_fk_display_field": "label",
|
|
||||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
|
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
featureCode: Optional[str] = Field(
|
featureCode: Optional[str] = Field(
|
||||||
|
|
@ -115,9 +111,7 @@ class AccessRule(PowerOnModel):
|
||||||
"frontend_type": "select",
|
"frontend_type": "select",
|
||||||
"frontend_readonly": True,
|
"frontend_readonly": True,
|
||||||
"frontend_required": True,
|
"frontend_required": True,
|
||||||
"frontend_fk_source": "/api/rbac/roles",
|
"fk_target": {"db": "poweron_app", "table": "Role", "labelField": "roleLabel"},
|
||||||
"frontend_fk_display_field": "roleLabel",
|
|
||||||
"fk_target": {"db": "poweron_app", "table": "Role"},
|
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
context: AccessRuleContext = Field(
|
context: AccessRuleContext = Field(
|
||||||
|
|
|
||||||
|
|
@ -47,7 +47,7 @@ class Token(PowerOnModel):
|
||||||
)
|
)
|
||||||
userId: str = Field(
|
userId: str = Field(
|
||||||
...,
|
...,
|
||||||
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "User"}},
|
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"}},
|
||||||
)
|
)
|
||||||
authority: AuthAuthority = Field(
|
authority: AuthAuthority = Field(
|
||||||
...,
|
...,
|
||||||
|
|
@ -56,7 +56,7 @@ class Token(PowerOnModel):
|
||||||
connectionId: Optional[str] = Field(
|
connectionId: Optional[str] = Field(
|
||||||
None,
|
None,
|
||||||
description="ID of the connection this token belongs to",
|
description="ID of the connection this token belongs to",
|
||||||
json_schema_extra={"label": "Verbindungs-ID", "fk_target": {"db": "poweron_app", "table": "UserConnection"}},
|
json_schema_extra={"label": "Verbindungs-ID", "fk_target": {"db": "poweron_app", "table": "UserConnection", "labelField": "externalUsername"}},
|
||||||
)
|
)
|
||||||
tokenPurpose: Optional[TokenPurpose] = Field(
|
tokenPurpose: Optional[TokenPurpose] = Field(
|
||||||
default=None,
|
default=None,
|
||||||
|
|
@ -73,7 +73,7 @@ class Token(PowerOnModel):
|
||||||
)
|
)
|
||||||
expiresAt: float = Field(
|
expiresAt: float = Field(
|
||||||
description="When the token expires (UTC timestamp in seconds)",
|
description="When the token expires (UTC timestamp in seconds)",
|
||||||
json_schema_extra={"label": "Laeuft ab am"},
|
json_schema_extra={"label": "Laeuft ab am", "frontend_type": "timestamp"},
|
||||||
)
|
)
|
||||||
tokenRefresh: Optional[str] = Field(
|
tokenRefresh: Optional[str] = Field(
|
||||||
default=None,
|
default=None,
|
||||||
|
|
@ -87,12 +87,12 @@ class Token(PowerOnModel):
|
||||||
revokedAt: Optional[float] = Field(
|
revokedAt: Optional[float] = Field(
|
||||||
None,
|
None,
|
||||||
description="When the token was revoked (UTC timestamp in seconds)",
|
description="When the token was revoked (UTC timestamp in seconds)",
|
||||||
json_schema_extra={"label": "Widerrufen am"},
|
json_schema_extra={"label": "Widerrufen am", "frontend_type": "timestamp"},
|
||||||
)
|
)
|
||||||
revokedBy: Optional[str] = Field(
|
revokedBy: Optional[str] = Field(
|
||||||
None,
|
None,
|
||||||
description="User ID who revoked the token (admin/self)",
|
description="User ID who revoked the token (admin/self)",
|
||||||
json_schema_extra={"label": "Widerrufen von", "fk_target": {"db": "poweron_app", "table": "User"}},
|
json_schema_extra={"label": "Widerrufen von", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"}},
|
||||||
)
|
)
|
||||||
reason: Optional[str] = Field(
|
reason: Optional[str] = Field(
|
||||||
None,
|
None,
|
||||||
|
|
@ -139,7 +139,7 @@ class AuthEvent(PowerOnModel):
|
||||||
"frontend_type": "text",
|
"frontend_type": "text",
|
||||||
"frontend_readonly": True,
|
"frontend_readonly": True,
|
||||||
"frontend_required": True,
|
"frontend_required": True,
|
||||||
"fk_target": {"db": "poweron_app", "table": "User"},
|
"fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
eventType: str = Field(
|
eventType: str = Field(
|
||||||
|
|
@ -149,7 +149,7 @@ class AuthEvent(PowerOnModel):
|
||||||
timestamp: float = Field(
|
timestamp: float = Field(
|
||||||
default_factory=getUtcTimestamp,
|
default_factory=getUtcTimestamp,
|
||||||
description="Unix timestamp when the event occurred",
|
description="Unix timestamp when the event occurred",
|
||||||
json_schema_extra={"label": "Zeitstempel", "frontend_type": "datetime", "frontend_readonly": True, "frontend_required": True},
|
json_schema_extra={"label": "Zeitstempel", "frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": True},
|
||||||
)
|
)
|
||||||
ipAddress: Optional[str] = Field(
|
ipAddress: Optional[str] = Field(
|
||||||
default=None,
|
default=None,
|
||||||
|
|
|
||||||
|
|
@ -207,7 +207,7 @@ class MandateSubscription(PowerOnModel):
|
||||||
mandateId: str = Field(
|
mandateId: str = Field(
|
||||||
...,
|
...,
|
||||||
description="Foreign key to Mandate",
|
description="Foreign key to Mandate",
|
||||||
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate"}},
|
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}},
|
||||||
)
|
)
|
||||||
planKey: str = Field(
|
planKey: str = Field(
|
||||||
...,
|
...,
|
||||||
|
|
@ -226,35 +226,35 @@ class MandateSubscription(PowerOnModel):
|
||||||
json_schema_extra={"label": "Wiederkehrend"},
|
json_schema_extra={"label": "Wiederkehrend"},
|
||||||
)
|
)
|
||||||
|
|
||||||
startedAt: datetime = Field(
|
startedAt: float = Field(
|
||||||
default_factory=lambda: datetime.now(timezone.utc),
|
default_factory=lambda: datetime.now(timezone.utc).timestamp(),
|
||||||
description="Record creation timestamp",
|
description="Record creation timestamp (UTC unix)",
|
||||||
json_schema_extra={"label": "Gestartet"},
|
json_schema_extra={"label": "Gestartet", "frontend_type": "timestamp"},
|
||||||
)
|
)
|
||||||
effectiveFrom: Optional[datetime] = Field(
|
effectiveFrom: Optional[float] = Field(
|
||||||
None,
|
None,
|
||||||
description="When this subscription becomes operative. None = immediate. Set for SCHEDULED subs.",
|
description="When this subscription becomes operative (UTC unix). None = immediate.",
|
||||||
json_schema_extra={"label": "Wirksam ab"},
|
json_schema_extra={"label": "Wirksam ab", "frontend_type": "timestamp"},
|
||||||
)
|
)
|
||||||
endedAt: Optional[datetime] = Field(
|
endedAt: Optional[float] = Field(
|
||||||
None,
|
None,
|
||||||
description="When subscription ended (terminal)",
|
description="When subscription ended (UTC unix)",
|
||||||
json_schema_extra={"label": "Beendet"},
|
json_schema_extra={"label": "Beendet", "frontend_type": "timestamp"},
|
||||||
)
|
)
|
||||||
currentPeriodStart: Optional[datetime] = Field(
|
currentPeriodStart: Optional[float] = Field(
|
||||||
None,
|
None,
|
||||||
description="Current billing period start (synced from Stripe)",
|
description="Current billing period start (UTC unix, synced from Stripe)",
|
||||||
json_schema_extra={"label": "Periodenbeginn"},
|
json_schema_extra={"label": "Periodenbeginn", "frontend_type": "timestamp"},
|
||||||
)
|
)
|
||||||
currentPeriodEnd: Optional[datetime] = Field(
|
currentPeriodEnd: Optional[float] = Field(
|
||||||
None,
|
None,
|
||||||
description="Current billing period end (synced from Stripe)",
|
description="Current billing period end (UTC unix, synced from Stripe)",
|
||||||
json_schema_extra={"label": "Periodenende"},
|
json_schema_extra={"label": "Periodenende", "frontend_type": "timestamp"},
|
||||||
)
|
)
|
||||||
trialEndsAt: Optional[datetime] = Field(
|
trialEndsAt: Optional[float] = Field(
|
||||||
None,
|
None,
|
||||||
description="Trial expiry timestamp",
|
description="Trial expiry timestamp (UTC unix)",
|
||||||
json_schema_extra={"label": "Trial endet"},
|
json_schema_extra={"label": "Trial endet", "frontend_type": "timestamp"},
|
||||||
)
|
)
|
||||||
|
|
||||||
snapshotPricePerUserCHF: float = Field(
|
snapshotPricePerUserCHF: float = Field(
|
||||||
|
|
@ -407,7 +407,7 @@ BUILTIN_PLANS: Dict[str, SubscriptionPlan] = {
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
def _getPlan(planKey: str) -> Optional[SubscriptionPlan]:
|
def getPlan(planKey: str) -> Optional[SubscriptionPlan]:
|
||||||
"""Resolve a plan by key from the built-in catalog."""
|
"""Resolve a plan by key from the built-in catalog."""
|
||||||
return BUILTIN_PLANS.get(planKey)
|
return BUILTIN_PLANS.get(planKey)
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -32,6 +32,7 @@ class AuthAuthority(str, Enum):
|
||||||
GOOGLE = "google"
|
GOOGLE = "google"
|
||||||
MSFT = "msft"
|
MSFT = "msft"
|
||||||
CLICKUP = "clickup"
|
CLICKUP = "clickup"
|
||||||
|
INFOMANIAK = "infomaniak"
|
||||||
|
|
||||||
class ConnectionStatus(str, Enum):
|
class ConnectionStatus(str, Enum):
|
||||||
ACTIVE = "active"
|
ACTIVE = "active"
|
||||||
|
|
@ -397,7 +398,7 @@ class UserConnection(PowerOnModel):
|
||||||
"frontend_readonly": True,
|
"frontend_readonly": True,
|
||||||
"frontend_required": False,
|
"frontend_required": False,
|
||||||
"label": "Benutzer-ID",
|
"label": "Benutzer-ID",
|
||||||
"fk_target": {"db": "poweron_app", "table": "User"},
|
"fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
authority: AuthAuthority = Field(
|
authority: AuthAuthority = Field(
|
||||||
|
|
@ -474,6 +475,22 @@ class UserConnection(PowerOnModel):
|
||||||
description="OAuth scopes granted for this connection",
|
description="OAuth scopes granted for this connection",
|
||||||
json_schema_extra={"frontend_type": "list", "frontend_readonly": True, "frontend_required": False, "label": "Gewährte Berechtigungen"},
|
json_schema_extra={"frontend_type": "list", "frontend_readonly": True, "frontend_required": False, "label": "Gewährte Berechtigungen"},
|
||||||
)
|
)
|
||||||
|
knowledgeIngestionEnabled: bool = Field(
|
||||||
|
default=False,
|
||||||
|
description="Whether the user has consented to knowledge ingestion for this connection",
|
||||||
|
json_schema_extra={"frontend_type": "boolean", "frontend_readonly": False, "frontend_required": False, "label": "Wissensdatenbank aktiv"},
|
||||||
|
)
|
||||||
|
knowledgePreferences: Optional[Dict[str, Any]] = Field(
|
||||||
|
default=None,
|
||||||
|
description=(
|
||||||
|
"Per-connection knowledge ingestion preferences. schemaVersion=1 keys: "
|
||||||
|
"neutralizeBeforeEmbed (bool), mailContentDepth (metadata|snippet|full), "
|
||||||
|
"mailIndexAttachments (bool), filesIndexBinaries (bool), mimeAllowlist (list[str]), "
|
||||||
|
"clickupScope (titles|title_description|with_comments), "
|
||||||
|
"surfaceToggles (dict per authority), maxAgeDays (int)."
|
||||||
|
),
|
||||||
|
json_schema_extra={"frontend_type": "json", "frontend_readonly": False, "frontend_required": False, "label": "Wissenspräferenzen"},
|
||||||
|
)
|
||||||
|
|
||||||
@computed_field
|
@computed_field
|
||||||
@property
|
@property
|
||||||
|
|
@ -646,11 +663,11 @@ class UserInDB(User):
|
||||||
resetTokenExpires: Optional[float] = Field(
|
resetTokenExpires: Optional[float] = Field(
|
||||||
None,
|
None,
|
||||||
description="Reset token expiration (UTC timestamp in seconds)",
|
description="Reset token expiration (UTC timestamp in seconds)",
|
||||||
json_schema_extra={"label": "Token läuft ab"},
|
json_schema_extra={"label": "Token läuft ab", "frontend_type": "timestamp"},
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def _normalizeTtsVoiceMap(value: Any) -> Optional[Dict[str, str]]:
|
def normalizeTtsVoiceMap(value: Any) -> Optional[Dict[str, str]]:
|
||||||
"""
|
"""
|
||||||
Coerce ttsVoiceMap payloads to Dict[str, str].
|
Coerce ttsVoiceMap payloads to Dict[str, str].
|
||||||
|
|
||||||
|
|
@ -687,12 +704,12 @@ class UserVoicePreferences(PowerOnModel):
|
||||||
)
|
)
|
||||||
userId: str = Field(
|
userId: str = Field(
|
||||||
description="User ID",
|
description="User ID",
|
||||||
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "User"}},
|
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"}},
|
||||||
)
|
)
|
||||||
mandateId: Optional[str] = Field(
|
mandateId: Optional[str] = Field(
|
||||||
default=None,
|
default=None,
|
||||||
description="Mandate scope (None = global for user)",
|
description="Mandate scope (None = global for user)",
|
||||||
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate"}},
|
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}},
|
||||||
)
|
)
|
||||||
sttLanguage: str = Field(
|
sttLanguage: str = Field(
|
||||||
default="de-DE",
|
default="de-DE",
|
||||||
|
|
@ -728,6 +745,6 @@ class UserVoicePreferences(PowerOnModel):
|
||||||
@field_validator("ttsVoiceMap", mode="before")
|
@field_validator("ttsVoiceMap", mode="before")
|
||||||
@classmethod
|
@classmethod
|
||||||
def _validateTtsVoiceMap(cls, value: Any) -> Optional[Dict[str, str]]:
|
def _validateTtsVoiceMap(cls, value: Any) -> Optional[Dict[str, str]]:
|
||||||
return _normalizeTtsVoiceMap(value)
|
return normalizeTtsVoiceMap(value)
|
||||||
|
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -14,8 +14,8 @@ from modules.datamodels.datamodelExtraction import ContentExtracted, ContentPart
|
||||||
class UdmMetadata(BaseModel):
|
class UdmMetadata(BaseModel):
|
||||||
title: Optional[str] = None
|
title: Optional[str] = None
|
||||||
author: Optional[str] = None
|
author: Optional[str] = None
|
||||||
createdAt: Optional[str] = None
|
createdAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
|
||||||
modifiedAt: Optional[str] = None
|
modifiedAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
|
||||||
sourcePath: str = ""
|
sourcePath: str = ""
|
||||||
tags: List[str] = Field(default_factory=list)
|
tags: List[str] = Field(default_factory=list)
|
||||||
custom: Dict[str, Any] = Field(default_factory=dict)
|
custom: Dict[str, Any] = Field(default_factory=dict)
|
||||||
|
|
@ -177,7 +177,7 @@ def _groupKeyForPart(part: ContentPart) -> Tuple[str, int, str]:
|
||||||
_VALID_DOC_SOURCES = frozenset({"pdf", "docx", "pptx", "xlsx", "html", "binary", "unknown"})
|
_VALID_DOC_SOURCES = frozenset({"pdf", "docx", "pptx", "xlsx", "html", "binary", "unknown"})
|
||||||
|
|
||||||
|
|
||||||
def _contentPartsToUdm(extracted: ContentExtracted, sourceType: str, sourcePath: str) -> UdmDocument:
|
def contentPartsToUdm(extracted: ContentExtracted, sourceType: str, sourcePath: str) -> UdmDocument:
|
||||||
"""Convert flat ContentPart list into a UdmDocument using structural heuristics."""
|
"""Convert flat ContentPart list into a UdmDocument using structural heuristics."""
|
||||||
parts = list(extracted.parts or [])
|
parts = list(extracted.parts or [])
|
||||||
st: Literal["pdf", "docx", "pptx", "xlsx", "html", "binary", "unknown"] = (
|
st: Literal["pdf", "docx", "pptx", "xlsx", "html", "binary", "unknown"] = (
|
||||||
|
|
@ -290,7 +290,7 @@ def _stripUdmForReferences(udm: UdmDocument) -> UdmDocument:
|
||||||
return clone
|
return clone
|
||||||
|
|
||||||
|
|
||||||
def _applyUdmOutputDetail(udm: UdmDocument, detail: str) -> UdmDocument:
|
def applyUdmOutputDetail(udm: UdmDocument, detail: str) -> UdmDocument:
|
||||||
if detail == "structure":
|
if detail == "structure":
|
||||||
return _stripUdmRaw(udm)
|
return _stripUdmRaw(udm)
|
||||||
if detail == "references":
|
if detail == "references":
|
||||||
|
|
@ -298,7 +298,7 @@ def _applyUdmOutputDetail(udm: UdmDocument, detail: str) -> UdmDocument:
|
||||||
return udm
|
return udm
|
||||||
|
|
||||||
|
|
||||||
def _mimeToUdmSourceType(mimeType: str, fileName: str) -> Literal["pdf", "docx", "pptx", "xlsx", "html", "binary", "unknown"]:
|
def mimeToUdmSourceType(mimeType: str, fileName: str) -> Literal["pdf", "docx", "pptx", "xlsx", "html", "binary", "unknown"]:
|
||||||
m = (mimeType or "").lower()
|
m = (mimeType or "").lower()
|
||||||
fn = (fileName or "").lower()
|
fn = (fileName or "").lower()
|
||||||
if m == "application/pdf" or fn.endswith(".pdf"):
|
if m == "application/pdf" or fn.endswith(".pdf"):
|
||||||
|
|
|
||||||
|
|
@ -27,7 +27,7 @@ class Prompt(PowerOnModel):
|
||||||
"frontend_type": "text",
|
"frontend_type": "text",
|
||||||
"frontend_readonly": True,
|
"frontend_readonly": True,
|
||||||
"frontend_required": False,
|
"frontend_required": False,
|
||||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
isSystem: bool = Field(
|
isSystem: bool = Field(
|
||||||
|
|
|
||||||
311
modules/datamodels/datamodelViews.py
Normal file
311
modules/datamodels/datamodelViews.py
Normal file
|
|
@ -0,0 +1,311 @@
|
||||||
|
# Copyright (c) 2025 Patrick Motsch
|
||||||
|
# All rights reserved.
|
||||||
|
"""
|
||||||
|
View models for the /api/attributes/ endpoint.
|
||||||
|
|
||||||
|
These extend base DB models with computed / enriched fields that the gateway
|
||||||
|
adds at response time (JOINs, aggregations, synthetics). They are NEVER used
|
||||||
|
for DB operations — only for ``getModelAttributeDefinitions()`` so the frontend
|
||||||
|
can resolve column types via ``resolveColumnTypes`` without hardcoding.
|
||||||
|
|
||||||
|
Naming convention: ``{BaseModel}View``.
|
||||||
|
|
||||||
|
``getModelClasses()`` in ``attributeUtils.py`` auto-discovers every
|
||||||
|
``datamodel*.py`` under ``modules/datamodels/`` — so placing them here is
|
||||||
|
sufficient for registration.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Optional, List
|
||||||
|
from pydantic import Field
|
||||||
|
|
||||||
|
from modules.datamodels.datamodelBase import MODEL_REGISTRY, PowerOnModel
|
||||||
|
from modules.datamodels.datamodelMembership import UserMandate, FeatureAccess
|
||||||
|
from modules.datamodels.datamodelBilling import BillingTransaction
|
||||||
|
from modules.datamodels.datamodelSubscription import MandateSubscription
|
||||||
|
from modules.datamodels.datamodelUiLanguage import UiLanguageSet
|
||||||
|
from modules.datamodels.datamodelRbac import Role
|
||||||
|
from modules.features.neutralization.datamodelFeatureNeutralizer import DataNeutralizerAttributes
|
||||||
|
from modules.shared.i18nRegistry import i18nModel
|
||||||
|
|
||||||
|
|
||||||
|
# ============================================================================
|
||||||
|
# Punkt 1a: UserMandate + enriched user fields
|
||||||
|
# ============================================================================
|
||||||
|
|
||||||
|
@i18nModel("Benutzer-Mandant (Ansicht)")
|
||||||
|
class UserMandateView(UserMandate):
|
||||||
|
"""UserMandate erweitert um aufgeloeste Benutzerfelder und Rollenlabels."""
|
||||||
|
|
||||||
|
username: Optional[str] = Field(
|
||||||
|
default=None,
|
||||||
|
description="Username (resolved from userId)",
|
||||||
|
json_schema_extra={"label": "Benutzername", "frontend_type": "text", "frontend_readonly": True},
|
||||||
|
)
|
||||||
|
email: Optional[str] = Field(
|
||||||
|
default=None,
|
||||||
|
description="E-Mail address (resolved from userId)",
|
||||||
|
json_schema_extra={"label": "E-Mail", "frontend_type": "text", "frontend_readonly": True},
|
||||||
|
)
|
||||||
|
fullName: Optional[str] = Field(
|
||||||
|
default=None,
|
||||||
|
description="Full name (resolved from userId)",
|
||||||
|
json_schema_extra={"label": "Vollstaendiger Name", "frontend_type": "text", "frontend_readonly": True},
|
||||||
|
)
|
||||||
|
roleLabels: Optional[List[str]] = Field(
|
||||||
|
default=None,
|
||||||
|
description="Role labels (resolved from junction table)",
|
||||||
|
json_schema_extra={"label": "Rollen", "frontend_type": "text", "frontend_readonly": True},
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
# ============================================================================
|
||||||
|
# Punkt 1b: FeatureAccess + enriched user fields
|
||||||
|
# ============================================================================
|
||||||
|
|
||||||
|
@i18nModel("Feature-Zugang (Ansicht)")
|
||||||
|
class FeatureAccessView(FeatureAccess):
|
||||||
|
"""FeatureAccess erweitert um aufgeloeste Benutzerfelder und Rollenlabels."""
|
||||||
|
|
||||||
|
username: Optional[str] = Field(
|
||||||
|
default=None,
|
||||||
|
description="Username (resolved from userId)",
|
||||||
|
json_schema_extra={"label": "Benutzername", "frontend_type": "text", "frontend_readonly": True},
|
||||||
|
)
|
||||||
|
email: Optional[str] = Field(
|
||||||
|
default=None,
|
||||||
|
description="E-Mail address (resolved from userId)",
|
||||||
|
json_schema_extra={"label": "E-Mail", "frontend_type": "text", "frontend_readonly": True},
|
||||||
|
)
|
||||||
|
fullName: Optional[str] = Field(
|
||||||
|
default=None,
|
||||||
|
description="Full name (resolved from userId)",
|
||||||
|
json_schema_extra={"label": "Vollstaendiger Name", "frontend_type": "text", "frontend_readonly": True},
|
||||||
|
)
|
||||||
|
roleLabels: Optional[List[str]] = Field(
|
||||||
|
default=None,
|
||||||
|
description="Role labels (resolved from junction table)",
|
||||||
|
json_schema_extra={"label": "Rollen", "frontend_type": "text", "frontend_readonly": True},
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
# ============================================================================
|
||||||
|
# Punkt 1d: BillingTransaction + enriched mandate/user names
|
||||||
|
# ============================================================================
|
||||||
|
|
||||||
|
@i18nModel("Transaktion (Ansicht)")
|
||||||
|
class BillingTransactionView(BillingTransaction):
|
||||||
|
"""BillingTransaction erweitert um aufgeloeste Mandanten-/Benutzernamen."""
|
||||||
|
|
||||||
|
mandateName: Optional[str] = Field(
|
||||||
|
default=None,
|
||||||
|
description="Mandate name (resolved from accountId/mandateId)",
|
||||||
|
json_schema_extra={"label": "Mandant", "frontend_type": "text", "frontend_readonly": True},
|
||||||
|
)
|
||||||
|
userName: Optional[str] = Field(
|
||||||
|
default=None,
|
||||||
|
description="User name (resolved from createdByUserId)",
|
||||||
|
json_schema_extra={"label": "Benutzer", "frontend_type": "text", "frontend_readonly": True},
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
# ============================================================================
|
||||||
|
# Punkt 3a: MandateSubscription + aggregated fields
|
||||||
|
# ============================================================================
|
||||||
|
|
||||||
|
@i18nModel("Abonnement (Ansicht)")
|
||||||
|
class MandateSubscriptionView(MandateSubscription):
|
||||||
|
"""MandateSubscription erweitert um aggregierte Laufzeitwerte."""
|
||||||
|
|
||||||
|
mandateName: Optional[str] = Field(
|
||||||
|
default=None,
|
||||||
|
description="Mandate name (resolved from mandateId)",
|
||||||
|
json_schema_extra={"label": "Mandant", "frontend_type": "text", "frontend_readonly": True},
|
||||||
|
)
|
||||||
|
planTitle: Optional[str] = Field(
|
||||||
|
default=None,
|
||||||
|
description="Plan title (resolved from planKey)",
|
||||||
|
json_schema_extra={"label": "Plan", "frontend_type": "text", "frontend_readonly": True},
|
||||||
|
)
|
||||||
|
activeUsers: Optional[int] = Field(
|
||||||
|
default=None,
|
||||||
|
description="Number of active users in the mandate",
|
||||||
|
json_schema_extra={"label": "Benutzer", "frontend_type": "number", "frontend_readonly": True},
|
||||||
|
)
|
||||||
|
activeInstances: Optional[int] = Field(
|
||||||
|
default=None,
|
||||||
|
description="Number of active feature instances in the mandate",
|
||||||
|
json_schema_extra={"label": "Module", "frontend_type": "number", "frontend_readonly": True},
|
||||||
|
)
|
||||||
|
monthlyRevenueCHF: Optional[float] = Field(
|
||||||
|
default=None,
|
||||||
|
description="Calculated monthly revenue in CHF",
|
||||||
|
json_schema_extra={"label": "Umsatz pro Monat", "frontend_type": "number", "frontend_readonly": True},
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
# ============================================================================
|
||||||
|
# Punkt 3b: UiLanguageSet + computed counts
|
||||||
|
# ============================================================================
|
||||||
|
|
||||||
|
@i18nModel("Sprachset (Ansicht)")
|
||||||
|
class UiLanguageSetView(UiLanguageSet):
|
||||||
|
"""UiLanguageSet erweitert um berechnete Uebersetzungszaehler."""
|
||||||
|
|
||||||
|
uiCount: Optional[int] = Field(
|
||||||
|
default=None,
|
||||||
|
description="Number of UI translation entries",
|
||||||
|
json_schema_extra={"label": "UI", "frontend_type": "number", "frontend_readonly": True},
|
||||||
|
)
|
||||||
|
gatewayCount: Optional[int] = Field(
|
||||||
|
default=None,
|
||||||
|
description="Number of gateway/API translation entries",
|
||||||
|
json_schema_extra={"label": "API", "frontend_type": "number", "frontend_readonly": True},
|
||||||
|
)
|
||||||
|
entriesCount: Optional[int] = Field(
|
||||||
|
default=None,
|
||||||
|
description="Total number of translation entries",
|
||||||
|
json_schema_extra={"label": "Gesamt", "frontend_type": "number", "frontend_readonly": True},
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
# ============================================================================
|
||||||
|
# Punkt 1c: DataNeutralizerAttributes + enriched fields
|
||||||
|
#
|
||||||
|
# DataNeutralizerAttributes extends BaseModel (not PowerOnModel), so its
|
||||||
|
# subclass does NOT auto-register in MODEL_REGISTRY. We register manually.
|
||||||
|
# ============================================================================
|
||||||
|
|
||||||
|
@i18nModel("Neutralisierungs-Zuordnung (Ansicht)")
|
||||||
|
class DataNeutralizerAttributesView(DataNeutralizerAttributes):
|
||||||
|
"""DataNeutralizerAttributes erweitert um synthetische/aufgeloeste Felder."""
|
||||||
|
|
||||||
|
placeholder: Optional[str] = Field(
|
||||||
|
default=None,
|
||||||
|
description="Synthetic placeholder string [patternType.id]",
|
||||||
|
json_schema_extra={"label": "Platzhalter", "frontend_type": "text", "frontend_readonly": True},
|
||||||
|
)
|
||||||
|
username: Optional[str] = Field(
|
||||||
|
default=None,
|
||||||
|
description="Username (resolved from userId)",
|
||||||
|
json_schema_extra={"label": "Benutzer", "frontend_type": "text", "frontend_readonly": True},
|
||||||
|
)
|
||||||
|
instanceLabel: Optional[str] = Field(
|
||||||
|
default=None,
|
||||||
|
description="Feature instance label (resolved from featureInstanceId)",
|
||||||
|
json_schema_extra={"label": "Feature-Instanz", "frontend_type": "text", "frontend_readonly": True},
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
# Manual registration for non-PowerOnModel view
|
||||||
|
MODEL_REGISTRY["DataNeutralizerAttributesView"] = DataNeutralizerAttributesView # type: ignore[assignment]
|
||||||
|
|
||||||
|
|
||||||
|
# ============================================================================
|
||||||
|
# Role view — admin RBAC list with computed `scopeType` + `userCount`
|
||||||
|
#
|
||||||
|
# `scopeType` is computed in the route from (mandateId, isSystemRole). Exposed
|
||||||
|
# here as a pure `select` field so the frontend renders the user-facing label
|
||||||
|
# from `frontend_options` (no hardcoded mapping in the page).
|
||||||
|
# ============================================================================
|
||||||
|
|
||||||
|
@i18nModel("Rolle (Ansicht)")
|
||||||
|
class RoleView(Role):
|
||||||
|
"""Role extended with computed scope information for the admin UI."""
|
||||||
|
|
||||||
|
scopeType: Optional[str] = Field(
|
||||||
|
default=None,
|
||||||
|
description="Computed scope: 'system' (template), 'global', or 'mandate'.",
|
||||||
|
json_schema_extra={
|
||||||
|
"label": "Geltungsbereich",
|
||||||
|
"frontend_type": "select",
|
||||||
|
"frontend_readonly": True,
|
||||||
|
"frontend_required": False,
|
||||||
|
"frontend_options": [
|
||||||
|
{"value": "system", "label": "System-Template"},
|
||||||
|
{"value": "global", "label": "Template"},
|
||||||
|
{"value": "mandate", "label": "Mandant"},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
)
|
||||||
|
userCount: Optional[int] = Field(
|
||||||
|
default=None,
|
||||||
|
description="Number of users assigned to this role (via UserMandateRole).",
|
||||||
|
json_schema_extra={
|
||||||
|
"label": "Benutzer",
|
||||||
|
"frontend_type": "number",
|
||||||
|
"frontend_readonly": True,
|
||||||
|
"frontend_required": False,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
# ============================================================================
|
||||||
|
# Automation Workflow — dashboard view with synthesized fields
|
||||||
|
# ============================================================================
|
||||||
|
|
||||||
|
from modules.features.graphicalEditor.datamodelFeatureGraphicalEditor import AutoWorkflow
|
||||||
|
|
||||||
|
|
||||||
|
@i18nModel("Workflow (Ansicht)")
class Automation2WorkflowView(AutoWorkflow):
    """AutoWorkflow extended with computed dashboard fields.

    Used exclusively for /api/attributes/ so the frontend can resolve column
    types for the workflow dashboard table (FormGeneratorTable). Every field
    below is read-only in the UI; the values are resolved/synthesized for the
    dashboard (see the per-field descriptions), not persisted on AutoWorkflow.
    """

    sysCreatedAt: Optional[float] = Field(
        default=None,
        description="Record creation timestamp (UTC)",
        json_schema_extra={
            "label": "Erstellt",
            "frontend_type": "timestamp",
            "frontend_readonly": True,
            "frontend_required": False,
        },
    )
    lastStartedAt: Optional[float] = Field(
        default=None,
        description="Timestamp of the most recent workflow run start",
        json_schema_extra={
            "label": "Zuletzt gestartet",
            "frontend_type": "timestamp",
            "frontend_readonly": True,
            "frontend_required": False,
        },
    )
    runCount: Optional[int] = Field(
        default=None,
        description="Total number of runs for this workflow",
        json_schema_extra={
            # Fix: use the proper umlaut ("Läufe") instead of the ASCII
            # transliteration "Laeufe", consistent with the other German
            # labels in this class (e.g. "Läuft").
            "label": "Läufe",
            "frontend_type": "number",
            "frontend_readonly": True,
            "frontend_required": False,
        },
    )
    mandateLabel: Optional[str] = Field(
        default=None,
        description="Mandate name (resolved from mandateId)",
        json_schema_extra={"label": "Mandant", "frontend_type": "text", "frontend_readonly": True},
    )
    instanceLabel: Optional[str] = Field(
        default=None,
        description="Feature instance label (resolved from featureInstanceId)",
        json_schema_extra={"label": "Feature-Instanz", "frontend_type": "text", "frontend_readonly": True},
    )
    featureCode: Optional[str] = Field(
        default=None,
        description="Feature code of the owning instance",
        json_schema_extra={"label": "Feature", "frontend_type": "text", "frontend_readonly": True},
    )
    isRunning: Optional[bool] = Field(
        default=None,
        description="Whether the workflow currently has an active run",
        json_schema_extra={
            "label": "Läuft",
            "frontend_type": "checkbox",
            "frontend_readonly": True,
            # Tri-state display labels: true / unset / false.
            "frontend_format_labels": ["Ja", "-", "Nein"],
        },
    )
|
||||||
|
|
@ -22,9 +22,24 @@ class WorkflowActionParameter(BaseModel):
|
||||||
json_schema_extra={"label": "Name"},
|
json_schema_extra={"label": "Name"},
|
||||||
)
|
)
|
||||||
type: str = Field(
|
type: str = Field(
|
||||||
description="Python type as string: 'str', 'int', 'bool', 'List[str]', etc.",
|
description=(
|
||||||
|
"Type reference. Either a primitive ('str', 'int', 'bool', 'float', 'Any', "
|
||||||
|
"'List[str]', 'Dict[str,Any]', …) or a PORT_TYPE_CATALOG schema name "
|
||||||
|
"(e.g. 'ConnectionRef', 'FeatureInstanceRef', 'DocumentList', "
|
||||||
|
"'TrusteeProcessResult'). Catalog types are validated by "
|
||||||
|
"_actionSignatureValidator at startup."
|
||||||
|
),
|
||||||
json_schema_extra={"label": "Typ"},
|
json_schema_extra={"label": "Typ"},
|
||||||
)
|
)
|
||||||
|
uiHint: Optional[str] = Field(
|
||||||
|
None,
|
||||||
|
description=(
|
||||||
|
"Optional UI rendering hint for adapters. "
|
||||||
|
"Free-form (e.g. 'textarea', 'cron', 'fieldBuilder'). "
|
||||||
|
"Adapters can override; defaults derive from frontendType when absent."
|
||||||
|
),
|
||||||
|
json_schema_extra={"label": "UI-Hinweis"},
|
||||||
|
)
|
||||||
frontendType: FrontendType = Field(
|
frontendType: FrontendType = Field(
|
||||||
description="UI rendering type (from global FrontendType enum)",
|
description="UI rendering type (from global FrontendType enum)",
|
||||||
json_schema_extra={"label": "Frontend-Typ"},
|
json_schema_extra={"label": "Frontend-Typ"},
|
||||||
|
|
@ -80,6 +95,16 @@ class WorkflowActionDefinition(BaseModel):
|
||||||
description="Parameter schema definitions",
|
description="Parameter schema definitions",
|
||||||
json_schema_extra={"label": "Parameter"},
|
json_schema_extra={"label": "Parameter"},
|
||||||
)
|
)
|
||||||
|
outputType: str = Field(
|
||||||
|
"ActionResult",
|
||||||
|
description=(
|
||||||
|
"PORT_TYPE_CATALOG schema name produced by this action "
|
||||||
|
"(e.g. 'TrusteeProcessResult', 'EmailDraft', 'DocumentList'). "
|
||||||
|
"Defaults to 'ActionResult' for fire-and-forget actions. "
|
||||||
|
"Validated by _actionSignatureValidator at startup."
|
||||||
|
),
|
||||||
|
json_schema_extra={"label": "Ausgabe-Typ"},
|
||||||
|
)
|
||||||
execute: Optional[Callable] = Field(
|
execute: Optional[Callable] = Field(
|
||||||
None,
|
None,
|
||||||
description="Execution function - async function that takes parameters dict and returns ActionResult. Set dynamically.",
|
description="Execution function - async function that takes parameters dict and returns ActionResult. Set dynamically.",
|
||||||
|
|
|
||||||
|
|
@ -2,7 +2,7 @@
|
||||||
Demo Configs — Auto-Discovery Module
|
Demo Configs — Auto-Discovery Module
|
||||||
|
|
||||||
Scans this folder for Python files that contain subclasses of _BaseDemoConfig
|
Scans this folder for Python files that contain subclasses of _BaseDemoConfig
|
||||||
and exposes them via _getAvailableDemoConfigs().
|
and exposes them via getAvailableDemoConfigs().
|
||||||
"""
|
"""
|
||||||
|
|
||||||
import importlib
|
import importlib
|
||||||
|
|
@ -18,7 +18,7 @@ logger = logging.getLogger(__name__)
|
||||||
_configCache: Dict[str, _BaseDemoConfig] = {}
|
_configCache: Dict[str, _BaseDemoConfig] = {}
|
||||||
|
|
||||||
|
|
||||||
def _getAvailableDemoConfigs() -> Dict[str, _BaseDemoConfig]:
|
def getAvailableDemoConfigs() -> Dict[str, _BaseDemoConfig]:
|
||||||
"""Return a dict of code -> instance for every discovered demo config."""
|
"""Return a dict of code -> instance for every discovered demo config."""
|
||||||
if _configCache:
|
if _configCache:
|
||||||
return _configCache
|
return _configCache
|
||||||
|
|
@ -43,7 +43,7 @@ def _getAvailableDemoConfigs() -> Dict[str, _BaseDemoConfig]:
|
||||||
return _configCache
|
return _configCache
|
||||||
|
|
||||||
|
|
||||||
def _getDemoConfigByCode(code: str) -> _BaseDemoConfig | None:
|
def getDemoConfigByCode(code: str) -> _BaseDemoConfig | None:
|
||||||
"""Get a specific demo config by its code."""
|
"""Get a specific demo config by its code."""
|
||||||
configs = _getAvailableDemoConfigs()
|
configs = getAvailableDemoConfigs()
|
||||||
return configs.get(code)
|
return configs.get(code)
|
||||||
|
|
|
||||||
|
|
@ -4,11 +4,16 @@ Base class for demo configurations.
|
||||||
Each demo config file in this folder extends _BaseDemoConfig and provides
|
Each demo config file in this folder extends _BaseDemoConfig and provides
|
||||||
idempotent load() and remove() methods for setting up / tearing down
|
idempotent load() and remove() methods for setting up / tearing down
|
||||||
a complete demo environment (mandates, users, features, test data, etc.).
|
a complete demo environment (mandates, users, features, test data, etc.).
|
||||||
|
|
||||||
|
Subclasses MUST also declare ``credentials`` so the SysAdmin who triggers a
|
||||||
|
demo-load gets the initial username / password pair shown in the UI -- this
|
||||||
|
avoids the "where do I find the password?" anti-pattern of having to grep the
|
||||||
|
source code.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
from abc import ABC, abstractmethod
|
from abc import ABC, abstractmethod
|
||||||
from typing import Dict, Any
|
from typing import Any, Dict, List
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
@ -20,6 +25,13 @@ class _BaseDemoConfig(ABC):
|
||||||
label: str = ""
|
label: str = ""
|
||||||
description: str = ""
|
description: str = ""
|
||||||
|
|
||||||
|
# Each entry describes one bootstrapped login that the demo creates.
|
||||||
|
# Shape: {"role": "Demo-Sachbearbeiter", "username": "pwg.demo",
|
||||||
|
# "email": "pwg.demo@poweron.swiss", "password": "pwg.demo.2026"}
|
||||||
|
# Surfaced via GET /api/admin/demo-config and inside the load() summary
|
||||||
|
# so the AdminDemoConfigPage can display it (no source-code grep needed).
|
||||||
|
credentials: List[Dict[str, str]] = []
|
||||||
|
|
||||||
@abstractmethod
|
@abstractmethod
|
||||||
def load(self, db) -> Dict[str, Any]:
|
def load(self, db) -> Dict[str, Any]:
|
||||||
"""Create all demo data (idempotent). Returns summary dict."""
|
"""Create all demo data (idempotent). Returns summary dict."""
|
||||||
|
|
@ -35,4 +47,5 @@ class _BaseDemoConfig(ABC):
|
||||||
"code": self.code,
|
"code": self.code,
|
||||||
"label": self.label,
|
"label": self.label,
|
||||||
"description": self.description,
|
"description": self.description,
|
||||||
|
"credentials": list(self.credentials or []),
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -64,6 +64,14 @@ class InvestorDemo2026(_BaseDemoConfig):
|
||||||
"Two mandates (HappyLife AG + Alpina Treuhand AG), one SysAdmin user, "
|
"Two mandates (HappyLife AG + Alpina Treuhand AG), one SysAdmin user, "
|
||||||
"trustee with RMA, workspace, graph editor, and neutralization."
|
"trustee with RMA, workspace, graph editor, and neutralization."
|
||||||
)
|
)
|
||||||
|
credentials = [
|
||||||
|
{
|
||||||
|
"role": "SysAdmin Demo",
|
||||||
|
"username": _USER["username"],
|
||||||
|
"email": _USER["email"],
|
||||||
|
"password": _USER["password"],
|
||||||
|
}
|
||||||
|
]
|
||||||
|
|
||||||
# ------------------------------------------------------------------
|
# ------------------------------------------------------------------
|
||||||
# load
|
# load
|
||||||
|
|
@ -101,6 +109,10 @@ class InvestorDemo2026(_BaseDemoConfig):
|
||||||
logger.error(f"Demo load failed: {e}", exc_info=True)
|
logger.error(f"Demo load failed: {e}", exc_info=True)
|
||||||
summary["errors"].append(str(e))
|
summary["errors"].append(str(e))
|
||||||
|
|
||||||
|
# Surface initial credentials so the SysAdmin doesn't have to grep the
|
||||||
|
# source code -- consumed by AdminDemoConfigPage to render a copyable
|
||||||
|
# login box in the result banner.
|
||||||
|
summary["credentials"] = list(self.credentials)
|
||||||
return summary
|
return summary
|
||||||
|
|
||||||
# ------------------------------------------------------------------
|
# ------------------------------------------------------------------
|
||||||
|
|
@ -268,10 +280,17 @@ class InvestorDemo2026(_BaseDemoConfig):
|
||||||
logger.error(f"Failed to create feature '{instanceLabel}' ({code}) in {mandateLabel}: {e}")
|
logger.error(f"Failed to create feature '{instanceLabel}' ({code}) in {mandateLabel}: {e}")
|
||||||
|
|
||||||
def _ensureFeatureAccess(self, db, userId: str, mandateId: str, mandateLabel: str, summary: Dict):
|
def _ensureFeatureAccess(self, db, userId: str, mandateId: str, mandateLabel: str, summary: Dict):
|
||||||
"""Grant the demo user admin access to every feature instance in the mandate."""
|
"""Grant the demo user admin access on EVERY feature instance of the
|
||||||
|
mandate. Without an explicit ``FeatureAccess`` + ``{code}-admin`` role
|
||||||
|
the user does not see any feature tile in the UI -- so this method
|
||||||
|
ALSO heals a half-broken state by re-copying the per-feature template
|
||||||
|
roles if they are missing (e.g. when the instance was created via an
|
||||||
|
older code path that skipped ``copyTemplateRoles``).
|
||||||
|
"""
|
||||||
from modules.datamodels.datamodelFeatures import FeatureInstance
|
from modules.datamodels.datamodelFeatures import FeatureInstance
|
||||||
from modules.datamodels.datamodelMembership import FeatureAccess, FeatureAccessRole
|
from modules.datamodels.datamodelMembership import FeatureAccess, FeatureAccessRole
|
||||||
from modules.datamodels.datamodelRbac import Role
|
from modules.datamodels.datamodelRbac import Role
|
||||||
|
from modules.interfaces.interfaceFeatures import getFeatureInterface
|
||||||
|
|
||||||
instances = db.getRecordset(FeatureInstance, recordFilter={"mandateId": mandateId}) or []
|
instances = db.getRecordset(FeatureInstance, recordFilter={"mandateId": mandateId}) or []
|
||||||
|
|
||||||
|
|
@ -297,16 +316,50 @@ class InvestorDemo2026(_BaseDemoConfig):
|
||||||
"featureInstanceId": instId,
|
"featureInstanceId": instId,
|
||||||
"roleLabel": adminRoleLabel,
|
"roleLabel": adminRoleLabel,
|
||||||
})
|
})
|
||||||
if adminRoles:
|
|
||||||
adminRoleId = adminRoles[0].get("id")
|
# Self-heal: if the per-feature admin role does not exist on this
|
||||||
existingRole = db.getRecordset(FeatureAccessRole, recordFilter={
|
# instance the template roles were never copied -- copy them now.
|
||||||
"featureAccessId": featureAccessId,
|
if not adminRoles:
|
||||||
"roleId": adminRoleId,
|
logger.warning(
|
||||||
|
"Feature instance %s (%s) is missing role '%s' -- "
|
||||||
|
"re-copying template roles", instId, featureCode, adminRoleLabel,
|
||||||
|
)
|
||||||
|
try:
|
||||||
|
fi = getFeatureInterface(db)
|
||||||
|
fi._copyTemplateRoles(featureCode, mandateId, instId)
|
||||||
|
summary["created"].append(
|
||||||
|
f"Repaired template roles for {featureCode} in {mandateLabel}"
|
||||||
|
)
|
||||||
|
except Exception as repairErr:
|
||||||
|
summary["errors"].append(
|
||||||
|
f"Could not repair template roles for {featureCode} "
|
||||||
|
f"in {mandateLabel}: {repairErr}"
|
||||||
|
)
|
||||||
|
adminRoles = db.getRecordset(Role, recordFilter={
|
||||||
|
"featureInstanceId": instId,
|
||||||
|
"roleLabel": adminRoleLabel,
|
||||||
})
|
})
|
||||||
if not existingRole:
|
|
||||||
far = FeatureAccessRole(featureAccessId=featureAccessId, roleId=adminRoleId)
|
if not adminRoles:
|
||||||
db.recordCreate(FeatureAccessRole, far)
|
summary["errors"].append(
|
||||||
logger.info(f"Assigned {adminRoleLabel} role in {mandateLabel}")
|
f"Admin role '{adminRoleLabel}' not found for feature "
|
||||||
|
f"instance {featureCode} in {mandateLabel} -- demo user "
|
||||||
|
f"will not see this feature."
|
||||||
|
)
|
||||||
|
continue
|
||||||
|
|
||||||
|
adminRoleId = adminRoles[0].get("id")
|
||||||
|
existingRole = db.getRecordset(FeatureAccessRole, recordFilter={
|
||||||
|
"featureAccessId": featureAccessId,
|
||||||
|
"roleId": adminRoleId,
|
||||||
|
})
|
||||||
|
if not existingRole:
|
||||||
|
far = FeatureAccessRole(featureAccessId=featureAccessId, roleId=adminRoleId)
|
||||||
|
db.recordCreate(FeatureAccessRole, far)
|
||||||
|
summary["created"].append(
|
||||||
|
f"Role '{adminRoleLabel}' assigned to demo user in {mandateLabel}"
|
||||||
|
)
|
||||||
|
logger.info(f"Assigned {adminRoleLabel} role in {mandateLabel}")
|
||||||
|
|
||||||
def _ensureTrusteeRmaConfig(self, db, mandateId: Optional[str], mandateLabel: str, summary: Dict):
|
def _ensureTrusteeRmaConfig(self, db, mandateId: Optional[str], mandateLabel: str, summary: Dict):
|
||||||
if not mandateId:
|
if not mandateId:
|
||||||
|
|
@ -394,10 +447,10 @@ class InvestorDemo2026(_BaseDemoConfig):
|
||||||
if not mandateId:
|
if not mandateId:
|
||||||
return
|
return
|
||||||
try:
|
try:
|
||||||
from modules.interfaces.interfaceDbBilling import _getRootInterface
|
from modules.interfaces.interfaceDbBilling import getRootInterface
|
||||||
from modules.datamodels.datamodelBilling import BillingSettings
|
from modules.datamodels.datamodelBilling import BillingSettings
|
||||||
|
|
||||||
billingInterface = _getRootInterface()
|
billingInterface = getRootInterface()
|
||||||
existingSettings = billingInterface.getSettings(mandateId)
|
existingSettings = billingInterface.getSettings(mandateId)
|
||||||
if existingSettings:
|
if existingSettings:
|
||||||
summary["skipped"].append(f"Billing for {mandateLabel} exists")
|
summary["skipped"].append(f"Billing for {mandateLabel} exists")
|
||||||
|
|
@ -479,8 +532,8 @@ class InvestorDemo2026(_BaseDemoConfig):
|
||||||
summary["removed"].append(f"{len(roles)} roles in {mandateLabel}")
|
summary["removed"].append(f"{len(roles)} roles in {mandateLabel}")
|
||||||
|
|
||||||
try:
|
try:
|
||||||
from modules.interfaces.interfaceDbBilling import _getRootInterface
|
from modules.interfaces.interfaceDbBilling import getRootInterface
|
||||||
billingDb = _getRootInterface().db
|
billingDb = getRootInterface().db
|
||||||
billingSettings = billingDb.getRecordset(BillingSettings, recordFilter={"mandateId": mandateId}) or []
|
billingSettings = billingDb.getRecordset(BillingSettings, recordFilter={"mandateId": mandateId}) or []
|
||||||
for bs in billingSettings:
|
for bs in billingSettings:
|
||||||
billingDb.recordDelete(BillingSettings, bs.get("id"))
|
billingDb.recordDelete(BillingSettings, bs.get("id"))
|
||||||
|
|
|
||||||
|
|
@ -67,6 +67,14 @@ class PwgDemo2026(_BaseDemoConfig):
|
||||||
"Graph-Editor mit dem Pilot-Workflow für Jahresmietzinsbestätigungen "
|
"Graph-Editor mit dem Pilot-Workflow für Jahresmietzinsbestätigungen "
|
||||||
"(als File importiert, active=false). Idempotent."
|
"(als File importiert, active=false). Idempotent."
|
||||||
)
|
)
|
||||||
|
credentials = [
|
||||||
|
{
|
||||||
|
"role": "Demo-Sachbearbeiter",
|
||||||
|
"username": _USER["username"],
|
||||||
|
"email": _USER["email"],
|
||||||
|
"password": _USER["password"],
|
||||||
|
}
|
||||||
|
]
|
||||||
|
|
||||||
# ------------------------------------------------------------------
|
# ------------------------------------------------------------------
|
||||||
# load
|
# load
|
||||||
|
|
@ -98,6 +106,10 @@ class PwgDemo2026(_BaseDemoConfig):
|
||||||
logger.error(f"PWG demo load failed: {e}", exc_info=True)
|
logger.error(f"PWG demo load failed: {e}", exc_info=True)
|
||||||
summary["errors"].append(str(e))
|
summary["errors"].append(str(e))
|
||||||
|
|
||||||
|
# Surface initial credentials so the SysAdmin doesn't have to grep the
|
||||||
|
# source code -- consumed by AdminDemoConfigPage to render a copyable
|
||||||
|
# login box in the result banner.
|
||||||
|
summary["credentials"] = list(self.credentials)
|
||||||
return summary
|
return summary
|
||||||
|
|
||||||
# ------------------------------------------------------------------
|
# ------------------------------------------------------------------
|
||||||
|
|
@ -253,9 +265,17 @@ class PwgDemo2026(_BaseDemoConfig):
|
||||||
summary["errors"].append(f"Feature '{instanceLabel}' in {mandateLabel}: {e}")
|
summary["errors"].append(f"Feature '{instanceLabel}' in {mandateLabel}: {e}")
|
||||||
|
|
||||||
def _ensureFeatureAccess(self, db, userId: str, mandateId: str, mandateLabel: str, summary: Dict):
|
def _ensureFeatureAccess(self, db, userId: str, mandateId: str, mandateLabel: str, summary: Dict):
|
||||||
|
"""Grant the demo user admin access on EVERY feature instance of the
|
||||||
|
mandate. Without an explicit ``FeatureAccess`` + ``{code}-admin`` role
|
||||||
|
the user does not see any feature tile in the UI -- so this method
|
||||||
|
ALSO heals a half-broken state by re-copying the per-feature template
|
||||||
|
roles if they are missing (e.g. when the instance was created via an
|
||||||
|
older code path that skipped ``copyTemplateRoles``).
|
||||||
|
"""
|
||||||
from modules.datamodels.datamodelFeatures import FeatureInstance
|
from modules.datamodels.datamodelFeatures import FeatureInstance
|
||||||
from modules.datamodels.datamodelMembership import FeatureAccess, FeatureAccessRole
|
from modules.datamodels.datamodelMembership import FeatureAccess, FeatureAccessRole
|
||||||
from modules.datamodels.datamodelRbac import Role
|
from modules.datamodels.datamodelRbac import Role
|
||||||
|
from modules.interfaces.interfaceFeatures import getFeatureInterface
|
||||||
|
|
||||||
instances = db.getRecordset(FeatureInstance, recordFilter={"mandateId": mandateId}) or []
|
instances = db.getRecordset(FeatureInstance, recordFilter={"mandateId": mandateId}) or []
|
||||||
|
|
||||||
|
|
@ -280,15 +300,51 @@ class PwgDemo2026(_BaseDemoConfig):
|
||||||
"featureInstanceId": instId,
|
"featureInstanceId": instId,
|
||||||
"roleLabel": adminRoleLabel,
|
"roleLabel": adminRoleLabel,
|
||||||
})
|
})
|
||||||
if adminRoles:
|
|
||||||
adminRoleId = adminRoles[0].get("id")
|
# Self-heal: if the per-feature admin role does not exist on this
|
||||||
existingRole = db.getRecordset(FeatureAccessRole, recordFilter={
|
# instance the template roles were never copied -- copy them now.
|
||||||
"featureAccessId": featureAccessId,
|
if not adminRoles:
|
||||||
"roleId": adminRoleId,
|
logger.warning(
|
||||||
|
"Feature instance %s (%s) is missing role '%s' -- "
|
||||||
|
"re-copying template roles", instId, featureCode, adminRoleLabel,
|
||||||
|
)
|
||||||
|
try:
|
||||||
|
fi = getFeatureInterface(db)
|
||||||
|
fi._copyTemplateRoles(featureCode, mandateId, instId)
|
||||||
|
summary["created"].append(
|
||||||
|
f"Repaired template roles for {featureCode} in {mandateLabel}"
|
||||||
|
)
|
||||||
|
except Exception as repairErr:
|
||||||
|
summary["errors"].append(
|
||||||
|
f"Could not repair template roles for {featureCode} "
|
||||||
|
f"in {mandateLabel}: {repairErr}"
|
||||||
|
)
|
||||||
|
adminRoles = db.getRecordset(Role, recordFilter={
|
||||||
|
"featureInstanceId": instId,
|
||||||
|
"roleLabel": adminRoleLabel,
|
||||||
})
|
})
|
||||||
if not existingRole:
|
|
||||||
far = FeatureAccessRole(featureAccessId=featureAccessId, roleId=adminRoleId)
|
if not adminRoles:
|
||||||
db.recordCreate(FeatureAccessRole, far)
|
# Hard fail surfaced to UI -- without the admin role the user
|
||||||
|
# would silently not see the instance.
|
||||||
|
summary["errors"].append(
|
||||||
|
f"Admin role '{adminRoleLabel}' not found for feature "
|
||||||
|
f"instance {featureCode} in {mandateLabel} -- demo user "
|
||||||
|
f"will not see this feature."
|
||||||
|
)
|
||||||
|
continue
|
||||||
|
|
||||||
|
adminRoleId = adminRoles[0].get("id")
|
||||||
|
existingRole = db.getRecordset(FeatureAccessRole, recordFilter={
|
||||||
|
"featureAccessId": featureAccessId,
|
||||||
|
"roleId": adminRoleId,
|
||||||
|
})
|
||||||
|
if not existingRole:
|
||||||
|
far = FeatureAccessRole(featureAccessId=featureAccessId, roleId=adminRoleId)
|
||||||
|
db.recordCreate(FeatureAccessRole, far)
|
||||||
|
summary["created"].append(
|
||||||
|
f"Role '{adminRoleLabel}' assigned to demo user in {mandateLabel}"
|
||||||
|
)
|
||||||
|
|
||||||
def _ensureNeutralizationConfig(self, db, mandateId: Optional[str], userId: Optional[str], summary: Dict):
|
def _ensureNeutralizationConfig(self, db, mandateId: Optional[str], userId: Optional[str], summary: Dict):
|
||||||
if not mandateId or not userId:
|
if not mandateId or not userId:
|
||||||
|
|
@ -321,9 +377,9 @@ class PwgDemo2026(_BaseDemoConfig):
|
||||||
return
|
return
|
||||||
try:
|
try:
|
||||||
from modules.datamodels.datamodelBilling import BillingSettings
|
from modules.datamodels.datamodelBilling import BillingSettings
|
||||||
from modules.interfaces.interfaceDbBilling import _getRootInterface
|
from modules.interfaces.interfaceDbBilling import getRootInterface
|
||||||
|
|
||||||
billingInterface = _getRootInterface()
|
billingInterface = getRootInterface()
|
||||||
existingSettings = billingInterface.getSettings(mandateId)
|
existingSettings = billingInterface.getSettings(mandateId)
|
||||||
if existingSettings:
|
if existingSettings:
|
||||||
summary["skipped"].append(f"Billing for {mandateLabel} exists")
|
summary["skipped"].append(f"Billing for {mandateLabel} exists")
|
||||||
|
|
@ -447,11 +503,12 @@ class PwgDemo2026(_BaseDemoConfig):
|
||||||
if monthlyRent <= 0:
|
if monthlyRent <= 0:
|
||||||
continue
|
continue
|
||||||
for month in range(1, 13):
|
for month in range(1, 13):
|
||||||
bookingDate = f"{year}-{month:02d}-01"
|
from datetime import datetime as _dtCls, timezone as _tzCls
|
||||||
|
bookingTs = _dtCls(year, month, 1, tzinfo=_tzCls.utc).timestamp()
|
||||||
entryRef = f"PWG-{tenant.get('contactNumber')}-{year}{month:02d}"
|
entryRef = f"PWG-{tenant.get('contactNumber')}-{year}{month:02d}"
|
||||||
entry = TrusteeDataJournalEntry(
|
entry = TrusteeDataJournalEntry(
|
||||||
externalId=entryRef,
|
externalId=entryRef,
|
||||||
bookingDate=bookingDate,
|
bookingDate=bookingTs,
|
||||||
reference=entryRef,
|
reference=entryRef,
|
||||||
description=f"Mietzins {month:02d}/{year} {name}",
|
description=f"Mietzins {month:02d}/{year} {name}",
|
||||||
currency="CHF",
|
currency="CHF",
|
||||||
|
|
@ -652,8 +709,8 @@ class PwgDemo2026(_BaseDemoConfig):
|
||||||
db.recordDelete(Role, role.get("id"))
|
db.recordDelete(Role, role.get("id"))
|
||||||
|
|
||||||
try:
|
try:
|
||||||
from modules.interfaces.interfaceDbBilling import _getRootInterface
|
from modules.interfaces.interfaceDbBilling import getRootInterface
|
||||||
billingDb = _getRootInterface().db
|
billingDb = getRootInterface().db
|
||||||
billingSettings = billingDb.getRecordset(BillingSettings, recordFilter={"mandateId": mandateId}) or []
|
billingSettings = billingDb.getRecordset(BillingSettings, recordFilter={"mandateId": mandateId}) or []
|
||||||
for bs in billingSettings:
|
for bs in billingSettings:
|
||||||
billingDb.recordDelete(BillingSettings, bs.get("id"))
|
billingDb.recordDelete(BillingSettings, bs.get("id"))
|
||||||
|
|
|
||||||
|
|
@ -139,13 +139,13 @@ def storeDebugMessageAndDocuments(message, currentUser, mandateId=None, featureI
|
||||||
try:
|
try:
|
||||||
import os
|
import os
|
||||||
from datetime import datetime, UTC
|
from datetime import datetime, UTC
|
||||||
from modules.shared.debugLogger import _getBaseDebugDir, _ensureDir
|
from modules.shared.debugLogger import getBaseDebugDir, ensureDir
|
||||||
from modules.interfaces.interfaceDbManagement import getInterface
|
from modules.interfaces.interfaceDbManagement import getInterface
|
||||||
|
|
||||||
# Create base debug directory (use base debug dir, not prompts subdirectory)
|
# Create base debug directory (use base debug dir, not prompts subdirectory)
|
||||||
baseDebugDir = _getBaseDebugDir()
|
baseDebugDir = getBaseDebugDir()
|
||||||
debug_root = os.path.join(baseDebugDir, 'messages')
|
debug_root = os.path.join(baseDebugDir, 'messages')
|
||||||
_ensureDir(debug_root)
|
ensureDir(debug_root)
|
||||||
|
|
||||||
# Generate timestamp
|
# Generate timestamp
|
||||||
timestamp = datetime.now(UTC).strftime('%Y%m%d-%H%M%S-%f')[:-3]
|
timestamp = datetime.now(UTC).strftime('%Y%m%d-%H%M%S-%f')[:-3]
|
||||||
|
|
@ -210,7 +210,7 @@ def storeDebugMessageAndDocuments(message, currentUser, mandateId=None, featureI
|
||||||
safe_label = "default"
|
safe_label = "default"
|
||||||
|
|
||||||
label_folder = os.path.join(message_path, safe_label)
|
label_folder = os.path.join(message_path, safe_label)
|
||||||
_ensureDir(label_folder)
|
ensureDir(label_folder)
|
||||||
|
|
||||||
# Store each document
|
# Store each document
|
||||||
for i, doc in enumerate(docs):
|
for i, doc in enumerate(docs):
|
||||||
|
|
@ -401,8 +401,8 @@ class ChatObjects:
|
||||||
dbPassword = APP_CONFIG.get("DB_PASSWORD_SECRET")
|
dbPassword = APP_CONFIG.get("DB_PASSWORD_SECRET")
|
||||||
dbPort = int(APP_CONFIG.get("DB_PORT", 5432))
|
dbPort = int(APP_CONFIG.get("DB_PORT", 5432))
|
||||||
|
|
||||||
from modules.connectors.connectorDbPostgre import _get_cached_connector
|
from modules.connectors.connectorDbPostgre import getCachedConnector
|
||||||
self.db = _get_cached_connector(
|
self.db = getCachedConnector(
|
||||||
dbHost=dbHost,
|
dbHost=dbHost,
|
||||||
dbDatabase=dbDatabase,
|
dbDatabase=dbDatabase,
|
||||||
dbUser=dbUser,
|
dbUser=dbUser,
|
||||||
|
|
|
||||||
|
|
@ -35,17 +35,6 @@ from modules.features.chatbot.mainChatbot import getEventManager
|
||||||
from modules.shared.i18nRegistry import apiRouteContext
|
from modules.shared.i18nRegistry import apiRouteContext
|
||||||
routeApiMsg = apiRouteContext("routeFeatureChatbot")
|
routeApiMsg = apiRouteContext("routeFeatureChatbot")
|
||||||
|
|
||||||
# Pre-warm AI connectors when this router loads (before first request).
|
|
||||||
# Ensures connectors are ready; avoids 4–8 s delay on first chatbot message.
|
|
||||||
try:
|
|
||||||
import modules.aicore.aicoreModelRegistry # noqa: F401
|
|
||||||
from modules.aicore.aicoreModelRegistry import modelRegistry
|
|
||||||
modelRegistry.ensureConnectorsRegistered()
|
|
||||||
modelRegistry.refreshModels(force=True)
|
|
||||||
logging.getLogger(__name__).info("Chatbot router: AI connectors pre-warmed")
|
|
||||||
except Exception as e:
|
|
||||||
logging.getLogger(__name__).warning(f"Chatbot AI pre-warm failed: {e}")
|
|
||||||
|
|
||||||
# Configure logger
|
# Configure logger
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
@ -204,19 +193,20 @@ def get_chatbot_threads(
|
||||||
normalized_wf["maxSteps"] = 10
|
normalized_wf["maxSteps"] = 10
|
||||||
normalized_workflows.append(normalized_wf)
|
normalized_workflows.append(normalized_wf)
|
||||||
|
|
||||||
metadata = PaginationMetadata(
|
from modules.routes.routeHelpers import enrichRowsWithFkLabels
|
||||||
currentPage=paginationParams.page if paginationParams else 1,
|
enriched = enrichRowsWithFkLabels(normalized_workflows, ChatbotConversation)
|
||||||
pageSize=paginationParams.pageSize if paginationParams else len(workflows),
|
|
||||||
totalItems=totalItems,
|
|
||||||
totalPages=totalPages,
|
|
||||||
sort=paginationParams.sort if paginationParams else [],
|
|
||||||
filters=paginationParams.filters if paginationParams else None
|
|
||||||
)
|
|
||||||
|
|
||||||
return PaginatedResponse(
|
return {
|
||||||
items=normalized_workflows,
|
"items": enriched,
|
||||||
pagination=metadata
|
"pagination": PaginationMetadata(
|
||||||
)
|
currentPage=paginationParams.page if paginationParams else 1,
|
||||||
|
pageSize=paginationParams.pageSize if paginationParams else len(workflows),
|
||||||
|
totalItems=totalItems,
|
||||||
|
totalPages=totalPages,
|
||||||
|
sort=paginationParams.sort if paginationParams else [],
|
||||||
|
filters=paginationParams.filters if paginationParams else None
|
||||||
|
).model_dump(),
|
||||||
|
}
|
||||||
|
|
||||||
except HTTPException:
|
except HTTPException:
|
||||||
raise
|
raise
|
||||||
|
|
|
||||||
|
|
@ -90,7 +90,7 @@ class CoachingContext(PowerOnModel):
|
||||||
metadata: Optional[str] = Field(default=None, description="JSON object with flexible metadata")
|
metadata: Optional[str] = Field(default=None, description="JSON object with flexible metadata")
|
||||||
sessionCount: int = Field(default=0)
|
sessionCount: int = Field(default=0)
|
||||||
taskCount: int = Field(default=0)
|
taskCount: int = Field(default=0)
|
||||||
lastSessionAt: Optional[str] = Field(default=None)
|
lastSessionAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
|
||||||
rollingOverview: Optional[str] = Field(default=None, description="AI summary of older sessions for long context history")
|
rollingOverview: Optional[str] = Field(default=None, description="AI summary of older sessions for long context history")
|
||||||
rollingOverviewUpToSessionCount: Optional[int] = Field(default=None, description="Session count covered by rollingOverview")
|
rollingOverviewUpToSessionCount: Optional[int] = Field(default=None, description="Session count covered by rollingOverview")
|
||||||
|
|
||||||
|
|
@ -113,8 +113,8 @@ class CoachingSession(PowerOnModel):
|
||||||
messageCount: int = Field(default=0)
|
messageCount: int = Field(default=0)
|
||||||
competenceScore: Optional[float] = Field(default=None, ge=0.0, le=100.0)
|
competenceScore: Optional[float] = Field(default=None, ge=0.0, le=100.0)
|
||||||
emailSent: bool = Field(default=False)
|
emailSent: bool = Field(default=False)
|
||||||
startedAt: Optional[str] = Field(default=None)
|
startedAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
|
||||||
endedAt: Optional[str] = Field(default=None)
|
endedAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
|
||||||
|
|
||||||
|
|
||||||
class CoachingMessage(PowerOnModel):
|
class CoachingMessage(PowerOnModel):
|
||||||
|
|
@ -141,8 +141,8 @@ class CoachingTask(PowerOnModel):
|
||||||
description: Optional[str] = Field(default=None)
|
description: Optional[str] = Field(default=None)
|
||||||
status: CoachingTaskStatus = Field(default=CoachingTaskStatus.OPEN)
|
status: CoachingTaskStatus = Field(default=CoachingTaskStatus.OPEN)
|
||||||
priority: CoachingTaskPriority = Field(default=CoachingTaskPriority.MEDIUM)
|
priority: CoachingTaskPriority = Field(default=CoachingTaskPriority.MEDIUM)
|
||||||
dueDate: Optional[str] = Field(default=None)
|
dueDate: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "date"})
|
||||||
completedAt: Optional[str] = Field(default=None)
|
completedAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
|
||||||
|
|
||||||
|
|
||||||
class CoachingScore(PowerOnModel):
|
class CoachingScore(PowerOnModel):
|
||||||
|
|
@ -171,7 +171,7 @@ class CoachingUserProfile(PowerOnModel):
|
||||||
longestStreak: int = Field(default=0)
|
longestStreak: int = Field(default=0)
|
||||||
totalSessions: int = Field(default=0)
|
totalSessions: int = Field(default=0)
|
||||||
totalMinutes: int = Field(default=0)
|
totalMinutes: int = Field(default=0)
|
||||||
lastSessionAt: Optional[str] = Field(default=None)
|
lastSessionAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
|
||||||
|
|
||||||
|
|
||||||
# ============================================================================
|
# ============================================================================
|
||||||
|
|
@ -204,7 +204,7 @@ class CoachingBadge(PowerOnModel):
|
||||||
mandateId: str = Field(description="Mandate ID")
|
mandateId: str = Field(description="Mandate ID")
|
||||||
instanceId: str = Field(description="Feature instance ID")
|
instanceId: str = Field(description="Feature instance ID")
|
||||||
badgeKey: str = Field(description="Badge identifier, e.g. 'streak_7'")
|
badgeKey: str = Field(description="Badge identifier, e.g. 'streak_7'")
|
||||||
awardedAt: Optional[str] = Field(default=None)
|
awardedAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
|
||||||
|
|
||||||
|
|
||||||
# ============================================================================
|
# ============================================================================
|
||||||
|
|
@ -238,14 +238,14 @@ class CreateTaskRequest(BaseModel):
|
||||||
title: str
|
title: str
|
||||||
description: Optional[str] = None
|
description: Optional[str] = None
|
||||||
priority: Optional[CoachingTaskPriority] = CoachingTaskPriority.MEDIUM
|
priority: Optional[CoachingTaskPriority] = CoachingTaskPriority.MEDIUM
|
||||||
dueDate: Optional[str] = None
|
dueDate: Optional[float] = None
|
||||||
|
|
||||||
|
|
||||||
class UpdateTaskRequest(BaseModel):
|
class UpdateTaskRequest(BaseModel):
|
||||||
title: Optional[str] = None
|
title: Optional[str] = None
|
||||||
description: Optional[str] = None
|
description: Optional[str] = None
|
||||||
priority: Optional[CoachingTaskPriority] = None
|
priority: Optional[CoachingTaskPriority] = None
|
||||||
dueDate: Optional[str] = None
|
dueDate: Optional[float] = None
|
||||||
|
|
||||||
|
|
||||||
class UpdateTaskStatusRequest(BaseModel):
|
class UpdateTaskStatusRequest(BaseModel):
|
||||||
|
|
|
||||||
|
|
@ -12,7 +12,7 @@ from typing import Dict, Any, List, Optional
|
||||||
from modules.datamodels.datamodelUam import User
|
from modules.datamodels.datamodelUam import User
|
||||||
from modules.connectors.connectorDbPostgre import DatabaseConnector
|
from modules.connectors.connectorDbPostgre import DatabaseConnector
|
||||||
from modules.shared.dbRegistry import registerDatabase
|
from modules.shared.dbRegistry import registerDatabase
|
||||||
from modules.shared.timeUtils import getIsoTimestamp
|
from modules.shared.timeUtils import getIsoTimestamp, getUtcTimestamp
|
||||||
from modules.shared.configuration import APP_CONFIG
|
from modules.shared.configuration import APP_CONFIG
|
||||||
from modules.shared.i18nRegistry import resolveText, t
|
from modules.shared.i18nRegistry import resolveText, t
|
||||||
|
|
||||||
|
|
@ -112,7 +112,7 @@ class CommcoachObjects:
|
||||||
CoachingSession,
|
CoachingSession,
|
||||||
recordFilter={"contextId": contextId, "userId": userId},
|
recordFilter={"contextId": contextId, "userId": userId},
|
||||||
)
|
)
|
||||||
records.sort(key=lambda r: r.get("startedAt") or r.get("createdAt") or "", reverse=True)
|
records.sort(key=lambda r: r.get("startedAt") or 0, reverse=True)
|
||||||
return records
|
return records
|
||||||
|
|
||||||
def getSession(self, sessionId: str) -> Optional[Dict[str, Any]]:
|
def getSession(self, sessionId: str) -> Optional[Dict[str, Any]]:
|
||||||
|
|
@ -129,7 +129,7 @@ class CommcoachObjects:
|
||||||
def createSession(self, data: Dict[str, Any]) -> Dict[str, Any]:
|
def createSession(self, data: Dict[str, Any]) -> Dict[str, Any]:
|
||||||
data["createdAt"] = getIsoTimestamp()
|
data["createdAt"] = getIsoTimestamp()
|
||||||
data["updatedAt"] = getIsoTimestamp()
|
data["updatedAt"] = getIsoTimestamp()
|
||||||
data["startedAt"] = getIsoTimestamp()
|
data["startedAt"] = getUtcTimestamp()
|
||||||
return self.db.recordCreate(CoachingSession, data)
|
return self.db.recordCreate(CoachingSession, data)
|
||||||
|
|
||||||
def updateSession(self, sessionId: str, updates: Dict[str, Any]) -> Optional[Dict[str, Any]]:
|
def updateSession(self, sessionId: str, updates: Dict[str, Any]) -> Optional[Dict[str, Any]]:
|
||||||
|
|
@ -281,7 +281,7 @@ class CommcoachObjects:
|
||||||
def getBadges(self, userId: str, instanceId: str) -> List[Dict[str, Any]]:
|
def getBadges(self, userId: str, instanceId: str) -> List[Dict[str, Any]]:
|
||||||
from .datamodelCommcoach import CoachingBadge
|
from .datamodelCommcoach import CoachingBadge
|
||||||
records = self.db.getRecordset(CoachingBadge, recordFilter={"userId": userId, "instanceId": instanceId})
|
records = self.db.getRecordset(CoachingBadge, recordFilter={"userId": userId, "instanceId": instanceId})
|
||||||
records.sort(key=lambda r: r.get("awardedAt") or "", reverse=True)
|
records.sort(key=lambda r: r.get("awardedAt") or 0, reverse=True)
|
||||||
return records
|
return records
|
||||||
|
|
||||||
def hasBadge(self, userId: str, instanceId: str, badgeKey: str) -> bool:
|
def hasBadge(self, userId: str, instanceId: str, badgeKey: str) -> bool:
|
||||||
|
|
@ -291,7 +291,7 @@ class CommcoachObjects:
|
||||||
|
|
||||||
def awardBadge(self, data: Dict[str, Any]) -> Dict[str, Any]:
|
def awardBadge(self, data: Dict[str, Any]) -> Dict[str, Any]:
|
||||||
from .datamodelCommcoach import CoachingBadge
|
from .datamodelCommcoach import CoachingBadge
|
||||||
data["awardedAt"] = getIsoTimestamp()
|
data["awardedAt"] = getUtcTimestamp()
|
||||||
data["createdAt"] = getIsoTimestamp()
|
data["createdAt"] = getIsoTimestamp()
|
||||||
return self.db.recordCreate(CoachingBadge, data)
|
return self.db.recordCreate(CoachingBadge, data)
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -336,10 +336,10 @@ async def startSession(
|
||||||
try:
|
try:
|
||||||
from modules.interfaces.interfaceVoiceObjects import getVoiceInterface
|
from modules.interfaces.interfaceVoiceObjects import getVoiceInterface
|
||||||
voiceInterface = getVoiceInterface(context.user, mandateId)
|
voiceInterface = getVoiceInterface(context.user, mandateId)
|
||||||
from .serviceCommcoach import _getUserVoicePrefs, _stripMarkdownForTts, _buildTtsConfigErrorMessage
|
from .serviceCommcoach import getUserVoicePrefs, stripMarkdownForTts, buildTtsConfigErrorMessage
|
||||||
language, voiceName = _getUserVoicePrefs(userId, mandateId)
|
language, voiceName = getUserVoicePrefs(userId, mandateId)
|
||||||
ttsResult = await voiceInterface.textToSpeech(
|
ttsResult = await voiceInterface.textToSpeech(
|
||||||
text=_stripMarkdownForTts(greetingText),
|
text=stripMarkdownForTts(greetingText),
|
||||||
languageCode=language,
|
languageCode=language,
|
||||||
voiceName=voiceName,
|
voiceName=voiceName,
|
||||||
)
|
)
|
||||||
|
|
@ -471,10 +471,10 @@ async def cancelSession(
|
||||||
raise HTTPException(status_code=404, detail=routeApiMsg("Session not found"))
|
raise HTTPException(status_code=404, detail=routeApiMsg("Session not found"))
|
||||||
_validateOwnership(session, context)
|
_validateOwnership(session, context)
|
||||||
|
|
||||||
from modules.shared.timeUtils import getIsoTimestamp
|
from modules.shared.timeUtils import getUtcTimestamp
|
||||||
interface.updateSession(sessionId, {
|
interface.updateSession(sessionId, {
|
||||||
"status": CoachingSessionStatus.CANCELLED.value,
|
"status": CoachingSessionStatus.CANCELLED.value,
|
||||||
"endedAt": getIsoTimestamp(),
|
"endedAt": getUtcTimestamp(),
|
||||||
})
|
})
|
||||||
return {"cancelled": True}
|
return {"cancelled": True}
|
||||||
|
|
||||||
|
|
@ -584,8 +584,8 @@ async def sendAudioStream(
|
||||||
if not audioBody:
|
if not audioBody:
|
||||||
raise HTTPException(status_code=400, detail=routeApiMsg("No audio data received"))
|
raise HTTPException(status_code=400, detail=routeApiMsg("No audio data received"))
|
||||||
|
|
||||||
from .serviceCommcoach import _getUserVoicePrefs
|
from .serviceCommcoach import getUserVoicePrefs
|
||||||
language, _ = _getUserVoicePrefs(str(context.user.id), mandateId)
|
language, _ = getUserVoicePrefs(str(context.user.id), mandateId)
|
||||||
|
|
||||||
contextId = session.get("contextId")
|
contextId = session.get("contextId")
|
||||||
service = CommcoachService(context.user, mandateId, instanceId)
|
service = CommcoachService(context.user, mandateId, instanceId)
|
||||||
|
|
@ -768,8 +768,8 @@ async def updateTaskStatus(
|
||||||
|
|
||||||
updates = {"status": body.status.value}
|
updates = {"status": body.status.value}
|
||||||
if body.status == CoachingTaskStatus.DONE:
|
if body.status == CoachingTaskStatus.DONE:
|
||||||
from modules.shared.timeUtils import getIsoTimestamp
|
from modules.shared.timeUtils import getUtcTimestamp
|
||||||
updates["completedAt"] = getIsoTimestamp()
|
updates["completedAt"] = getUtcTimestamp()
|
||||||
|
|
||||||
updated = interface.updateTask(taskId, updates)
|
updated = interface.updateTask(taskId, updates)
|
||||||
return {"task": updated}
|
return {"task": updated}
|
||||||
|
|
|
||||||
|
|
@ -14,7 +14,7 @@ from typing import Optional, Dict, Any, List
|
||||||
|
|
||||||
from modules.datamodels.datamodelUam import User
|
from modules.datamodels.datamodelUam import User
|
||||||
from modules.datamodels.datamodelAi import AiCallRequest, AiCallOptions, OperationTypeEnum, PriorityEnum
|
from modules.datamodels.datamodelAi import AiCallRequest, AiCallOptions, OperationTypeEnum, PriorityEnum
|
||||||
from modules.shared.timeUtils import getIsoTimestamp
|
from modules.shared.timeUtils import getIsoTimestamp, getUtcTimestamp
|
||||||
|
|
||||||
from .datamodelCommcoach import (
|
from .datamodelCommcoach import (
|
||||||
CoachingMessage, CoachingMessageRole, CoachingMessageContentType,
|
CoachingMessage, CoachingMessageRole, CoachingMessageContentType,
|
||||||
|
|
@ -79,7 +79,7 @@ def _selectConfiguredVoice(
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
def _buildTtsConfigErrorMessage(language: str, voiceName: Optional[str], rawError: str = "") -> str:
|
def buildTtsConfigErrorMessage(language: str, voiceName: Optional[str], rawError: str = "") -> str:
|
||||||
if voiceName:
|
if voiceName:
|
||||||
return (
|
return (
|
||||||
f'Die konfigurierte Stimme "{voiceName}" für {language} ist ungültig oder nicht verfügbar. '
|
f'Die konfigurierte Stimme "{voiceName}" für {language} ist ungültig oder nicht verfügbar. '
|
||||||
|
|
@ -91,7 +91,7 @@ def _buildTtsConfigErrorMessage(language: str, voiceName: Optional[str], rawErro
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def _getUserVoicePrefs(userId: str, mandateId: Optional[str] = None) -> tuple:
|
def getUserVoicePrefs(userId: str, mandateId: Optional[str] = None) -> tuple:
|
||||||
"""Load voice language and voiceName from central UserVoicePreferences.
|
"""Load voice language and voiceName from central UserVoicePreferences.
|
||||||
Returns (language, voiceName) tuple."""
|
Returns (language, voiceName) tuple."""
|
||||||
try:
|
try:
|
||||||
|
|
@ -160,7 +160,7 @@ def _getUserVoicePrefs(userId: str, mandateId: Optional[str] = None) -> tuple:
|
||||||
return ("de-DE", None)
|
return ("de-DE", None)
|
||||||
|
|
||||||
|
|
||||||
def _stripMarkdownForTts(text: str) -> str:
|
def stripMarkdownForTts(text: str) -> str:
|
||||||
"""Strip markdown formatting so TTS reads clean speech text."""
|
"""Strip markdown formatting so TTS reads clean speech text."""
|
||||||
t = text
|
t = text
|
||||||
t = re.sub(r'\*\*(.+?)\*\*', r'\1', t)
|
t = re.sub(r'\*\*(.+?)\*\*', r'\1', t)
|
||||||
|
|
@ -346,9 +346,9 @@ async def _generateAndEmitTts(sessionId: str, speechText: str, currentUser, mand
|
||||||
from modules.interfaces.interfaceVoiceObjects import getVoiceInterface
|
from modules.interfaces.interfaceVoiceObjects import getVoiceInterface
|
||||||
import base64
|
import base64
|
||||||
voiceInterface = getVoiceInterface(currentUser, mandateId)
|
voiceInterface = getVoiceInterface(currentUser, mandateId)
|
||||||
language, voiceName = _getUserVoicePrefs(str(currentUser.id), mandateId)
|
language, voiceName = getUserVoicePrefs(str(currentUser.id), mandateId)
|
||||||
ttsResult = await voiceInterface.textToSpeech(
|
ttsResult = await voiceInterface.textToSpeech(
|
||||||
text=_stripMarkdownForTts(speechText),
|
text=stripMarkdownForTts(speechText),
|
||||||
languageCode=language,
|
languageCode=language,
|
||||||
voiceName=voiceName,
|
voiceName=voiceName,
|
||||||
)
|
)
|
||||||
|
|
@ -362,7 +362,7 @@ async def _generateAndEmitTts(sessionId: str, speechText: str, currentUser, mand
|
||||||
return
|
return
|
||||||
errorDetail = ttsResult.get("error", "Text-to-Speech failed")
|
errorDetail = ttsResult.get("error", "Text-to-Speech failed")
|
||||||
await emitSessionEvent(sessionId, "error", {
|
await emitSessionEvent(sessionId, "error", {
|
||||||
"message": _buildTtsConfigErrorMessage(language, voiceName, errorDetail),
|
"message": buildTtsConfigErrorMessage(language, voiceName, errorDetail),
|
||||||
"detail": errorDetail,
|
"detail": errorDetail,
|
||||||
"ttsLanguage": language,
|
"ttsLanguage": language,
|
||||||
"ttsVoice": voiceName,
|
"ttsVoice": voiceName,
|
||||||
|
|
@ -370,7 +370,7 @@ async def _generateAndEmitTts(sessionId: str, speechText: str, currentUser, mand
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.warning(f"TTS failed for session {sessionId}: {e}")
|
logger.warning(f"TTS failed for session {sessionId}: {e}")
|
||||||
await emitSessionEvent(sessionId, "error", {
|
await emitSessionEvent(sessionId, "error", {
|
||||||
"message": _buildTtsConfigErrorMessage("de-DE", None, str(e)),
|
"message": buildTtsConfigErrorMessage("de-DE", None, str(e)),
|
||||||
"detail": str(e),
|
"detail": str(e),
|
||||||
})
|
})
|
||||||
|
|
||||||
|
|
@ -695,7 +695,7 @@ _TTS_WORD_LIMIT = 200
|
||||||
|
|
||||||
async def _prepareSpeechText(fullText: str, callAiFn) -> str:
|
async def _prepareSpeechText(fullText: str, callAiFn) -> str:
|
||||||
"""Prepare text for TTS. Short responses used directly; long ones get summarized."""
|
"""Prepare text for TTS. Short responses used directly; long ones get summarized."""
|
||||||
cleaned = _stripMarkdownForTts(fullText)
|
cleaned = stripMarkdownForTts(fullText)
|
||||||
wordCount = len(cleaned.split())
|
wordCount = len(cleaned.split())
|
||||||
if wordCount <= _TTS_WORD_LIMIT:
|
if wordCount <= _TTS_WORD_LIMIT:
|
||||||
return cleaned
|
return cleaned
|
||||||
|
|
@ -1107,7 +1107,7 @@ class CommcoachService:
|
||||||
if len(messages) < 2:
|
if len(messages) < 2:
|
||||||
interface.updateSession(sessionId, {
|
interface.updateSession(sessionId, {
|
||||||
"status": CoachingSessionStatus.COMPLETED.value,
|
"status": CoachingSessionStatus.COMPLETED.value,
|
||||||
"endedAt": getIsoTimestamp(),
|
"endedAt": getUtcTimestamp(),
|
||||||
"compressedHistorySummary": None,
|
"compressedHistorySummary": None,
|
||||||
"compressedHistoryUpToMessageCount": None,
|
"compressedHistoryUpToMessageCount": None,
|
||||||
})
|
})
|
||||||
|
|
@ -1252,21 +1252,18 @@ class CommcoachService:
|
||||||
logger.warning(f"Coaching session indexing failed (non-blocking): {e}")
|
logger.warning(f"Coaching session indexing failed (non-blocking): {e}")
|
||||||
|
|
||||||
# Calculate duration
|
# Calculate duration
|
||||||
startedAt = session.get("startedAt", "")
|
startedAt = session.get("startedAt")
|
||||||
durationSeconds = 0
|
durationSeconds = 0
|
||||||
if startedAt:
|
if startedAt:
|
||||||
try:
|
from datetime import datetime, timezone
|
||||||
from datetime import datetime
|
start = datetime.fromtimestamp(startedAt, tz=timezone.utc)
|
||||||
start = datetime.fromisoformat(startedAt.replace("Z", "+00:00"))
|
end = datetime.now(timezone.utc)
|
||||||
end = datetime.now(start.tzinfo) if start.tzinfo else datetime.now()
|
durationSeconds = int((end - start).total_seconds())
|
||||||
durationSeconds = int((end - start).total_seconds())
|
|
||||||
except Exception:
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Update session - clear compressed history so it never leaks into new sessions
|
# Update session - clear compressed history so it never leaks into new sessions
|
||||||
sessionUpdates = {
|
sessionUpdates = {
|
||||||
"status": CoachingSessionStatus.COMPLETED.value,
|
"status": CoachingSessionStatus.COMPLETED.value,
|
||||||
"endedAt": getIsoTimestamp(),
|
"endedAt": getUtcTimestamp(),
|
||||||
"summary": summary,
|
"summary": summary,
|
||||||
"durationSeconds": durationSeconds,
|
"durationSeconds": durationSeconds,
|
||||||
"messageCount": len(messages),
|
"messageCount": len(messages),
|
||||||
|
|
@ -1285,7 +1282,7 @@ class CommcoachService:
|
||||||
completedCount = len([s for s in allSessions if s.get("status") == CoachingSessionStatus.COMPLETED.value])
|
completedCount = len([s for s in allSessions if s.get("status") == CoachingSessionStatus.COMPLETED.value])
|
||||||
interface.updateContext(contextId, {
|
interface.updateContext(contextId, {
|
||||||
"sessionCount": completedCount,
|
"sessionCount": completedCount,
|
||||||
"lastSessionAt": getIsoTimestamp(),
|
"lastSessionAt": getUtcTimestamp(),
|
||||||
})
|
})
|
||||||
|
|
||||||
# Update user profile streak
|
# Update user profile streak
|
||||||
|
|
@ -1324,26 +1321,23 @@ class CommcoachService:
|
||||||
if not profile:
|
if not profile:
|
||||||
profile = interface.getOrCreateProfile(self.userId, self.mandateId, self.instanceId)
|
profile = interface.getOrCreateProfile(self.userId, self.mandateId, self.instanceId)
|
||||||
|
|
||||||
from datetime import datetime, timedelta
|
from datetime import datetime, timezone
|
||||||
|
|
||||||
lastSessionAt = profile.get("lastSessionAt")
|
lastSessionAt = profile.get("lastSessionAt")
|
||||||
currentStreak = profile.get("streakDays", 0)
|
currentStreak = profile.get("streakDays", 0)
|
||||||
longestStreak = profile.get("longestStreak", 0)
|
longestStreak = profile.get("longestStreak", 0)
|
||||||
totalSessions = profile.get("totalSessions", 0)
|
totalSessions = profile.get("totalSessions", 0)
|
||||||
|
|
||||||
today = datetime.now().date()
|
today = datetime.now(timezone.utc).date()
|
||||||
isConsecutive = False
|
isConsecutive = False
|
||||||
|
|
||||||
if lastSessionAt:
|
if lastSessionAt:
|
||||||
try:
|
lastDate = datetime.fromtimestamp(lastSessionAt, tz=timezone.utc).date()
|
||||||
lastDate = datetime.fromisoformat(lastSessionAt.replace("Z", "+00:00")).date()
|
diff = (today - lastDate).days
|
||||||
diff = (today - lastDate).days
|
if diff == 1:
|
||||||
if diff == 1:
|
isConsecutive = True
|
||||||
isConsecutive = True
|
elif diff == 0:
|
||||||
elif diff == 0:
|
isConsecutive = True
|
||||||
isConsecutive = True # Same day, maintain streak
|
|
||||||
except Exception:
|
|
||||||
pass
|
|
||||||
|
|
||||||
newStreak = (currentStreak + 1) if isConsecutive else 1
|
newStreak = (currentStreak + 1) if isConsecutive else 1
|
||||||
newLongest = max(longestStreak, newStreak)
|
newLongest = max(longestStreak, newStreak)
|
||||||
|
|
@ -1352,7 +1346,7 @@ class CommcoachService:
|
||||||
"streakDays": newStreak,
|
"streakDays": newStreak,
|
||||||
"longestStreak": newLongest,
|
"longestStreak": newLongest,
|
||||||
"totalSessions": totalSessions + 1,
|
"totalSessions": totalSessions + 1,
|
||||||
"lastSessionAt": getIsoTimestamp(),
|
"lastSessionAt": getUtcTimestamp(),
|
||||||
})
|
})
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.warning(f"Failed to update streak: {e}")
|
logger.warning(f"Failed to update streak: {e}")
|
||||||
|
|
@ -1373,7 +1367,7 @@ class CommcoachService:
|
||||||
|
|
||||||
from modules.interfaces.interfaceMessaging import getInterface as getMessagingInterface
|
from modules.interfaces.interfaceMessaging import getInterface as getMessagingInterface
|
||||||
from modules.interfaces.interfaceDbApp import getRootInterface
|
from modules.interfaces.interfaceDbApp import getRootInterface
|
||||||
from modules.shared.notifyMandateAdmins import _renderHtmlEmail, _resolveMandateName
|
from modules.shared.notifyMandateAdmins import renderHtmlEmail, resolveMandateName
|
||||||
|
|
||||||
rootInterface = getRootInterface()
|
rootInterface = getRootInterface()
|
||||||
user = rootInterface.getUser(self.userId)
|
user = rootInterface.getUser(self.userId)
|
||||||
|
|
@ -1382,9 +1376,9 @@ class CommcoachService:
|
||||||
|
|
||||||
messaging = getMessagingInterface()
|
messaging = getMessagingInterface()
|
||||||
subject = f"Coaching-Session Zusammenfassung: {contextTitle}"
|
subject = f"Coaching-Session Zusammenfassung: {contextTitle}"
|
||||||
mandateName = _resolveMandateName(self.mandateId)
|
mandateName = resolveMandateName(self.mandateId)
|
||||||
contentHtml = _buildSummaryEmailBlock(emailData, summary, contextTitle)
|
contentHtml = _buildSummaryEmailBlock(emailData, summary, contextTitle)
|
||||||
htmlMessage = _renderHtmlEmail(
|
htmlMessage = renderHtmlEmail(
|
||||||
"Coaching-Session Zusammenfassung",
|
"Coaching-Session Zusammenfassung",
|
||||||
[
|
[
|
||||||
f'Thema: {contextTitle}',
|
f'Thema: {contextTitle}',
|
||||||
|
|
@ -1418,14 +1412,13 @@ class CommcoachService:
|
||||||
completedSessions = [s for s in allSessions if s.get("status") == CoachingSessionStatus.COMPLETED.value]
|
completedSessions = [s for s in allSessions if s.get("status") == CoachingSessionStatus.COMPLETED.value]
|
||||||
|
|
||||||
for s in completedSessions:
|
for s in completedSessions:
|
||||||
startedAt = s.get("startedAt") or s.get("createdAt") or ""
|
startedAt = s.get("startedAt")
|
||||||
if startedAt:
|
if startedAt:
|
||||||
try:
|
from datetime import datetime, timezone
|
||||||
from datetime import datetime
|
dt = datetime.fromtimestamp(startedAt, tz=timezone.utc)
|
||||||
dt = datetime.fromisoformat(str(startedAt).replace("Z", "+00:00"))
|
s["date"] = dt.strftime("%d.%m.%Y")
|
||||||
s["date"] = dt.strftime("%d.%m.%Y")
|
else:
|
||||||
except Exception:
|
s["date"] = ""
|
||||||
s["date"] = ""
|
|
||||||
|
|
||||||
result = {
|
result = {
|
||||||
"intent": intent,
|
"intent": intent,
|
||||||
|
|
|
||||||
|
|
@ -206,14 +206,11 @@ Tool-Nutzung:
|
||||||
|
|
||||||
if retrievedSession:
|
if retrievedSession:
|
||||||
dateStr = ""
|
dateStr = ""
|
||||||
startedAt = retrievedSession.get("startedAt") or retrievedSession.get("createdAt")
|
startedAt = retrievedSession.get("startedAt")
|
||||||
if startedAt:
|
if startedAt:
|
||||||
try:
|
from datetime import datetime, timezone
|
||||||
from datetime import datetime
|
dt = datetime.fromtimestamp(startedAt, tz=timezone.utc)
|
||||||
dt = datetime.fromisoformat(str(startedAt).replace("Z", "+00:00"))
|
dateStr = dt.strftime("%d.%m.%Y")
|
||||||
dateStr = dt.strftime("%d.%m.%Y")
|
|
||||||
except Exception:
|
|
||||||
pass
|
|
||||||
prompt += f"\n\nVom Benutzer angefragte Session ({dateStr}):"
|
prompt += f"\n\nVom Benutzer angefragte Session ({dateStr}):"
|
||||||
prompt += f"\n{retrievedSession.get('summary', '')[:500]}"
|
prompt += f"\n{retrievedSession.get('summary', '')[:500]}"
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -7,7 +7,7 @@ Intent detection, retrieval strategies, and context assembly for intelligent ses
|
||||||
|
|
||||||
import re
|
import re
|
||||||
import logging
|
import logging
|
||||||
from datetime import datetime
|
from datetime import datetime, timezone
|
||||||
from typing import Optional, Dict, Any, List, Tuple
|
from typing import Optional, Dict, Any, List, Tuple
|
||||||
from enum import Enum
|
from enum import Enum
|
||||||
|
|
||||||
|
|
@ -106,18 +106,15 @@ def findSessionByDate(
|
||||||
for s in sessions:
|
for s in sessions:
|
||||||
if s.get("status") != "completed":
|
if s.get("status") != "completed":
|
||||||
continue
|
continue
|
||||||
startedAt = s.get("startedAt") or s.get("endedAt") or s.get("createdAt")
|
startedAt = s.get("startedAt") or s.get("endedAt")
|
||||||
if not startedAt:
|
if not startedAt:
|
||||||
continue
|
continue
|
||||||
try:
|
dt = datetime.fromtimestamp(startedAt, tz=timezone.utc)
|
||||||
dt = datetime.fromisoformat(startedAt.replace("Z", "+00:00"))
|
sessionDate = dt.date()
|
||||||
sessionDate = dt.date()
|
diff = abs((sessionDate - targetDateOnly).days)
|
||||||
diff = abs((sessionDate - targetDateOnly).days)
|
if bestDiff is None or diff < bestDiff:
|
||||||
if bestDiff is None or diff < bestDiff:
|
bestDiff = diff
|
||||||
bestDiff = diff
|
bestMatch = s
|
||||||
bestMatch = s
|
|
||||||
except Exception:
|
|
||||||
continue
|
|
||||||
|
|
||||||
return bestMatch
|
return bestMatch
|
||||||
|
|
||||||
|
|
@ -231,17 +228,14 @@ def buildSessionSummariesForPrompt(
|
||||||
and s.get("summary")
|
and s.get("summary")
|
||||||
and s.get("id") != excludeSessionId
|
and s.get("id") != excludeSessionId
|
||||||
]
|
]
|
||||||
completed.sort(key=lambda x: x.get("startedAt") or x.get("createdAt") or "", reverse=True)
|
completed.sort(key=lambda x: x.get("startedAt") or 0, reverse=True)
|
||||||
result = []
|
result = []
|
||||||
for s in completed[:limit]:
|
for s in completed[:limit]:
|
||||||
startedAt = s.get("startedAt") or s.get("createdAt") or ""
|
startedAt = s.get("startedAt")
|
||||||
dateStr = ""
|
dateStr = ""
|
||||||
if startedAt:
|
if startedAt:
|
||||||
try:
|
dt = datetime.fromtimestamp(startedAt, tz=timezone.utc)
|
||||||
dt = datetime.fromisoformat(startedAt.replace("Z", "+00:00"))
|
dateStr = dt.strftime("%d.%m.%Y")
|
||||||
dateStr = dt.strftime("%d.%m.%Y")
|
|
||||||
except Exception:
|
|
||||||
pass
|
|
||||||
result.append({
|
result.append({
|
||||||
"summary": s.get("summary", ""),
|
"summary": s.get("summary", ""),
|
||||||
"date": dateStr,
|
"date": dateStr,
|
||||||
|
|
|
||||||
|
|
@ -8,7 +8,7 @@ Generates Markdown and PDF exports for dossiers and sessions.
|
||||||
import logging
|
import logging
|
||||||
import json
|
import json
|
||||||
from typing import Dict, Any, List, Optional
|
from typing import Dict, Any, List, Optional
|
||||||
from datetime import datetime
|
from datetime import datetime, timezone
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
@ -49,7 +49,7 @@ def buildDossierMarkdown(context: Dict[str, Any], sessions: List[Dict[str, Any]]
|
||||||
lines.append(f"- {text}")
|
lines.append(f"- {text}")
|
||||||
|
|
||||||
completedSessions = [s for s in sessions if s.get("status") == "completed"]
|
completedSessions = [s for s in sessions if s.get("status") == "completed"]
|
||||||
completedSessions.sort(key=lambda s: s.get("startedAt") or s.get("createdAt") or "")
|
completedSessions.sort(key=lambda s: s.get("startedAt") or 0)
|
||||||
if completedSessions:
|
if completedSessions:
|
||||||
lines += ["", "## Sessions", ""]
|
lines += ["", "## Sessions", ""]
|
||||||
for i, s in enumerate(completedSessions, 1):
|
for i, s in enumerate(completedSessions, 1):
|
||||||
|
|
@ -227,14 +227,14 @@ def _mdToXml(text: str) -> str:
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def _formatDate(isoStr: Optional[str]) -> str:
|
def _formatDate(val) -> str:
|
||||||
if not isoStr:
|
if not val:
|
||||||
return datetime.now().strftime("%d.%m.%Y")
|
return datetime.now(timezone.utc).strftime("%d.%m.%Y")
|
||||||
try:
|
if isinstance(val, (int, float)):
|
||||||
dt = datetime.fromisoformat(str(isoStr).replace("Z", "+00:00"))
|
dt = datetime.fromtimestamp(float(val), tz=timezone.utc)
|
||||||
return dt.strftime("%d.%m.%Y")
|
return dt.strftime("%d.%m.%Y")
|
||||||
except Exception:
|
dt = datetime.fromisoformat(str(val).replace("Z", "+00:00"))
|
||||||
return isoStr
|
return dt.strftime("%d.%m.%Y")
|
||||||
|
|
||||||
|
|
||||||
def _parseJson(value, fallback):
|
def _parseJson(value, fallback):
|
||||||
|
|
|
||||||
|
|
@ -174,14 +174,26 @@ async def indexSessionData(
|
||||||
for c in chunks
|
for c in chunks
|
||||||
]
|
]
|
||||||
|
|
||||||
await knowledgeService.indexFile(
|
from modules.serviceCenter.services.serviceKnowledge.mainServiceKnowledge import IngestionJob
|
||||||
fileId=syntheticFileId,
|
|
||||||
fileName=f"coaching-session-{sessionId[:8]}",
|
await knowledgeService.requestIngestion(
|
||||||
mimeType="application/x-coaching-session",
|
IngestionJob(
|
||||||
userId=userId,
|
sourceKind="coaching_session",
|
||||||
featureInstanceId=featureInstanceId,
|
sourceId=syntheticFileId,
|
||||||
mandateId=mandateId,
|
fileName=f"coaching-session-{sessionId[:8]}",
|
||||||
contentObjects=contentObjects,
|
mimeType="application/x-coaching-session",
|
||||||
|
userId=userId,
|
||||||
|
featureInstanceId=featureInstanceId,
|
||||||
|
mandateId=mandateId,
|
||||||
|
contentObjects=contentObjects,
|
||||||
|
provenance={
|
||||||
|
"lane": "feature",
|
||||||
|
"feature": "commcoach",
|
||||||
|
"sessionId": sessionId,
|
||||||
|
"contextId": contextId,
|
||||||
|
"messageCount": len(messages or []),
|
||||||
|
},
|
||||||
|
)
|
||||||
)
|
)
|
||||||
logger.info(f"Successfully indexed coaching session {sessionId} ({len(chunks)} chunks)")
|
logger.info(f"Successfully indexed coaching session {sessionId} ({len(chunks)} chunks)")
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
|
|
|
||||||
|
|
@ -64,7 +64,7 @@ async def _runDailyReminders():
|
||||||
from modules.connectors.connectorDbPostgre import DatabaseConnector
|
from modules.connectors.connectorDbPostgre import DatabaseConnector
|
||||||
from .datamodelCommcoach import CoachingUserProfile, CoachingContextStatus
|
from .datamodelCommcoach import CoachingUserProfile, CoachingContextStatus
|
||||||
from modules.interfaces.interfaceMessaging import getInterface as getMessagingInterface
|
from modules.interfaces.interfaceMessaging import getInterface as getMessagingInterface
|
||||||
from modules.shared.notifyMandateAdmins import _renderHtmlEmail, _resolveMandateName
|
from modules.shared.notifyMandateAdmins import renderHtmlEmail, resolveMandateName
|
||||||
|
|
||||||
dbHost = APP_CONFIG.get("DB_HOST", "_no_config_default_data")
|
dbHost = APP_CONFIG.get("DB_HOST", "_no_config_default_data")
|
||||||
db = DatabaseConnector(
|
db = DatabaseConnector(
|
||||||
|
|
@ -106,8 +106,8 @@ async def _runDailyReminders():
|
||||||
contextList = ", ".join(contextTitles)
|
contextList = ", ".join(contextTitles)
|
||||||
|
|
||||||
subject = "Dein tägliches Coaching wartet"
|
subject = "Dein tägliches Coaching wartet"
|
||||||
mandateName = _resolveMandateName(profile.get("mandateId"))
|
mandateName = resolveMandateName(profile.get("mandateId"))
|
||||||
htmlMessage = _renderHtmlEmail(
|
htmlMessage = renderHtmlEmail(
|
||||||
"Zeit für dein tägliches Coaching",
|
"Zeit für dein tägliches Coaching",
|
||||||
[
|
[
|
||||||
f"Du hast aktuell {len(contexts)} aktive Coaching-Themen.",
|
f"Du hast aktuell {len(contexts)} aktive Coaching-Themen.",
|
||||||
|
|
|
||||||
205
modules/features/graphicalEditor/adapterValidator.py
Normal file
205
modules/features/graphicalEditor/adapterValidator.py
Normal file
|
|
@ -0,0 +1,205 @@
|
||||||
|
# Copyright (c) 2025 Patrick Motsch
|
||||||
|
# All rights reserved.
|
||||||
|
"""
|
||||||
|
Adapter Validator — enforces 5 drift rules between Schicht-3 NodeAdapters
|
||||||
|
and the Schicht-2 Actions they bind to.
|
||||||
|
|
||||||
|
This is the CI-safety net described in the typed-action-architecture plan:
|
||||||
|
any drift between an Editor-Node Adapter and the underlying Action signature
|
||||||
|
must be caught at build time, never silently in production.
|
||||||
|
|
||||||
|
Rules
|
||||||
|
-----
|
||||||
|
1. Every `userParams[].actionArg` exists as a parameter in the bound Action.
|
||||||
|
2. Every required Action parameter is covered by either `userParams` or
|
||||||
|
`contextParams` (i.e. no required arg is silently unset).
|
||||||
|
3. Every Action parameter type exists in PORT_TYPE_CATALOG (or is a primitive).
|
||||||
|
4. The Action `outputType` exists in PORT_TYPE_CATALOG (or is a primitive).
|
||||||
|
5. Every method-bound STATIC node has an Adapter (no orphan node ids).
|
||||||
|
|
||||||
|
Rules 3+4 are already enforced by `_actionSignatureValidator` in Phase 2 —
|
||||||
|
this module composes with it so the report covers both layers.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from dataclasses import dataclass, field
|
||||||
|
from typing import Any, Dict, List, Mapping
|
||||||
|
|
||||||
|
from modules.features.graphicalEditor.nodeAdapter import (
|
||||||
|
NodeAdapter,
|
||||||
|
_adapterFromLegacyNode,
|
||||||
|
_isMethodBoundNode,
|
||||||
|
)
|
||||||
|
from modules.workflows.methods._actionSignatureValidator import _validateTypeRef
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class AdapterValidationReport:
|
||||||
|
"""Aggregated drift report across all adapters."""
|
||||||
|
|
||||||
|
errors: List[str] = field(default_factory=list)
|
||||||
|
warnings: List[str] = field(default_factory=list)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def isHealthy(self) -> bool:
|
||||||
|
return not self.errors
|
||||||
|
|
||||||
|
def merge(self, other: "AdapterValidationReport") -> None:
|
||||||
|
self.errors.extend(other.errors)
|
||||||
|
self.warnings.extend(other.warnings)
|
||||||
|
|
||||||
|
|
||||||
|
def _validateAdapterAgainstAction(
|
||||||
|
adapter: NodeAdapter,
|
||||||
|
actionDef: Any,
|
||||||
|
) -> AdapterValidationReport:
|
||||||
|
"""Apply rules 1-4 to a single Adapter / Action pair.
|
||||||
|
|
||||||
|
`actionDef` is duck-typed so tests can pass dataclasses; production passes
|
||||||
|
a `WorkflowActionDefinition` Pydantic model.
|
||||||
|
"""
|
||||||
|
report = AdapterValidationReport()
|
||||||
|
actionParams: Mapping[str, Any] = getattr(actionDef, "parameters", {}) or {}
|
||||||
|
outputType: str = getattr(actionDef, "outputType", "ActionResult") or "ActionResult"
|
||||||
|
|
||||||
|
# Rule 1: every userParam.actionArg exists in the Action
|
||||||
|
declaredArgs = {up.actionArg for up in adapter.userParams}
|
||||||
|
for arg in declaredArgs:
|
||||||
|
if arg not in actionParams:
|
||||||
|
report.errors.append(
|
||||||
|
f"adapter '{adapter.nodeId}' bindsAction '{adapter.bindsAction}': "
|
||||||
|
f"userParams.actionArg '{arg}' does not exist in action parameters "
|
||||||
|
f"(known: {sorted(actionParams.keys())})"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Rule 2: every required Action arg is covered (userParams OR contextParams)
|
||||||
|
coveredArgs = declaredArgs | set(adapter.contextParams.keys())
|
||||||
|
for paramName, paramDef in actionParams.items():
|
||||||
|
isRequired = bool(getattr(paramDef, "required", False))
|
||||||
|
if isRequired and paramName not in coveredArgs:
|
||||||
|
report.errors.append(
|
||||||
|
f"adapter '{adapter.nodeId}' bindsAction '{adapter.bindsAction}': "
|
||||||
|
f"required action arg '{paramName}' is neither in userParams nor contextParams"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Rule 3: every Action parameter type exists in catalog (re-runs Phase-2 rule)
|
||||||
|
for paramName, paramDef in actionParams.items():
|
||||||
|
typeRef = getattr(paramDef, "type", None)
|
||||||
|
if not typeRef:
|
||||||
|
report.errors.append(
|
||||||
|
f"action '{adapter.bindsAction}.{paramName}': missing 'type' on parameter"
|
||||||
|
)
|
||||||
|
continue
|
||||||
|
for err in _validateTypeRef(typeRef):
|
||||||
|
report.errors.append(
|
||||||
|
f"action '{adapter.bindsAction}.{paramName}': {err}"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Rule 4: Action outputType exists in catalog (or is a generic fire-and-forget type)
|
||||||
|
if outputType not in {"ActionResult", "Transit"}:
|
||||||
|
for err in _validateTypeRef(outputType):
|
||||||
|
report.errors.append(
|
||||||
|
f"action '{adapter.bindsAction}'.outputType: {err}"
|
||||||
|
)
|
||||||
|
|
||||||
|
return report
|
||||||
|
|
||||||
|
|
||||||
|
def _validateAllAdapters(
|
||||||
|
staticNodes: List[Mapping[str, Any]],
|
||||||
|
actionsRegistry: Mapping[str, Mapping[str, Any]],
|
||||||
|
) -> AdapterValidationReport:
|
||||||
|
"""Run rules 1-5 across all method-bound static node definitions.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
staticNodes: list of legacy node-dicts (`STATIC_NODE_TYPES`).
|
||||||
|
actionsRegistry: mapping of method-shortname -> {actionName: WorkflowActionDefinition}.
|
||||||
|
Built from live `methods` registry or test-stubbed methods.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Aggregated drift report. `isHealthy` is True only if every method-bound
|
||||||
|
node has a matching Action and all 5 rules pass.
|
||||||
|
"""
|
||||||
|
report = AdapterValidationReport()
|
||||||
|
seenAdapterIds: set[str] = set()
|
||||||
|
|
||||||
|
for node in staticNodes:
|
||||||
|
if not _isMethodBoundNode(node):
|
||||||
|
continue
|
||||||
|
|
||||||
|
adapter = _adapterFromLegacyNode(node)
|
||||||
|
if adapter is None:
|
||||||
|
report.errors.append(
|
||||||
|
f"node '{node.get('id')}' is method-bound but adapter projection failed"
|
||||||
|
)
|
||||||
|
continue
|
||||||
|
seenAdapterIds.add(adapter.nodeId)
|
||||||
|
|
||||||
|
methodName = str(node.get("_method") or "")
|
||||||
|
actionName = str(node.get("_action") or "")
|
||||||
|
methodActions = actionsRegistry.get(methodName) or {}
|
||||||
|
actionDef = methodActions.get(actionName)
|
||||||
|
if actionDef is None:
|
||||||
|
report.errors.append(
|
||||||
|
f"adapter '{adapter.nodeId}' bindsAction '{adapter.bindsAction}': "
|
||||||
|
f"action not found in registry (method '{methodName}' has actions: "
|
||||||
|
f"{sorted(methodActions.keys())})"
|
||||||
|
)
|
||||||
|
continue
|
||||||
|
|
||||||
|
report.merge(_validateAdapterAgainstAction(adapter, actionDef))
|
||||||
|
|
||||||
|
# Rule 5: every Action with dynamicMode=False MUST have an Editor Adapter.
|
||||||
|
# dynamicMode=True actions are agent-only and may legitimately lack one.
|
||||||
|
boundActions: set[str] = set()
|
||||||
|
for node in staticNodes:
|
||||||
|
if not _isMethodBoundNode(node):
|
||||||
|
continue
|
||||||
|
boundActions.add(f"{node.get('_method')}.{node.get('_action')}")
|
||||||
|
|
||||||
|
for methodName, actions in actionsRegistry.items():
|
||||||
|
for actionName, actionDef in actions.items():
|
||||||
|
if bool(getattr(actionDef, "dynamicMode", False)):
|
||||||
|
continue
|
||||||
|
fqn = f"{methodName}.{actionName}"
|
||||||
|
if fqn not in boundActions:
|
||||||
|
report.warnings.append(
|
||||||
|
f"action '{fqn}' has no Editor adapter "
|
||||||
|
f"(set dynamicMode=True if intended as agent-only)"
|
||||||
|
)
|
||||||
|
|
||||||
|
return report
|
||||||
|
|
||||||
|
|
||||||
|
def _formatAdapterReport(report: AdapterValidationReport) -> str:
|
||||||
|
"""Format a report for human-readable logging."""
|
||||||
|
lines: List[str] = []
|
||||||
|
if report.isHealthy and not report.warnings:
|
||||||
|
lines.append("Adapter validator: all healthy.")
|
||||||
|
return "\n".join(lines)
|
||||||
|
|
||||||
|
if report.errors:
|
||||||
|
lines.append(f"Adapter validator: {len(report.errors)} ERROR(s)")
|
||||||
|
for e in report.errors:
|
||||||
|
lines.append(f" ERROR: {e}")
|
||||||
|
if report.warnings:
|
||||||
|
lines.append(f"Adapter validator: {len(report.warnings)} WARNING(s)")
|
||||||
|
for w in report.warnings:
|
||||||
|
lines.append(f" WARN: {w}")
|
||||||
|
return "\n".join(lines)
|
||||||
|
|
||||||
|
|
||||||
|
def _buildActionsRegistryFromMethods(
|
||||||
|
methodInstances: Mapping[str, Any],
|
||||||
|
) -> Dict[str, Dict[str, Any]]:
|
||||||
|
"""Convenience: turn `{shortName: methodInstance}` into the registry shape.
|
||||||
|
|
||||||
|
`methodInstance._actions` is a dict of action-name -> WorkflowActionDefinition.
|
||||||
|
"""
|
||||||
|
registry: Dict[str, Dict[str, Any]] = {}
|
||||||
|
for shortName, instance in methodInstances.items():
|
||||||
|
actions = getattr(instance, "_actions", None)
|
||||||
|
if isinstance(actions, dict):
|
||||||
|
registry[shortName] = dict(actions)
|
||||||
|
return registry
|
||||||
|
|
@ -68,23 +68,28 @@ class AutoWorkflow(PowerOnModel):
|
||||||
"frontend_readonly": True,
|
"frontend_readonly": True,
|
||||||
"frontend_required": False,
|
"frontend_required": False,
|
||||||
"label": "Mandanten-ID",
|
"label": "Mandanten-ID",
|
||||||
"frontend_fk_source": "/api/mandates/",
|
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||||
"frontend_fk_display_field": "label",
|
|
||||||
"fk_model": "Mandate",
|
|
||||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
featureInstanceId: str = Field(
|
featureInstanceId: str = Field(
|
||||||
description="Feature instance ID",
|
description="Feature instance ID (GE owner instance / RBAC scope)",
|
||||||
json_schema_extra={
|
json_schema_extra={
|
||||||
"frontend_type": "text",
|
"frontend_type": "text",
|
||||||
"frontend_readonly": True,
|
"frontend_readonly": True,
|
||||||
"frontend_required": False,
|
"frontend_required": False,
|
||||||
"label": "Feature-Instanz-ID",
|
"label": "Feature-Instanz-ID",
|
||||||
"frontend_fk_source": "/api/features/instances",
|
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||||
"frontend_fk_display_field": "label",
|
},
|
||||||
"fk_model": "FeatureInstance",
|
)
|
||||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
|
targetFeatureInstanceId: Optional[str] = Field(
|
||||||
|
default=None,
|
||||||
|
description="Target feature instance for execution data scope. NULL for templates, mandatory for non-templates.",
|
||||||
|
json_schema_extra={
|
||||||
|
"frontend_type": "select",
|
||||||
|
"frontend_readonly": False,
|
||||||
|
"frontend_required": False,
|
||||||
|
"label": "Ziel-Instanz",
|
||||||
|
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
label: str = Field(
|
label: str = Field(
|
||||||
|
|
@ -104,7 +109,12 @@ class AutoWorkflow(PowerOnModel):
|
||||||
isTemplate: bool = Field(
|
isTemplate: bool = Field(
|
||||||
default=False,
|
default=False,
|
||||||
description="Whether this workflow is a template",
|
description="Whether this workflow is a template",
|
||||||
json_schema_extra={"frontend_type": "checkbox", "frontend_required": False, "label": "Ist Vorlage"},
|
json_schema_extra={
|
||||||
|
"frontend_type": "checkbox",
|
||||||
|
"frontend_required": False,
|
||||||
|
"label": "Ist Vorlage",
|
||||||
|
"frontend_format_labels": ["Ja", "-", "Nein"],
|
||||||
|
},
|
||||||
)
|
)
|
||||||
templateSourceId: Optional[str] = Field(
|
templateSourceId: Optional[str] = Field(
|
||||||
default=None,
|
default=None,
|
||||||
|
|
@ -114,18 +124,43 @@ class AutoWorkflow(PowerOnModel):
|
||||||
"frontend_readonly": True,
|
"frontend_readonly": True,
|
||||||
"frontend_required": False,
|
"frontend_required": False,
|
||||||
"label": "Vorlagen-Quelle",
|
"label": "Vorlagen-Quelle",
|
||||||
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoWorkflow"},
|
# Soft FK: holds either a real AutoWorkflow.id (UUID, when copied
|
||||||
|
# from a stored template) OR an in-code sentinel like
|
||||||
|
# "trustee-receipt-import" (when bootstrapped from
|
||||||
|
# featureModule.getTemplateWorkflows()). Sentinel values do not
|
||||||
|
# exist as DB rows by design — orphan cleanup MUST skip this column.
|
||||||
|
"fk_target": {
|
||||||
|
"db": "poweron_graphicaleditor",
|
||||||
|
"table": "AutoWorkflow",
|
||||||
|
"labelField": "label",
|
||||||
|
"softFk": True,
|
||||||
|
},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
templateScope: Optional[str] = Field(
|
templateScope: Optional[str] = Field(
|
||||||
default=None,
|
default=None,
|
||||||
description="Template scope: user, instance, mandate, system (AutoTemplateScope)",
|
description="Template scope: user, instance, mandate, system (AutoTemplateScope)",
|
||||||
json_schema_extra={"frontend_type": "select", "frontend_required": False, "label": "Vorlagen-Bereich"},
|
json_schema_extra={
|
||||||
|
"frontend_type": "select",
|
||||||
|
"frontend_required": False,
|
||||||
|
"label": "Vorlagen-Bereich",
|
||||||
|
"frontend_options": [
|
||||||
|
{"value": "user", "label": "Meine"},
|
||||||
|
{"value": "instance", "label": "Instanz"},
|
||||||
|
{"value": "mandate", "label": "Mandant"},
|
||||||
|
{"value": "system", "label": "System"},
|
||||||
|
],
|
||||||
|
},
|
||||||
)
|
)
|
||||||
sharedReadOnly: bool = Field(
|
sharedReadOnly: bool = Field(
|
||||||
default=False,
|
default=False,
|
||||||
description="If true, shared template is read-only for non-owners",
|
description="If true, shared template is read-only for non-owners",
|
||||||
json_schema_extra={"frontend_type": "checkbox", "frontend_required": False, "label": "Freigabe nur-lesen"},
|
json_schema_extra={
|
||||||
|
"frontend_type": "checkbox",
|
||||||
|
"frontend_required": False,
|
||||||
|
"label": "Freigabe nur-lesen",
|
||||||
|
"frontend_format_labels": ["Ja", "-", "Nein"],
|
||||||
|
},
|
||||||
)
|
)
|
||||||
currentVersionId: Optional[str] = Field(
|
currentVersionId: Optional[str] = Field(
|
||||||
default=None,
|
default=None,
|
||||||
|
|
@ -135,13 +170,18 @@ class AutoWorkflow(PowerOnModel):
|
||||||
"frontend_readonly": True,
|
"frontend_readonly": True,
|
||||||
"frontend_required": False,
|
"frontend_required": False,
|
||||||
"label": "Aktuelle Version",
|
"label": "Aktuelle Version",
|
||||||
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoVersion"},
|
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoVersion", "labelField": "versionNumber"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
active: bool = Field(
|
active: bool = Field(
|
||||||
default=True,
|
default=True,
|
||||||
description="Whether workflow is active",
|
description="Whether workflow is active",
|
||||||
json_schema_extra={"frontend_type": "checkbox", "frontend_required": False, "label": "Aktiv"},
|
json_schema_extra={
|
||||||
|
"frontend_type": "checkbox",
|
||||||
|
"frontend_required": False,
|
||||||
|
"label": "Aktiv",
|
||||||
|
"frontend_format_labels": ["Ja", "-", "Nein"],
|
||||||
|
},
|
||||||
)
|
)
|
||||||
eventId: Optional[str] = Field(
|
eventId: Optional[str] = Field(
|
||||||
default=None,
|
default=None,
|
||||||
|
|
@ -151,7 +191,12 @@ class AutoWorkflow(PowerOnModel):
|
||||||
notifyOnFailure: bool = Field(
|
notifyOnFailure: bool = Field(
|
||||||
default=True,
|
default=True,
|
||||||
description="Send notification (in-app + email) when a run fails",
|
description="Send notification (in-app + email) when a run fails",
|
||||||
json_schema_extra={"frontend_type": "checkbox", "frontend_required": False, "label": "Bei Fehler benachrichtigen"},
|
json_schema_extra={
|
||||||
|
"frontend_type": "checkbox",
|
||||||
|
"frontend_required": False,
|
||||||
|
"label": "Bei Fehler benachrichtigen",
|
||||||
|
"frontend_format_labels": ["Ja", "-", "Nein"],
|
||||||
|
},
|
||||||
)
|
)
|
||||||
# Legacy fields kept for backward compatibility during transition
|
# Legacy fields kept for backward compatibility during transition
|
||||||
graph: Dict[str, Any] = Field(
|
graph: Dict[str, Any] = Field(
|
||||||
|
|
@ -184,7 +229,7 @@ class AutoVersion(PowerOnModel):
|
||||||
"frontend_readonly": True,
|
"frontend_readonly": True,
|
||||||
"frontend_required": True,
|
"frontend_required": True,
|
||||||
"label": "Workflow-ID",
|
"label": "Workflow-ID",
|
||||||
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoWorkflow"},
|
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoWorkflow", "labelField": "label"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
versionNumber: int = Field(
|
versionNumber: int = Field(
|
||||||
|
|
@ -195,7 +240,16 @@ class AutoVersion(PowerOnModel):
|
||||||
status: str = Field(
|
status: str = Field(
|
||||||
default=AutoWorkflowStatus.DRAFT.value,
|
default=AutoWorkflowStatus.DRAFT.value,
|
||||||
description="Version status: draft, published, archived",
|
description="Version status: draft, published, archived",
|
||||||
json_schema_extra={"frontend_type": "select", "frontend_required": False, "label": "Status"},
|
json_schema_extra={
|
||||||
|
"frontend_type": "select",
|
||||||
|
"frontend_required": False,
|
||||||
|
"label": "Status",
|
||||||
|
"frontend_options": [
|
||||||
|
{"value": "draft", "label": "Entwurf"},
|
||||||
|
{"value": "published", "label": "Veröffentlicht"},
|
||||||
|
{"value": "archived", "label": "Archiviert"},
|
||||||
|
],
|
||||||
|
},
|
||||||
)
|
)
|
||||||
graph: Dict[str, Any] = Field(
|
graph: Dict[str, Any] = Field(
|
||||||
default_factory=dict,
|
default_factory=dict,
|
||||||
|
|
@ -210,7 +264,7 @@ class AutoVersion(PowerOnModel):
|
||||||
publishedAt: Optional[float] = Field(
|
publishedAt: Optional[float] = Field(
|
||||||
default=None,
|
default=None,
|
||||||
description="Timestamp when version was published",
|
description="Timestamp when version was published",
|
||||||
json_schema_extra={"frontend_type": "datetime", "frontend_readonly": True, "frontend_required": False, "label": "Veröffentlicht am"},
|
json_schema_extra={"frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False, "label": "Veröffentlicht am"},
|
||||||
)
|
)
|
||||||
publishedBy: Optional[str] = Field(
|
publishedBy: Optional[str] = Field(
|
||||||
default=None,
|
default=None,
|
||||||
|
|
@ -220,7 +274,7 @@ class AutoVersion(PowerOnModel):
|
||||||
"frontend_readonly": True,
|
"frontend_readonly": True,
|
||||||
"frontend_required": False,
|
"frontend_required": False,
|
||||||
"label": "Veröffentlicht von",
|
"label": "Veröffentlicht von",
|
||||||
"fk_target": {"db": "poweron_app", "table": "User"},
|
"fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
@ -243,7 +297,7 @@ class AutoRun(PowerOnModel):
|
||||||
"frontend_readonly": True,
|
"frontend_readonly": True,
|
||||||
"frontend_required": True,
|
"frontend_required": True,
|
||||||
"label": "Workflow-ID",
|
"label": "Workflow-ID",
|
||||||
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoWorkflow"},
|
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoWorkflow", "labelField": "label"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
label: Optional[str] = Field(
|
label: Optional[str] = Field(
|
||||||
|
|
@ -259,10 +313,7 @@ class AutoRun(PowerOnModel):
|
||||||
"frontend_readonly": True,
|
"frontend_readonly": True,
|
||||||
"frontend_required": False,
|
"frontend_required": False,
|
||||||
"label": "Mandanten-ID",
|
"label": "Mandanten-ID",
|
||||||
"frontend_fk_source": "/api/mandates/",
|
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||||
"frontend_fk_display_field": "label",
|
|
||||||
"fk_model": "Mandate",
|
|
||||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
ownerId: Optional[str] = Field(
|
ownerId: Optional[str] = Field(
|
||||||
|
|
@ -273,7 +324,7 @@ class AutoRun(PowerOnModel):
|
||||||
"frontend_readonly": True,
|
"frontend_readonly": True,
|
||||||
"frontend_required": False,
|
"frontend_required": False,
|
||||||
"label": "Auslöser",
|
"label": "Auslöser",
|
||||||
"fk_target": {"db": "poweron_app", "table": "User"},
|
"fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
versionId: Optional[str] = Field(
|
versionId: Optional[str] = Field(
|
||||||
|
|
@ -284,13 +335,24 @@ class AutoRun(PowerOnModel):
|
||||||
"frontend_readonly": True,
|
"frontend_readonly": True,
|
||||||
"frontend_required": False,
|
"frontend_required": False,
|
||||||
"label": "Versions-ID",
|
"label": "Versions-ID",
|
||||||
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoVersion"},
|
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoVersion", "labelField": "versionNumber"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
status: str = Field(
|
status: str = Field(
|
||||||
default=AutoRunStatus.RUNNING.value,
|
default=AutoRunStatus.RUNNING.value,
|
||||||
description="Status: running, paused, completed, failed, cancelled",
|
description="Status: running, paused, completed, failed, cancelled",
|
||||||
json_schema_extra={"frontend_type": "text", "frontend_required": False, "label": "Status"},
|
json_schema_extra={
|
||||||
|
"frontend_type": "select",
|
||||||
|
"frontend_required": False,
|
||||||
|
"label": "Status",
|
||||||
|
"frontend_options": [
|
||||||
|
{"value": "running", "label": "Läuft"},
|
||||||
|
{"value": "paused", "label": "Pausiert"},
|
||||||
|
{"value": "completed", "label": "Abgeschlossen"},
|
||||||
|
{"value": "failed", "label": "Fehlgeschlagen"},
|
||||||
|
{"value": "cancelled", "label": "Abgebrochen"},
|
||||||
|
],
|
||||||
|
},
|
||||||
)
|
)
|
||||||
trigger: Dict[str, Any] = Field(
|
trigger: Dict[str, Any] = Field(
|
||||||
default_factory=dict,
|
default_factory=dict,
|
||||||
|
|
@ -300,12 +362,12 @@ class AutoRun(PowerOnModel):
|
||||||
startedAt: Optional[float] = Field(
|
startedAt: Optional[float] = Field(
|
||||||
default=None,
|
default=None,
|
||||||
description="Run start timestamp",
|
description="Run start timestamp",
|
||||||
json_schema_extra={"frontend_type": "datetime", "frontend_readonly": True, "frontend_required": False, "label": "Gestartet am"},
|
json_schema_extra={"frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False, "label": "Gestartet am"},
|
||||||
)
|
)
|
||||||
completedAt: Optional[float] = Field(
|
completedAt: Optional[float] = Field(
|
||||||
default=None,
|
default=None,
|
||||||
description="Run completion timestamp",
|
description="Run completion timestamp",
|
||||||
json_schema_extra={"frontend_type": "datetime", "frontend_readonly": True, "frontend_required": False, "label": "Abgeschlossen am"},
|
json_schema_extra={"frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False, "label": "Abgeschlossen am"},
|
||||||
)
|
)
|
||||||
nodeOutputs: Dict[str, Any] = Field(
|
nodeOutputs: Dict[str, Any] = Field(
|
||||||
default_factory=dict,
|
default_factory=dict,
|
||||||
|
|
@ -357,7 +419,7 @@ class AutoStepLog(PowerOnModel):
|
||||||
"frontend_readonly": True,
|
"frontend_readonly": True,
|
||||||
"frontend_required": True,
|
"frontend_required": True,
|
||||||
"label": "Lauf-ID",
|
"label": "Lauf-ID",
|
||||||
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoRun"},
|
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoRun", "labelField": "label"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
nodeId: str = Field(
|
nodeId: str = Field(
|
||||||
|
|
@ -371,7 +433,18 @@ class AutoStepLog(PowerOnModel):
|
||||||
status: str = Field(
|
status: str = Field(
|
||||||
default=AutoStepStatus.PENDING.value,
|
default=AutoStepStatus.PENDING.value,
|
||||||
description="Step status: pending, running, completed, failed, skipped",
|
description="Step status: pending, running, completed, failed, skipped",
|
||||||
json_schema_extra={"frontend_type": "text", "frontend_required": False, "label": "Status"},
|
json_schema_extra={
|
||||||
|
"frontend_type": "select",
|
||||||
|
"frontend_required": False,
|
||||||
|
"label": "Status",
|
||||||
|
"frontend_options": [
|
||||||
|
{"value": "pending", "label": "Wartend"},
|
||||||
|
{"value": "running", "label": "Läuft"},
|
||||||
|
{"value": "completed", "label": "Abgeschlossen"},
|
||||||
|
{"value": "failed", "label": "Fehlgeschlagen"},
|
||||||
|
{"value": "skipped", "label": "Übersprungen"},
|
||||||
|
],
|
||||||
|
},
|
||||||
)
|
)
|
||||||
inputSnapshot: Dict[str, Any] = Field(
|
inputSnapshot: Dict[str, Any] = Field(
|
||||||
default_factory=dict,
|
default_factory=dict,
|
||||||
|
|
@ -391,12 +464,12 @@ class AutoStepLog(PowerOnModel):
|
||||||
startedAt: Optional[float] = Field(
|
startedAt: Optional[float] = Field(
|
||||||
default=None,
|
default=None,
|
||||||
description="Step start timestamp",
|
description="Step start timestamp",
|
||||||
json_schema_extra={"frontend_type": "datetime", "frontend_readonly": True, "frontend_required": False, "label": "Gestartet am"},
|
json_schema_extra={"frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False, "label": "Gestartet am"},
|
||||||
)
|
)
|
||||||
completedAt: Optional[float] = Field(
|
completedAt: Optional[float] = Field(
|
||||||
default=None,
|
default=None,
|
||||||
description="Step completion timestamp",
|
description="Step completion timestamp",
|
||||||
json_schema_extra={"frontend_type": "datetime", "frontend_readonly": True, "frontend_required": False, "label": "Abgeschlossen am"},
|
json_schema_extra={"frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False, "label": "Abgeschlossen am"},
|
||||||
)
|
)
|
||||||
durationMs: Optional[int] = Field(
|
durationMs: Optional[int] = Field(
|
||||||
default=None,
|
default=None,
|
||||||
|
|
@ -433,7 +506,7 @@ class AutoTask(PowerOnModel):
|
||||||
"frontend_readonly": True,
|
"frontend_readonly": True,
|
||||||
"frontend_required": True,
|
"frontend_required": True,
|
||||||
"label": "Lauf-ID",
|
"label": "Lauf-ID",
|
||||||
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoRun"},
|
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoRun", "labelField": "label"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
workflowId: str = Field(
|
workflowId: str = Field(
|
||||||
|
|
@ -443,7 +516,7 @@ class AutoTask(PowerOnModel):
|
||||||
"frontend_readonly": True,
|
"frontend_readonly": True,
|
||||||
"frontend_required": True,
|
"frontend_required": True,
|
||||||
"label": "Workflow-ID",
|
"label": "Workflow-ID",
|
||||||
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoWorkflow"},
|
"fk_target": {"db": "poweron_graphicaleditor", "table": "AutoWorkflow", "labelField": "label"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
nodeId: str = Field(
|
nodeId: str = Field(
|
||||||
|
|
@ -467,13 +540,23 @@ class AutoTask(PowerOnModel):
|
||||||
"frontend_readonly": False,
|
"frontend_readonly": False,
|
||||||
"frontend_required": False,
|
"frontend_required": False,
|
||||||
"label": "Zugewiesen an",
|
"label": "Zugewiesen an",
|
||||||
"fk_target": {"db": "poweron_app", "table": "User"},
|
"fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
status: str = Field(
|
status: str = Field(
|
||||||
default=AutoTaskStatus.PENDING.value,
|
default=AutoTaskStatus.PENDING.value,
|
||||||
description="Status: pending, completed, cancelled, expired",
|
description="Status: pending, completed, cancelled, expired",
|
||||||
json_schema_extra={"frontend_type": "text", "frontend_required": False, "label": "Status"},
|
json_schema_extra={
|
||||||
|
"frontend_type": "select",
|
||||||
|
"frontend_required": False,
|
||||||
|
"label": "Status",
|
||||||
|
"frontend_options": [
|
||||||
|
{"value": "pending", "label": "Wartend"},
|
||||||
|
{"value": "completed", "label": "Abgeschlossen"},
|
||||||
|
{"value": "cancelled", "label": "Abgebrochen"},
|
||||||
|
{"value": "expired", "label": "Abgelaufen"},
|
||||||
|
],
|
||||||
|
},
|
||||||
)
|
)
|
||||||
result: Optional[Dict[str, Any]] = Field(
|
result: Optional[Dict[str, Any]] = Field(
|
||||||
default=None,
|
default=None,
|
||||||
|
|
@ -483,7 +566,7 @@ class AutoTask(PowerOnModel):
|
||||||
expiresAt: Optional[float] = Field(
|
expiresAt: Optional[float] = Field(
|
||||||
default=None,
|
default=None,
|
||||||
description="Expiration timestamp for the task",
|
description="Expiration timestamp for the task",
|
||||||
json_schema_extra={"frontend_type": "datetime", "frontend_required": False, "label": "Läuft ab am"},
|
json_schema_extra={"frontend_type": "timestamp", "frontend_required": False, "label": "Läuft ab am"},
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -7,21 +7,35 @@ Uses PostgreSQL poweron_graphicaleditor database (Greenfield).
|
||||||
|
|
||||||
import base64
|
import base64
|
||||||
import logging
|
import logging
|
||||||
|
import time
|
||||||
import uuid
|
import uuid
|
||||||
from typing import Dict, Any, List, Optional
|
from typing import Dict, Any, List, Optional
|
||||||
|
|
||||||
|
|
||||||
def _make_json_serializable(obj: Any) -> Any:
|
_INTERNAL_SKIP_KEYS = frozenset({"_context", "_orderedNodes"})
|
||||||
|
|
||||||
|
|
||||||
|
def _make_json_serializable(obj: Any, _depth: int = 0) -> Any:
|
||||||
"""
|
"""
|
||||||
Recursively convert bytes to base64 strings so structures can be JSON-serialized
|
Recursively convert bytes to base64 strings so structures can be JSON-serialized
|
||||||
for storage in JSONB columns.
|
for storage in JSONB columns.
|
||||||
|
|
||||||
|
Internal runtime keys (_context, _orderedNodes) are skipped — they hold live
|
||||||
|
Python objects (including back-references to nodeOutputs) and must never be
|
||||||
|
stored. A depth guard prevents runaway recursion on unexpected circular refs.
|
||||||
"""
|
"""
|
||||||
|
if _depth > 50:
|
||||||
|
return None
|
||||||
if isinstance(obj, bytes):
|
if isinstance(obj, bytes):
|
||||||
return base64.b64encode(obj).decode("ascii")
|
return base64.b64encode(obj).decode("ascii")
|
||||||
if isinstance(obj, dict):
|
if isinstance(obj, dict):
|
||||||
return {k: _make_json_serializable(v) for k, v in obj.items()}
|
return {
|
||||||
|
k: _make_json_serializable(v, _depth + 1)
|
||||||
|
for k, v in obj.items()
|
||||||
|
if k not in _INTERNAL_SKIP_KEYS
|
||||||
|
}
|
||||||
if isinstance(obj, list):
|
if isinstance(obj, list):
|
||||||
return [_make_json_serializable(v) for v in obj]
|
return [_make_json_serializable(v, _depth + 1) for v in obj]
|
||||||
return obj
|
return obj
|
||||||
|
|
||||||
from modules.datamodels.datamodelUam import User
|
from modules.datamodels.datamodelUam import User
|
||||||
|
|
@ -216,6 +230,8 @@ class GraphicalEditorObjects:
|
||||||
data["id"] = str(uuid.uuid4())
|
data["id"] = str(uuid.uuid4())
|
||||||
data["mandateId"] = self.mandateId
|
data["mandateId"] = self.mandateId
|
||||||
data["featureInstanceId"] = self.featureInstanceId
|
data["featureInstanceId"] = self.featureInstanceId
|
||||||
|
if not data.get("targetFeatureInstanceId") and not data.get("isTemplate"):
|
||||||
|
data["targetFeatureInstanceId"] = self.featureInstanceId
|
||||||
if "active" not in data or data.get("active") is None:
|
if "active" not in data or data.get("active") is None:
|
||||||
data["active"] = True
|
data["active"] = True
|
||||||
data["invocations"] = normalize_invocations_list(data.get("invocations"))
|
data["invocations"] = normalize_invocations_list(data.get("invocations"))
|
||||||
|
|
@ -278,6 +294,7 @@ class GraphicalEditorObjects:
|
||||||
"workflowId": workflowId,
|
"workflowId": workflowId,
|
||||||
"label": label,
|
"label": label,
|
||||||
"status": "running",
|
"status": "running",
|
||||||
|
"startedAt": time.time(),
|
||||||
"nodeOutputs": _make_json_serializable(nodeOutputs or {}),
|
"nodeOutputs": _make_json_serializable(nodeOutputs or {}),
|
||||||
"currentNodeId": None,
|
"currentNodeId": None,
|
||||||
"context": ctx,
|
"context": ctx,
|
||||||
|
|
@ -314,6 +331,8 @@ class GraphicalEditorObjects:
|
||||||
updates = {}
|
updates = {}
|
||||||
if status is not None:
|
if status is not None:
|
||||||
updates["status"] = status
|
updates["status"] = status
|
||||||
|
if status in ("completed", "failed", "stopped", "cancelled") and not run.get("completedAt"):
|
||||||
|
updates["completedAt"] = time.time()
|
||||||
if nodeOutputs is not None:
|
if nodeOutputs is not None:
|
||||||
updates["nodeOutputs"] = _make_json_serializable(nodeOutputs)
|
updates["nodeOutputs"] = _make_json_serializable(nodeOutputs)
|
||||||
if currentNodeId is not None:
|
if currentNodeId is not None:
|
||||||
|
|
|
||||||
172
modules/features/graphicalEditor/nodeAdapter.py
Normal file
172
modules/features/graphicalEditor/nodeAdapter.py
Normal file
|
|
@ -0,0 +1,172 @@
|
||||||
|
# Copyright (c) 2025 Patrick Motsch
|
||||||
|
# All rights reserved.
|
||||||
|
"""
|
||||||
|
Schicht-3 Adapter Layer — projects Schicht-2 Actions into Editor-Node form.
|
||||||
|
|
||||||
|
Architecture (see wiki/c-work/1-plan/2026-04-typed-action-architecture.md):
|
||||||
|
- Schicht 1: Types Catalog (portTypes.PORT_TYPE_CATALOG)
|
||||||
|
- Schicht 2: Methods/Actions (modules/workflows/methods/method*) - source of truth
|
||||||
|
for Backend capabilities (parameter types, output types).
|
||||||
|
- Schicht 3: Adapters (this module) - Editor-Node + AI-Agent-Tool wrappers around
|
||||||
|
Actions. References Action signature, never duplicates types.
|
||||||
|
- Schicht 4: Workflow-Bindings + Agent-Tool-Calls (instance-level wiring).
|
||||||
|
|
||||||
|
This module defines the in-code Adapter representation (NodeAdapter,
|
||||||
|
UserParamMapping) and the projection helpers that convert between the
|
||||||
|
legacy node-dict wire format and the typed Adapter view.
|
||||||
|
|
||||||
|
Wire-format compatibility: the legacy dicts in nodeDefinitions/*.py remain
|
||||||
|
the wire format consumed by the frontend until Phase 4. This module exposes
|
||||||
|
an Adapter VIEW over those dicts so the validator and AI-tool generator can
|
||||||
|
operate on a clean, typed structure without breaking consumers.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from dataclasses import dataclass, field
|
||||||
|
from typing import Any, Dict, List, Mapping, Optional
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True)
|
||||||
|
class UserParamMapping:
|
||||||
|
"""Maps an Action argument into a Node's user-facing parameter.
|
||||||
|
|
||||||
|
The Action signature is the source of truth for type/required/description.
|
||||||
|
This mapping carries Editor-specific overrides (label, UI hints, conditional
|
||||||
|
visibility) but never re-declares the type.
|
||||||
|
"""
|
||||||
|
|
||||||
|
actionArg: str
|
||||||
|
label: Optional[Any] = None
|
||||||
|
description: Optional[Any] = None
|
||||||
|
uiHint: Optional[str] = None
|
||||||
|
frontendOptions: Optional[Any] = None
|
||||||
|
visibleWhen: Optional[Dict[str, Any]] = None
|
||||||
|
defaultValue: Any = None
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True)
|
||||||
|
class NodeAdapter:
|
||||||
|
"""Schicht-3 Editor-Node adapter — binds to a Schicht-2 Action.
|
||||||
|
|
||||||
|
All type information for `userParams` is inherited from the bound Action.
|
||||||
|
The adapter only carries Editor-specific concerns (UI labels, port topology,
|
||||||
|
icon/color metadata).
|
||||||
|
"""
|
||||||
|
|
||||||
|
nodeId: str
|
||||||
|
bindsAction: str
|
||||||
|
category: str
|
||||||
|
label: Any
|
||||||
|
description: Any
|
||||||
|
userParams: List[UserParamMapping] = field(default_factory=list)
|
||||||
|
contextParams: Dict[str, str] = field(default_factory=dict)
|
||||||
|
inputs: int = 1
|
||||||
|
outputs: int = 1
|
||||||
|
inputAccepts: List[List[str]] = field(default_factory=list)
|
||||||
|
outputLabels: Optional[List[Any]] = None
|
||||||
|
meta: Dict[str, Any] = field(default_factory=dict)
|
||||||
|
|
||||||
|
|
||||||
|
def _isMethodBoundNode(node: Mapping[str, Any]) -> bool:
|
||||||
|
"""True if a legacy node dict is bound to a Schicht-2 Action."""
|
||||||
|
return bool(node.get("_method") and node.get("_action"))
|
||||||
|
|
||||||
|
|
||||||
|
def bindsActionFromLegacy(node: Mapping[str, Any]) -> Optional[str]:
|
||||||
|
"""Build the canonical 'method.action' identifier from a legacy node dict.
|
||||||
|
|
||||||
|
Returns None for framework-primitive nodes (trigger/flow/input/data).
|
||||||
|
"""
|
||||||
|
method = node.get("_method")
|
||||||
|
action = node.get("_action")
|
||||||
|
if not method or not action:
|
||||||
|
return None
|
||||||
|
return f"{method}.{action}"
|
||||||
|
|
||||||
|
|
||||||
|
def _userParamFromLegacyParam(legacyParam: Mapping[str, Any]) -> UserParamMapping:
|
||||||
|
"""Project a legacy parameter dict into a UserParamMapping view.
|
||||||
|
|
||||||
|
The view carries only Editor-overrides; type/required come from the Action.
|
||||||
|
"""
|
||||||
|
return UserParamMapping(
|
||||||
|
actionArg=str(legacyParam.get("name", "")),
|
||||||
|
label=legacyParam.get("label"),
|
||||||
|
description=legacyParam.get("description"),
|
||||||
|
uiHint=legacyParam.get("frontendType"),
|
||||||
|
frontendOptions=legacyParam.get("frontendOptions"),
|
||||||
|
visibleWhen=_extractVisibleWhen(legacyParam.get("frontendOptions")),
|
||||||
|
defaultValue=legacyParam.get("default"),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _extractVisibleWhen(frontendOptions: Any) -> Optional[Dict[str, Any]]:
|
||||||
|
"""Extract conditional-visibility hint from legacy frontendOptions.showWhen."""
|
||||||
|
if not isinstance(frontendOptions, dict):
|
||||||
|
return None
|
||||||
|
dependsOn = frontendOptions.get("dependsOn")
|
||||||
|
showWhen = frontendOptions.get("showWhen")
|
||||||
|
if not dependsOn or not showWhen:
|
||||||
|
return None
|
||||||
|
return {"actionArg": str(dependsOn), "in": list(showWhen) if isinstance(showWhen, (list, tuple)) else [showWhen]}
|
||||||
|
|
||||||
|
|
||||||
|
def _adapterFromLegacyNode(node: Mapping[str, Any]) -> Optional[NodeAdapter]:
|
||||||
|
"""Build a NodeAdapter view from a legacy node dict.
|
||||||
|
|
||||||
|
Returns None for framework-primitive nodes (no _method/_action binding).
|
||||||
|
Pure projection — no validation, no Action-signature lookup.
|
||||||
|
"""
|
||||||
|
if not _isMethodBoundNode(node):
|
||||||
|
return None
|
||||||
|
|
||||||
|
bindsAction = bindsActionFromLegacy(node)
|
||||||
|
if not bindsAction:
|
||||||
|
return None
|
||||||
|
|
||||||
|
inputAccepts = _projectInputAccepts(node)
|
||||||
|
|
||||||
|
return NodeAdapter(
|
||||||
|
nodeId=str(node.get("id", "")),
|
||||||
|
bindsAction=bindsAction,
|
||||||
|
category=str(node.get("category", "")),
|
||||||
|
label=node.get("label", ""),
|
||||||
|
description=node.get("description", ""),
|
||||||
|
userParams=[_userParamFromLegacyParam(p) for p in (node.get("parameters") or [])],
|
||||||
|
contextParams={},
|
||||||
|
inputs=int(node.get("inputs", 1)),
|
||||||
|
outputs=int(node.get("outputs", 1)),
|
||||||
|
inputAccepts=inputAccepts,
|
||||||
|
outputLabels=node.get("outputLabels"),
|
||||||
|
meta=dict(node.get("meta") or {}),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _projectInputAccepts(node: Mapping[str, Any]) -> List[List[str]]:
|
||||||
|
"""Convert legacy `inputPorts` dict-of-dicts into a per-port `accepts` list."""
|
||||||
|
inputPorts = node.get("inputPorts") or {}
|
||||||
|
if not isinstance(inputPorts, dict):
|
||||||
|
return []
|
||||||
|
inputs = int(node.get("inputs", 0) or 0)
|
||||||
|
if inputs <= 0:
|
||||||
|
return []
|
||||||
|
out: List[List[str]] = []
|
||||||
|
for portIdx in range(inputs):
|
||||||
|
portCfg = inputPorts.get(portIdx) or inputPorts.get(str(portIdx)) or {}
|
||||||
|
accepts = portCfg.get("accepts") if isinstance(portCfg, dict) else None
|
||||||
|
out.append(list(accepts) if isinstance(accepts, (list, tuple)) else [])
|
||||||
|
return out
|
||||||
|
|
||||||
|
|
||||||
|
def _projectAllAdapters(staticNodes: List[Mapping[str, Any]]) -> Dict[str, NodeAdapter]:
|
||||||
|
"""Project a list of legacy node dicts into a {nodeId: NodeAdapter} map.
|
||||||
|
|
||||||
|
Framework-primitive nodes (no Action binding) are silently skipped.
|
||||||
|
"""
|
||||||
|
out: Dict[str, NodeAdapter] = {}
|
||||||
|
for node in staticNodes:
|
||||||
|
adapter = _adapterFromLegacyNode(node)
|
||||||
|
if adapter is not None:
|
||||||
|
out[adapter.nodeId] = adapter
|
||||||
|
return out
|
||||||
|
|
@ -3,6 +3,136 @@
|
||||||
|
|
||||||
from modules.shared.i18nRegistry import t
|
from modules.shared.i18nRegistry import t
|
||||||
|
|
||||||
|
from modules.features.graphicalEditor.nodeDefinitions.contextPickerHelp import (
|
||||||
|
CONTEXT_BUILDER_PARAM_DESCRIPTION,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Shared authoritative DataPicker paths (same handover idea as ``context.extractContent`` outputPorts).
|
||||||
|
ACTION_RESULT_DATA_PICK_OPTIONS = [
|
||||||
|
{
|
||||||
|
"path": ["documents", 0, "documentData"],
|
||||||
|
"pickerLabel": t("Gesamter Inhalt"),
|
||||||
|
"detail": t(
|
||||||
|
"Strukturiertes Handover als JSON inklusive aller Textteile "
|
||||||
|
"und Verweisen auf ausgelagerte Bilder."
|
||||||
|
),
|
||||||
|
"recommended": True,
|
||||||
|
"type": "Any",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"path": ["response"],
|
||||||
|
"pickerLabel": t("Nur Text"),
|
||||||
|
"detail": t("Verketteter Klartext aus allen erkannten Textteilen."),
|
||||||
|
"recommended": True,
|
||||||
|
"type": "str",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"path": ["imageDocumentsOnly"],
|
||||||
|
"pickerLabel": t("Nur Bilder"),
|
||||||
|
"detail": t("Nur die extrahierten Bilddokumente als Liste, ohne JSON-Handover."),
|
||||||
|
"recommended": False,
|
||||||
|
"type": "List[ActionDocument]",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"path": ["documents"],
|
||||||
|
"pickerLabel": t("Alle Dateitypen"),
|
||||||
|
"detail": t("Alle Ausgabedokumente nacheinander: JSON-Handover und Bilder."),
|
||||||
|
"recommended": False,
|
||||||
|
"type": "List[ActionDocument]",
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
|
AI_RESULT_DATA_PICK_OPTIONS = [
|
||||||
|
{
|
||||||
|
"path": ["documents", 0, "documentData"],
|
||||||
|
"pickerLabel": t("Gesamter Inhalt"),
|
||||||
|
"detail": t(
|
||||||
|
"Hauptausgabedatei oder strukturierter Inhalt von ``documents[0]`` "
|
||||||
|
"(z. B. erzeugtes Dokument, JSON-Handover)."
|
||||||
|
),
|
||||||
|
"recommended": True,
|
||||||
|
"type": "Any",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"path": ["response"],
|
||||||
|
"pickerLabel": t("Nur Text"),
|
||||||
|
"detail": t("Modell-Antwort als reiner Fließtext (ohne eingebettete Bildbytes)."),
|
||||||
|
"recommended": True,
|
||||||
|
"type": "str",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"path": ["imageDocumentsOnly"],
|
||||||
|
"pickerLabel": t("Nur Bilder"),
|
||||||
|
"detail": t("Nur Bild-Dokumente aus ``documents`` (ohne erstes Nicht-Bild-Artefakt, falls gesetzt)."),
|
||||||
|
"recommended": False,
|
||||||
|
"type": "List[ActionDocument]",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"path": ["documents"],
|
||||||
|
"pickerLabel": t("Alle Ausgabedateien"),
|
||||||
|
"detail": t("Alle Dokumente der KI-Antwort: erzeugte Dateien, Bilder, Anhänge."),
|
||||||
|
"recommended": False,
|
||||||
|
"type": "List[Document]",
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
|
DOCUMENT_LIST_DATA_PICK_OPTIONS = [
|
||||||
|
{
|
||||||
|
"path": ["documents"],
|
||||||
|
"pickerLabel": t("Alle Dokumente"),
|
||||||
|
"detail": t("Die vollständige Dokumentenliste."),
|
||||||
|
"recommended": True,
|
||||||
|
"type": "List[Document]",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"path": ["documents", 0],
|
||||||
|
"pickerLabel": t("Erstes Dokument"),
|
||||||
|
"detail": t("Metadaten und Pfade des ersten Listeneintrags."),
|
||||||
|
"recommended": False,
|
||||||
|
"type": "Document",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"path": ["count"],
|
||||||
|
"pickerLabel": t("Anzahl"),
|
||||||
|
"detail": t("Anzahl der Dokumente."),
|
||||||
|
"recommended": False,
|
||||||
|
"type": "int",
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
|
CONSOLIDATE_RESULT_DATA_PICK_OPTIONS = [
|
||||||
|
{
|
||||||
|
"path": ["result"],
|
||||||
|
"pickerLabel": t("Konsolidiertes Ergebnis"),
|
||||||
|
"detail": t("Text oder Struktur nach Konsolidierung."),
|
||||||
|
"recommended": True,
|
||||||
|
"type": "Any",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"path": ["mode"],
|
||||||
|
"pickerLabel": t("Modus"),
|
||||||
|
"detail": t("Verwendeter Konsolidierungsmodus."),
|
||||||
|
"recommended": False,
|
||||||
|
"type": "str",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"path": ["count"],
|
||||||
|
"pickerLabel": t("Anzahl"),
|
||||||
|
"detail": t("Anzahl zusammengeführter Elemente."),
|
||||||
|
"recommended": False,
|
||||||
|
"type": "int",
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
|
_AI_COMMON_PARAMS = [
|
||||||
|
{"name": "requireNeutralization", "type": "bool", "required": False,
|
||||||
|
"frontendType": "checkbox", "default": False,
|
||||||
|
"description": t("Eingaben fuer diesen Call neutralisieren")},
|
||||||
|
{"name": "allowedModels", "type": "array", "required": False,
|
||||||
|
"frontendType": "modelMultiSelect", "default": [],
|
||||||
|
"description": t("Erlaubte LLM-Modelle (leer = alle erlaubten)")},
|
||||||
|
]
|
||||||
|
|
||||||
AI_NODES = [
|
AI_NODES = [
|
||||||
{
|
{
|
||||||
"id": "ai.prompt",
|
"id": "ai.prompt",
|
||||||
|
|
@ -10,22 +140,27 @@ AI_NODES = [
|
||||||
"label": t("Prompt"),
|
"label": t("Prompt"),
|
||||||
"description": t("Prompt eingeben und KI führt aus"),
|
"description": t("Prompt eingeben und KI führt aus"),
|
||||||
"parameters": [
|
"parameters": [
|
||||||
{"name": "aiPrompt", "type": "string", "required": True, "frontendType": "textarea",
|
{"name": "aiPrompt", "type": "str", "required": True, "frontendType": "templateTextarea",
|
||||||
"description": t("KI-Prompt")},
|
"description": t("KI-Prompt")},
|
||||||
{"name": "outputFormat", "type": "string", "required": False, "frontendType": "select",
|
{"name": "resultType", "type": "str", "required": False, "frontendType": "select",
|
||||||
"frontendOptions": {"options": ["text", "json", "emailDraft"]},
|
"frontendOptions": {"options": ["txt", "json", "md", "csv", "xml", "html", "pdf", "docx", "xlsx", "pptx", "png", "jpg"]},
|
||||||
"description": t("Ausgabeformat"), "default": "text"},
|
"description": t("Ausgabeformat"), "default": "txt"},
|
||||||
{"name": "documentList", "type": "string", "required": False, "frontendType": "hidden",
|
{"name": "documentList", "type": "DocumentList", "required": False, "frontendType": "hidden",
|
||||||
"description": t("Dokumentenliste (via Wire oder DataRef)"), "default": ""},
|
"description": t("Dokumente aus vorherigen Schritten"), "default": "",
|
||||||
{"name": "context", "type": "string", "required": False, "frontendType": "hidden",
|
"graphInherit": {"port": 0, "kind": "documentListWire"}},
|
||||||
"description": t("Kontext-Daten (via Wire oder DataRef)"), "default": ""},
|
{"name": "context", "type": "Any", "required": False, "frontendType": "contextBuilder",
|
||||||
{"name": "simpleMode", "type": "boolean", "required": False, "frontendType": "checkbox",
|
"description": CONTEXT_BUILDER_PARAM_DESCRIPTION, "default": "",
|
||||||
|
"graphInherit": {"port": 0, "kind": "primaryTextRef"}},
|
||||||
|
{"name": "simpleMode", "type": "bool", "required": False, "frontendType": "checkbox",
|
||||||
"description": t("Einfacher Modus"), "default": True},
|
"description": t("Einfacher Modus"), "default": True},
|
||||||
],
|
] + _AI_COMMON_PARAMS,
|
||||||
"inputs": 1,
|
"inputs": 1,
|
||||||
"outputs": 1,
|
"outputs": 1,
|
||||||
"inputPorts": {0: {"accepts": ["DocumentList", "AiResult", "TextResult", "Transit"]}},
|
"inputPorts": {0: {"accepts": [
|
||||||
"outputPorts": {0: {"schema": "AiResult"}},
|
"FormPayload", "DocumentList", "AiResult", "TextResult", "Transit", "LoopItem", "ActionResult",
|
||||||
|
]}},
|
||||||
|
"outputPorts": {0: {"schema": "AiResult", "dataPickOptions": AI_RESULT_DATA_PICK_OPTIONS}},
|
||||||
|
"paramMappers": ["aiPromptLegacyAlias"],
|
||||||
"meta": {"icon": "mdi-robot", "color": "#9C27B0", "usesAi": True},
|
"meta": {"icon": "mdi-robot", "color": "#9C27B0", "usesAi": True},
|
||||||
"_method": "ai",
|
"_method": "ai",
|
||||||
"_action": "process",
|
"_action": "process",
|
||||||
|
|
@ -36,13 +171,21 @@ AI_NODES = [
|
||||||
"label": t("Web-Recherche"),
|
"label": t("Web-Recherche"),
|
||||||
"description": t("Recherche im Web"),
|
"description": t("Recherche im Web"),
|
||||||
"parameters": [
|
"parameters": [
|
||||||
{"name": "prompt", "type": "string", "required": True, "frontendType": "textarea",
|
{"name": "prompt", "type": "str", "required": True, "frontendType": "textarea",
|
||||||
"description": t("Recherche-Anfrage")},
|
"description": t("Recherche-Anfrage")},
|
||||||
],
|
{"name": "context", "type": "Any", "required": False, "frontendType": "contextBuilder",
|
||||||
|
"description": CONTEXT_BUILDER_PARAM_DESCRIPTION, "default": "",
|
||||||
|
"graphInherit": {"port": 0, "kind": "primaryTextRef"}},
|
||||||
|
{"name": "documentList", "type": "DocumentList", "required": False, "frontendType": "hidden",
|
||||||
|
"description": t("Dokumente aus vorherigen Schritten"), "default": "",
|
||||||
|
"graphInherit": {"port": 0, "kind": "documentListWire"}},
|
||||||
|
] + _AI_COMMON_PARAMS,
|
||||||
"inputs": 1,
|
"inputs": 1,
|
||||||
"outputs": 1,
|
"outputs": 1,
|
||||||
"inputPorts": {0: {"accepts": ["Transit"]}},
|
"inputPorts": {0: {"accepts": [
|
||||||
"outputPorts": {0: {"schema": "AiResult"}},
|
"FormPayload", "Transit", "AiResult", "DocumentList", "ActionResult", "LoopItem", "TextResult",
|
||||||
|
]}},
|
||||||
|
"outputPorts": {0: {"schema": "AiResult", "dataPickOptions": AI_RESULT_DATA_PICK_OPTIONS}},
|
||||||
"meta": {"icon": "mdi-magnify", "color": "#9C27B0", "usesAi": True},
|
"meta": {"icon": "mdi-magnify", "color": "#9C27B0", "usesAi": True},
|
||||||
"_method": "ai",
|
"_method": "ai",
|
||||||
"_action": "webResearch",
|
"_action": "webResearch",
|
||||||
|
|
@ -53,14 +196,23 @@ AI_NODES = [
|
||||||
"label": t("Dokument zusammenfassen"),
|
"label": t("Dokument zusammenfassen"),
|
||||||
"description": t("Dokumentinhalt zusammenfassen"),
|
"description": t("Dokumentinhalt zusammenfassen"),
|
||||||
"parameters": [
|
"parameters": [
|
||||||
{"name": "summaryLength", "type": "string", "required": False, "frontendType": "select",
|
{"name": "documentList", "type": "DocumentList", "required": True, "frontendType": "dataRef",
|
||||||
"frontendOptions": {"options": ["short", "medium", "long"]},
|
"description": t("Dokumente aus vorherigen Schritten"),
|
||||||
"description": t("Kurz, mittel oder lang"), "default": "medium"},
|
"graphInherit": {"port": 0, "kind": "documentListWire"}},
|
||||||
],
|
{"name": "resultType", "type": "str", "required": False, "frontendType": "select",
|
||||||
|
"frontendOptions": {"options": ["txt", "json", "md", "csv", "xml", "html", "pdf", "docx", "xlsx", "pptx", "png", "jpg"]},
|
||||||
|
"description": t("Ausgabeformat"), "default": "txt"},
|
||||||
|
{"name": "summaryLength", "type": "str", "required": False, "frontendType": "select",
|
||||||
|
"frontendOptions": {"options": ["brief", "medium", "detailed"]},
|
||||||
|
"description": t("Kurz, mittel oder ausführlich"), "default": "medium"},
|
||||||
|
{"name": "folderId", "type": "str", "required": False, "frontendType": "userFileFolder",
|
||||||
|
"description": t("Zielordner in Meine Dateien"),
|
||||||
|
"default": ""},
|
||||||
|
] + _AI_COMMON_PARAMS,
|
||||||
"inputs": 1,
|
"inputs": 1,
|
||||||
"outputs": 1,
|
"outputs": 1,
|
||||||
"inputPorts": {0: {"accepts": ["DocumentList", "Transit"]}},
|
"inputPorts": {0: {"accepts": ["DocumentList", "Transit", "LoopItem"]}},
|
||||||
"outputPorts": {0: {"schema": "AiResult"}},
|
"outputPorts": {0: {"schema": "AiResult", "dataPickOptions": AI_RESULT_DATA_PICK_OPTIONS}},
|
||||||
"meta": {"icon": "mdi-file-document-outline", "color": "#9C27B0", "usesAi": True},
|
"meta": {"icon": "mdi-file-document-outline", "color": "#9C27B0", "usesAi": True},
|
||||||
"_method": "ai",
|
"_method": "ai",
|
||||||
"_action": "summarizeDocument",
|
"_action": "summarizeDocument",
|
||||||
|
|
@ -71,14 +223,22 @@ AI_NODES = [
|
||||||
"label": t("Dokument übersetzen"),
|
"label": t("Dokument übersetzen"),
|
||||||
"description": t("Dokument in Zielsprache übersetzen"),
|
"description": t("Dokument in Zielsprache übersetzen"),
|
||||||
"parameters": [
|
"parameters": [
|
||||||
{"name": "targetLanguage", "type": "string", "required": True, "frontendType": "select",
|
{"name": "documentList", "type": "DocumentList", "required": True, "frontendType": "dataRef",
|
||||||
"frontendOptions": {"options": ["en", "de", "fr", "it", "es", "pt", "nl"]},
|
"description": t("Dokumente aus vorherigen Schritten"),
|
||||||
"description": t("Zielsprache")},
|
"graphInherit": {"port": 0, "kind": "documentListWire"}},
|
||||||
],
|
{"name": "resultType", "type": "str", "required": False, "frontendType": "select",
|
||||||
|
"frontendOptions": {"options": ["txt", "json", "md", "csv", "xml", "html", "pdf", "docx", "xlsx", "pptx", "png", "jpg"]},
|
||||||
|
"description": t("Ausgabeformat"), "default": "txt"},
|
||||||
|
{"name": "targetLanguage", "type": "str", "required": True, "frontendType": "text",
|
||||||
|
"description": t("Zielsprache (z.B. de, en, French)")},
|
||||||
|
{"name": "folderId", "type": "str", "required": False, "frontendType": "userFileFolder",
|
||||||
|
"description": t("Zielordner in Meine Dateien"),
|
||||||
|
"default": ""},
|
||||||
|
] + _AI_COMMON_PARAMS,
|
||||||
"inputs": 1,
|
"inputs": 1,
|
||||||
"outputs": 1,
|
"outputs": 1,
|
||||||
"inputPorts": {0: {"accepts": ["DocumentList", "Transit"]}},
|
"inputPorts": {0: {"accepts": ["DocumentList", "Transit", "LoopItem"]}},
|
||||||
"outputPorts": {0: {"schema": "AiResult"}},
|
"outputPorts": {0: {"schema": "AiResult", "dataPickOptions": AI_RESULT_DATA_PICK_OPTIONS}},
|
||||||
"meta": {"icon": "mdi-translate", "color": "#9C27B0", "usesAi": True},
|
"meta": {"icon": "mdi-translate", "color": "#9C27B0", "usesAi": True},
|
||||||
"_method": "ai",
|
"_method": "ai",
|
||||||
"_action": "translateDocument",
|
"_action": "translateDocument",
|
||||||
|
|
@ -89,14 +249,20 @@ AI_NODES = [
|
||||||
"label": t("Dokument konvertieren"),
|
"label": t("Dokument konvertieren"),
|
||||||
"description": t("Dokument in anderes Format konvertieren"),
|
"description": t("Dokument in anderes Format konvertieren"),
|
||||||
"parameters": [
|
"parameters": [
|
||||||
{"name": "targetFormat", "type": "string", "required": True, "frontendType": "select",
|
{"name": "documentList", "type": "DocumentList", "required": True, "frontendType": "dataRef",
|
||||||
"frontendOptions": {"options": ["pdf", "docx", "txt", "html", "md"]},
|
"description": t("Dokumente aus vorherigen Schritten"),
|
||||||
|
"graphInherit": {"port": 0, "kind": "documentListWire"}},
|
||||||
|
{"name": "targetFormat", "type": "str", "required": True, "frontendType": "select",
|
||||||
|
"frontendOptions": {"options": ["docx", "pdf", "xlsx", "csv", "txt", "html", "json", "md"]},
|
||||||
"description": t("Zielformat")},
|
"description": t("Zielformat")},
|
||||||
],
|
{"name": "folderId", "type": "str", "required": False, "frontendType": "userFileFolder",
|
||||||
|
"description": t("Zielordner in Meine Dateien"),
|
||||||
|
"default": ""},
|
||||||
|
] + _AI_COMMON_PARAMS,
|
||||||
"inputs": 1,
|
"inputs": 1,
|
||||||
"outputs": 1,
|
"outputs": 1,
|
||||||
"inputPorts": {0: {"accepts": ["DocumentList", "Transit"]}},
|
"inputPorts": {0: {"accepts": ["DocumentList", "Transit", "LoopItem"]}},
|
||||||
"outputPorts": {0: {"schema": "DocumentList"}},
|
"outputPorts": {0: {"schema": "DocumentList", "dataPickOptions": DOCUMENT_LIST_DATA_PICK_OPTIONS}},
|
||||||
"meta": {"icon": "mdi-file-convert", "color": "#9C27B0", "usesAi": True},
|
"meta": {"icon": "mdi-file-convert", "color": "#9C27B0", "usesAi": True},
|
||||||
"_method": "ai",
|
"_method": "ai",
|
||||||
"_action": "convertDocument",
|
"_action": "convertDocument",
|
||||||
|
|
@ -107,13 +273,32 @@ AI_NODES = [
|
||||||
"label": t("Dokument generieren"),
|
"label": t("Dokument generieren"),
|
||||||
"description": t("Dokument aus Prompt generieren"),
|
"description": t("Dokument aus Prompt generieren"),
|
||||||
"parameters": [
|
"parameters": [
|
||||||
{"name": "prompt", "type": "string", "required": True, "frontendType": "textarea",
|
{"name": "prompt", "type": "str", "required": True, "frontendType": "textarea",
|
||||||
"description": t("Generierungs-Prompt")},
|
"description": t("Generierungs-Prompt")},
|
||||||
],
|
{"name": "outputFormat", "type": "str", "required": False, "frontendType": "select",
|
||||||
|
"frontendOptions": {"options": ["docx", "pdf", "txt", "html", "md"]},
|
||||||
|
"description": t("Ausgabeformat"), "default": "docx"},
|
||||||
|
{"name": "title", "type": "str", "required": False, "frontendType": "text",
|
||||||
|
"description": t("Dokumenttitel (Metadaten / Dateiname)"), "default": ""},
|
||||||
|
{"name": "documentType", "type": "str", "required": False, "frontendType": "select",
|
||||||
|
"frontendOptions": {"options": ["letter", "memo", "proposal", "contract", "report", "email"]},
|
||||||
|
"description": t("Dokumentart (Inhaltshinweis fuer die KI)"), "default": "proposal"},
|
||||||
|
{"name": "folderId", "type": "str", "required": False, "frontendType": "userFileFolder",
|
||||||
|
"description": t("Zielordner in Meine Dateien"),
|
||||||
|
"default": ""},
|
||||||
|
{"name": "context", "type": "Any", "required": False, "frontendType": "contextBuilder",
|
||||||
|
"description": CONTEXT_BUILDER_PARAM_DESCRIPTION, "default": "",
|
||||||
|
"graphInherit": {"port": 0, "kind": "primaryTextRef"}},
|
||||||
|
{"name": "documentList", "type": "DocumentList", "required": False, "frontendType": "hidden",
|
||||||
|
"description": t("Dokumente aus vorherigen Schritten"), "default": "",
|
||||||
|
"graphInherit": {"port": 0, "kind": "documentListWire"}},
|
||||||
|
] + _AI_COMMON_PARAMS,
|
||||||
"inputs": 1,
|
"inputs": 1,
|
||||||
"outputs": 1,
|
"outputs": 1,
|
||||||
"inputPorts": {0: {"accepts": ["Transit"]}},
|
"inputPorts": {0: {"accepts": [
|
||||||
"outputPorts": {0: {"schema": "DocumentList"}},
|
"FormPayload", "Transit", "AiResult", "DocumentList", "ActionResult", "LoopItem", "TextResult",
|
||||||
|
]}},
|
||||||
|
"outputPorts": {0: {"schema": "DocumentList", "dataPickOptions": DOCUMENT_LIST_DATA_PICK_OPTIONS}},
|
||||||
"meta": {"icon": "mdi-file-plus", "color": "#9C27B0", "usesAi": True},
|
"meta": {"icon": "mdi-file-plus", "color": "#9C27B0", "usesAi": True},
|
||||||
"_method": "ai",
|
"_method": "ai",
|
||||||
"_action": "generateDocument",
|
"_action": "generateDocument",
|
||||||
|
|
@ -124,16 +309,27 @@ AI_NODES = [
|
||||||
"label": t("Code generieren"),
|
"label": t("Code generieren"),
|
||||||
"description": t("Code aus Beschreibung generieren"),
|
"description": t("Code aus Beschreibung generieren"),
|
||||||
"parameters": [
|
"parameters": [
|
||||||
{"name": "prompt", "type": "string", "required": True, "frontendType": "textarea",
|
{"name": "prompt", "type": "str", "required": True, "frontendType": "textarea",
|
||||||
"description": t("Code-Generierungs-Prompt")},
|
"description": t("Code-Generierungs-Prompt")},
|
||||||
{"name": "language", "type": "string", "required": False, "frontendType": "select",
|
{"name": "resultType", "type": "str", "required": False, "frontendType": "select",
|
||||||
"frontendOptions": {"options": ["python", "javascript", "typescript", "java", "csharp", "go"]},
|
"frontendOptions": {"options": ["py", "js", "ts", "html", "java", "cpp", "txt", "json", "csv", "xml"]},
|
||||||
"description": t("Programmiersprache"), "default": "python"},
|
"description": t("Datei-Endung der erzeugten Code-Datei"), "default": "py"},
|
||||||
],
|
{"name": "folderId", "type": "str", "required": False, "frontendType": "userFileFolder",
|
||||||
|
"description": t("Zielordner in Meine Dateien"),
|
||||||
|
"default": ""},
|
||||||
|
{"name": "context", "type": "Any", "required": False, "frontendType": "contextBuilder",
|
||||||
|
"description": CONTEXT_BUILDER_PARAM_DESCRIPTION, "default": "",
|
||||||
|
"graphInherit": {"port": 0, "kind": "primaryTextRef"}},
|
||||||
|
{"name": "documentList", "type": "DocumentList", "required": False, "frontendType": "hidden",
|
||||||
|
"description": t("Dokumente aus vorherigen Schritten"), "default": "",
|
||||||
|
"graphInherit": {"port": 0, "kind": "documentListWire"}},
|
||||||
|
] + _AI_COMMON_PARAMS,
|
||||||
"inputs": 1,
|
"inputs": 1,
|
||||||
"outputs": 1,
|
"outputs": 1,
|
||||||
"inputPorts": {0: {"accepts": ["Transit"]}},
|
"inputPorts": {0: {"accepts": [
|
||||||
"outputPorts": {0: {"schema": "AiResult"}},
|
"FormPayload", "Transit", "AiResult", "DocumentList", "ActionResult", "LoopItem", "TextResult",
|
||||||
|
]}},
|
||||||
|
"outputPorts": {0: {"schema": "AiResult", "dataPickOptions": AI_RESULT_DATA_PICK_OPTIONS}},
|
||||||
"meta": {"icon": "mdi-code-tags", "color": "#9C27B0", "usesAi": True},
|
"meta": {"icon": "mdi-code-tags", "color": "#9C27B0", "usesAi": True},
|
||||||
"_method": "ai",
|
"_method": "ai",
|
||||||
"_action": "generateCode",
|
"_action": "generateCode",
|
||||||
|
|
@ -144,16 +340,16 @@ AI_NODES = [
|
||||||
"label": t("KI-Konsolidierung"),
|
"label": t("KI-Konsolidierung"),
|
||||||
"description": t("Gesammelte Ergebnisse mit KI zusammenfassen, klassifizieren oder semantisch zusammenführen"),
|
"description": t("Gesammelte Ergebnisse mit KI zusammenfassen, klassifizieren oder semantisch zusammenführen"),
|
||||||
"parameters": [
|
"parameters": [
|
||||||
{"name": "mode", "type": "string", "required": False, "frontendType": "select",
|
{"name": "mode", "type": "str", "required": False, "frontendType": "select",
|
||||||
"frontendOptions": {"options": ["summarize", "classify", "semanticMerge"]},
|
"frontendOptions": {"options": ["summarize", "classify", "semanticMerge"]},
|
||||||
"description": t("Konsolidierungsmodus"), "default": "summarize"},
|
"description": t("Konsolidierungsmodus"), "default": "summarize"},
|
||||||
{"name": "prompt", "type": "string", "required": False, "frontendType": "textarea",
|
{"name": "prompt", "type": "str", "required": False, "frontendType": "textarea",
|
||||||
"description": t("Optionaler Prompt für die Konsolidierung"), "default": ""},
|
"description": t("Optionaler Prompt für die Konsolidierung"), "default": ""},
|
||||||
],
|
] + _AI_COMMON_PARAMS,
|
||||||
"inputs": 1,
|
"inputs": 1,
|
||||||
"outputs": 1,
|
"outputs": 1,
|
||||||
"inputPorts": {0: {"accepts": ["AggregateResult", "Transit"]}},
|
"inputPorts": {0: {"accepts": ["AggregateResult", "Transit"]}},
|
||||||
"outputPorts": {0: {"schema": "ConsolidateResult"}},
|
"outputPorts": {0: {"schema": "ConsolidateResult", "dataPickOptions": CONSOLIDATE_RESULT_DATA_PICK_OPTIONS}},
|
||||||
"meta": {"icon": "mdi-table-merge-cells", "color": "#9C27B0", "usesAi": True},
|
"meta": {"icon": "mdi-table-merge-cells", "color": "#9C27B0", "usesAi": True},
|
||||||
"_method": "ai",
|
"_method": "ai",
|
||||||
"_action": "consolidate",
|
"_action": "consolidate",
|
||||||
|
|
|
||||||
|
|
@ -4,6 +4,63 @@
|
||||||
|
|
||||||
from modules.shared.i18nRegistry import t
|
from modules.shared.i18nRegistry import t
|
||||||
|
|
||||||
|
from modules.features.graphicalEditor.nodeDefinitions.ai import ACTION_RESULT_DATA_PICK_OPTIONS
|
||||||
|
|
||||||
|
TASK_LIST_DATA_PICK_OPTIONS = [
|
||||||
|
{
|
||||||
|
"path": ["tasks"],
|
||||||
|
"pickerLabel": t("Alle Aufgaben"),
|
||||||
|
"detail": t("Vollständige Aufgabenliste."),
|
||||||
|
"recommended": True,
|
||||||
|
"type": "List[TaskItem]",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"path": ["tasks", 0],
|
||||||
|
"pickerLabel": t("Erste Aufgabe"),
|
||||||
|
"detail": t("Erstes Listenelement."),
|
||||||
|
"recommended": False,
|
||||||
|
"type": "TaskItem",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"path": ["count"],
|
||||||
|
"pickerLabel": t("Anzahl"),
|
||||||
|
"detail": t("Anzahl der Aufgaben."),
|
||||||
|
"recommended": False,
|
||||||
|
"type": "int",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"path": ["listId"],
|
||||||
|
"pickerLabel": t("Listen-ID"),
|
||||||
|
"detail": t("ClickUp-Listen-Kontext, falls gesetzt."),
|
||||||
|
"recommended": False,
|
||||||
|
"type": "str",
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
|
TASK_RESULT_DATA_PICK_OPTIONS = [
|
||||||
|
{
|
||||||
|
"path": ["success"],
|
||||||
|
"pickerLabel": t("Erfolg"),
|
||||||
|
"detail": t("Ob der API-Aufruf erfolgreich war."),
|
||||||
|
"recommended": True,
|
||||||
|
"type": "bool",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"path": ["taskId"],
|
||||||
|
"pickerLabel": t("Aufgaben-ID"),
|
||||||
|
"detail": t("ID der betroffenen Aufgabe."),
|
||||||
|
"recommended": True,
|
||||||
|
"type": "str",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"path": ["task"],
|
||||||
|
"pickerLabel": t("Aufgabendaten"),
|
||||||
|
"detail": t("Vollständiges Task-Objekt (Dict)."),
|
||||||
|
"recommended": True,
|
||||||
|
"type": "Dict",
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
CLICKUP_NODES = [
|
CLICKUP_NODES = [
|
||||||
{
|
{
|
||||||
"id": "clickup.searchTasks",
|
"id": "clickup.searchTasks",
|
||||||
|
|
@ -11,29 +68,29 @@ CLICKUP_NODES = [
|
||||||
"label": t("Aufgaben suchen"),
|
"label": t("Aufgaben suchen"),
|
||||||
"description": t("Aufgaben in einem Workspace suchen"),
|
"description": t("Aufgaben in einem Workspace suchen"),
|
||||||
"parameters": [
|
"parameters": [
|
||||||
{"name": "connectionReference", "type": "string", "required": True, "frontendType": "userConnection",
|
{"name": "connectionReference", "type": "str", "required": True, "frontendType": "userConnection",
|
||||||
"frontendOptions": {"authority": "clickup"},
|
"frontendOptions": {"authority": "clickup"},
|
||||||
"description": t("ClickUp-Verbindung")},
|
"description": t("ClickUp-Verbindung")},
|
||||||
{"name": "teamId", "type": "string", "required": True, "frontendType": "text",
|
{"name": "teamId", "type": "str", "required": True, "frontendType": "text",
|
||||||
"description": t("Team-/Workspace-ID")},
|
"description": t("Team-/Workspace-ID")},
|
||||||
{"name": "query", "type": "string", "required": True, "frontendType": "text",
|
{"name": "query", "type": "str", "required": True, "frontendType": "text",
|
||||||
"description": t("Suchbegriff")},
|
"description": t("Suchbegriff")},
|
||||||
{"name": "page", "type": "number", "required": False, "frontendType": "number",
|
{"name": "page", "type": "int", "required": False, "frontendType": "number",
|
||||||
"description": t("Seite"), "default": 0},
|
"description": t("Seite"), "default": 0},
|
||||||
{"name": "listId", "type": "string", "required": False, "frontendType": "clickupList",
|
{"name": "listId", "type": "str", "required": False, "frontendType": "clickupList",
|
||||||
"frontendOptions": {"dependsOn": "connectionReference"},
|
"frontendOptions": {"dependsOn": "connectionReference"},
|
||||||
"description": t("In dieser Liste suchen")},
|
"description": t("In dieser Liste suchen")},
|
||||||
{"name": "includeClosed", "type": "boolean", "required": False, "frontendType": "checkbox",
|
{"name": "includeClosed", "type": "bool", "required": False, "frontendType": "checkbox",
|
||||||
"description": t("Erledigte einbeziehen"), "default": False},
|
"description": t("Erledigte einbeziehen"), "default": False},
|
||||||
{"name": "fullTaskData", "type": "boolean", "required": False, "frontendType": "checkbox",
|
{"name": "fullTaskData", "type": "bool", "required": False, "frontendType": "checkbox",
|
||||||
"description": t("Vollständige Daten"), "default": False},
|
"description": t("Vollständige Daten"), "default": False},
|
||||||
{"name": "matchNameOnly", "type": "boolean", "required": False, "frontendType": "checkbox",
|
{"name": "matchNameOnly", "type": "bool", "required": False, "frontendType": "checkbox",
|
||||||
"description": t("Nur Titel"), "default": True},
|
"description": t("Nur Titel"), "default": True},
|
||||||
],
|
],
|
||||||
"inputs": 1,
|
"inputs": 1,
|
||||||
"outputs": 1,
|
"outputs": 1,
|
||||||
"inputPorts": {0: {"accepts": ["Transit"]}},
|
"inputPorts": {0: {"accepts": ["Transit"]}},
|
||||||
"outputPorts": {0: {"schema": "TaskList"}},
|
"outputPorts": {0: {"schema": "TaskList", "dataPickOptions": TASK_LIST_DATA_PICK_OPTIONS}},
|
||||||
"meta": {"icon": "mdi-magnify", "color": "#7B68EE", "usesAi": False},
|
"meta": {"icon": "mdi-magnify", "color": "#7B68EE", "usesAi": False},
|
||||||
"_method": "clickup",
|
"_method": "clickup",
|
||||||
"_action": "searchTasks",
|
"_action": "searchTasks",
|
||||||
|
|
@ -44,21 +101,21 @@ CLICKUP_NODES = [
|
||||||
"label": t("Aufgaben auflisten"),
|
"label": t("Aufgaben auflisten"),
|
||||||
"description": t("Aufgaben einer Liste auflisten"),
|
"description": t("Aufgaben einer Liste auflisten"),
|
||||||
"parameters": [
|
"parameters": [
|
||||||
{"name": "connectionReference", "type": "string", "required": True, "frontendType": "userConnection",
|
{"name": "connectionReference", "type": "str", "required": True, "frontendType": "userConnection",
|
||||||
"frontendOptions": {"authority": "clickup"},
|
"frontendOptions": {"authority": "clickup"},
|
||||||
"description": t("ClickUp-Verbindung")},
|
"description": t("ClickUp-Verbindung")},
|
||||||
{"name": "pathQuery", "type": "string", "required": True, "frontendType": "clickupList",
|
{"name": "pathQuery", "type": "str", "required": True, "frontendType": "clickupList",
|
||||||
"frontendOptions": {"dependsOn": "connectionReference"},
|
"frontendOptions": {"dependsOn": "connectionReference"},
|
||||||
"description": t("Pfad zur Liste")},
|
"description": t("Pfad zur Liste")},
|
||||||
{"name": "page", "type": "number", "required": False, "frontendType": "number",
|
{"name": "page", "type": "int", "required": False, "frontendType": "number",
|
||||||
"description": t("Seite"), "default": 0},
|
"description": t("Seite"), "default": 0},
|
||||||
{"name": "includeClosed", "type": "boolean", "required": False, "frontendType": "checkbox",
|
{"name": "includeClosed", "type": "bool", "required": False, "frontendType": "checkbox",
|
||||||
"description": t("Erledigte einbeziehen"), "default": False},
|
"description": t("Erledigte einbeziehen"), "default": False},
|
||||||
],
|
],
|
||||||
"inputs": 1,
|
"inputs": 1,
|
||||||
"outputs": 1,
|
"outputs": 1,
|
||||||
"inputPorts": {0: {"accepts": ["Transit"]}},
|
"inputPorts": {0: {"accepts": ["Transit"]}},
|
||||||
"outputPorts": {0: {"schema": "TaskList"}},
|
"outputPorts": {0: {"schema": "TaskList", "dataPickOptions": TASK_LIST_DATA_PICK_OPTIONS}},
|
||||||
"meta": {"icon": "mdi-format-list-bulleted", "color": "#7B68EE", "usesAi": False},
|
"meta": {"icon": "mdi-format-list-bulleted", "color": "#7B68EE", "usesAi": False},
|
||||||
"_method": "clickup",
|
"_method": "clickup",
|
||||||
"_action": "listTasks",
|
"_action": "listTasks",
|
||||||
|
|
@ -69,18 +126,18 @@ CLICKUP_NODES = [
|
||||||
"label": t("Aufgabe abrufen"),
|
"label": t("Aufgabe abrufen"),
|
||||||
"description": t("Eine Aufgabe abrufen"),
|
"description": t("Eine Aufgabe abrufen"),
|
||||||
"parameters": [
|
"parameters": [
|
||||||
{"name": "connectionReference", "type": "string", "required": True, "frontendType": "userConnection",
|
{"name": "connectionReference", "type": "str", "required": True, "frontendType": "userConnection",
|
||||||
"frontendOptions": {"authority": "clickup"},
|
"frontendOptions": {"authority": "clickup"},
|
||||||
"description": t("ClickUp-Verbindung")},
|
"description": t("ClickUp-Verbindung")},
|
||||||
{"name": "taskId", "type": "string", "required": False, "frontendType": "text",
|
{"name": "taskId", "type": "str", "required": False, "frontendType": "text",
|
||||||
"description": t("Task-ID")},
|
"description": t("Task-ID")},
|
||||||
{"name": "pathQuery", "type": "string", "required": False, "frontendType": "text",
|
{"name": "pathQuery", "type": "str", "required": False, "frontendType": "text",
|
||||||
"description": t("Oder Pfad")},
|
"description": t("Oder Pfad")},
|
||||||
],
|
],
|
||||||
"inputs": 1,
|
"inputs": 1,
|
||||||
"outputs": 1,
|
"outputs": 1,
|
||||||
"inputPorts": {0: {"accepts": ["Transit"]}},
|
"inputPorts": {0: {"accepts": ["Transit"]}},
|
||||||
"outputPorts": {0: {"schema": "TaskResult"}},
|
"outputPorts": {0: {"schema": "TaskResult", "dataPickOptions": TASK_RESULT_DATA_PICK_OPTIONS}},
|
||||||
"meta": {"icon": "mdi-file-document-outline", "color": "#7B68EE", "usesAi": False},
|
"meta": {"icon": "mdi-file-document-outline", "color": "#7B68EE", "usesAi": False},
|
||||||
"_method": "clickup",
|
"_method": "clickup",
|
||||||
"_action": "getTask",
|
"_action": "getTask",
|
||||||
|
|
@ -91,42 +148,40 @@ CLICKUP_NODES = [
|
||||||
"label": t("Aufgabe erstellen"),
|
"label": t("Aufgabe erstellen"),
|
||||||
"description": t("Aufgabe erstellen"),
|
"description": t("Aufgabe erstellen"),
|
||||||
"parameters": [
|
"parameters": [
|
||||||
{"name": "connectionReference", "type": "string", "required": True, "frontendType": "userConnection",
|
{"name": "connectionReference", "type": "str", "required": True, "frontendType": "userConnection",
|
||||||
"frontendOptions": {"authority": "clickup"},
|
"frontendOptions": {"authority": "clickup"},
|
||||||
"description": t("ClickUp-Verbindung")},
|
"description": t("ClickUp-Verbindung")},
|
||||||
{"name": "teamId", "type": "string", "required": False, "frontendType": "text",
|
{"name": "pathQuery", "type": "str", "required": False, "frontendType": "clickupList",
|
||||||
"description": t("Workspace")},
|
|
||||||
{"name": "pathQuery", "type": "string", "required": False, "frontendType": "clickupList",
|
|
||||||
"frontendOptions": {"dependsOn": "connectionReference"},
|
"frontendOptions": {"dependsOn": "connectionReference"},
|
||||||
"description": t("Pfad zur Liste")},
|
"description": t("Pfad zur Liste")},
|
||||||
{"name": "listId", "type": "string", "required": False, "frontendType": "text",
|
{"name": "listId", "type": "str", "required": False, "frontendType": "text",
|
||||||
"description": t("Listen-ID")},
|
"description": t("Listen-ID")},
|
||||||
{"name": "name", "type": "string", "required": True, "frontendType": "text",
|
{"name": "name", "type": "str", "required": True, "frontendType": "text",
|
||||||
"description": t("Name")},
|
"description": t("Name")},
|
||||||
{"name": "description", "type": "string", "required": False, "frontendType": "textarea",
|
{"name": "description", "type": "str", "required": False, "frontendType": "textarea",
|
||||||
"description": t("Beschreibung")},
|
"description": t("Beschreibung")},
|
||||||
{"name": "taskStatus", "type": "string", "required": False, "frontendType": "text",
|
{"name": "taskStatus", "type": "str", "required": False, "frontendType": "text",
|
||||||
"description": t("Status")},
|
"description": t("Status")},
|
||||||
{"name": "taskPriority", "type": "string", "required": False, "frontendType": "select",
|
{"name": "taskPriority", "type": "str", "required": False, "frontendType": "select",
|
||||||
"frontendOptions": {"options": ["1", "2", "3", "4"]},
|
"frontendOptions": {"options": ["1", "2", "3", "4"]},
|
||||||
"description": t("Priorität 1-4")},
|
"description": t("Priorität 1-4")},
|
||||||
{"name": "taskDueDateMs", "type": "string", "required": False, "frontendType": "text",
|
{"name": "taskDueDateMs", "type": "str", "required": False, "frontendType": "text",
|
||||||
"description": t("Fälligkeit (ms)")},
|
"description": t("Fälligkeit (ms)")},
|
||||||
{"name": "taskAssigneeIds", "type": "object", "required": False, "frontendType": "json",
|
{"name": "taskAssigneeIds", "type": "object", "required": False, "frontendType": "json",
|
||||||
"description": t("Zugewiesene")},
|
"description": t("Zugewiesene")},
|
||||||
{"name": "taskTimeEstimateMs", "type": "string", "required": False, "frontendType": "text",
|
{"name": "taskTimeEstimateMs", "type": "str", "required": False, "frontendType": "text",
|
||||||
"description": t("Zeitschätzung (ms)")},
|
"description": t("Zeitschätzung (ms)")},
|
||||||
{"name": "taskTimeEstimateHours", "type": "string", "required": False, "frontendType": "text",
|
{"name": "taskTimeEstimateHours", "type": "str", "required": False, "frontendType": "text",
|
||||||
"description": t("Zeitschätzung (h)")},
|
"description": t("Zeitschätzung (h)")},
|
||||||
{"name": "customFieldValues", "type": "object", "required": False, "frontendType": "json",
|
{"name": "customFieldValues", "type": "object", "required": False, "frontendType": "json",
|
||||||
"description": t("Benutzerdefinierte Felder")},
|
"description": t("Benutzerdefinierte Felder")},
|
||||||
{"name": "taskFields", "type": "string", "required": False, "frontendType": "json",
|
{"name": "taskFields", "type": "str", "required": False, "frontendType": "json",
|
||||||
"description": t("Zusätzliches JSON")},
|
"description": t("Zusätzliches JSON")},
|
||||||
],
|
],
|
||||||
"inputs": 1,
|
"inputs": 1,
|
||||||
"outputs": 1,
|
"outputs": 1,
|
||||||
"inputPorts": {0: {"accepts": ["Transit"]}},
|
"inputPorts": {0: {"accepts": ["Transit"]}},
|
||||||
"outputPorts": {0: {"schema": "TaskResult"}},
|
"outputPorts": {0: {"schema": "TaskResult", "dataPickOptions": TASK_RESULT_DATA_PICK_OPTIONS}},
|
||||||
"meta": {"icon": "mdi-plus-circle-outline", "color": "#7B68EE", "usesAi": False},
|
"meta": {"icon": "mdi-plus-circle-outline", "color": "#7B68EE", "usesAi": False},
|
||||||
"_method": "clickup",
|
"_method": "clickup",
|
||||||
"_action": "createTask",
|
"_action": "createTask",
|
||||||
|
|
@ -137,22 +192,21 @@ CLICKUP_NODES = [
|
||||||
"label": t("Aufgabe aktualisieren"),
|
"label": t("Aufgabe aktualisieren"),
|
||||||
"description": t("Felder der Aufgabe ändern"),
|
"description": t("Felder der Aufgabe ändern"),
|
||||||
"parameters": [
|
"parameters": [
|
||||||
{"name": "connectionReference", "type": "string", "required": True, "frontendType": "userConnection",
|
{"name": "connectionReference", "type": "str", "required": True, "frontendType": "userConnection",
|
||||||
"frontendOptions": {"authority": "clickup"},
|
"frontendOptions": {"authority": "clickup"},
|
||||||
"description": t("ClickUp-Verbindung")},
|
"description": t("ClickUp-Verbindung")},
|
||||||
{"name": "taskId", "type": "string", "required": False, "frontendType": "text",
|
{"name": "taskId", "type": "str", "required": False, "frontendType": "text",
|
||||||
"description": t("Task-ID")},
|
"description": t("Task-ID")},
|
||||||
{"name": "path", "type": "string", "required": False, "frontendType": "text",
|
{"name": "path", "type": "str", "required": False, "frontendType": "text",
|
||||||
"description": t("Oder Pfad")},
|
"description": t("Oder Pfad")},
|
||||||
{"name": "taskUpdateEntries", "type": "object", "required": False, "frontendType": "keyValueRows",
|
{"name": "taskUpdate", "type": "str", "required": False, "frontendType": "json",
|
||||||
"description": t("Zu ändernde Felder")},
|
"description": t("JSON-Body für PUT /task/{id}, z.B. {\"name\":\"...\",\"status\":\"...\"}")},
|
||||||
{"name": "taskUpdate", "type": "string", "required": False, "frontendType": "json",
|
|
||||||
"description": t("JSON für API")},
|
|
||||||
],
|
],
|
||||||
"inputs": 1,
|
"inputs": 1,
|
||||||
"outputs": 1,
|
"outputs": 1,
|
||||||
"inputPorts": {0: {"accepts": ["TaskResult", "Transit"]}},
|
"inputPorts": {0: {"accepts": ["TaskResult", "Transit"]}},
|
||||||
"outputPorts": {0: {"schema": "TaskResult"}},
|
"outputPorts": {0: {"schema": "TaskResult", "dataPickOptions": TASK_RESULT_DATA_PICK_OPTIONS}},
|
||||||
|
"paramMappers": ["clickupTaskUpdateMerge"],
|
||||||
"meta": {"icon": "mdi-pencil-outline", "color": "#7B68EE", "usesAi": False},
|
"meta": {"icon": "mdi-pencil-outline", "color": "#7B68EE", "usesAi": False},
|
||||||
"_method": "clickup",
|
"_method": "clickup",
|
||||||
"_action": "updateTask",
|
"_action": "updateTask",
|
||||||
|
|
@ -163,20 +217,22 @@ CLICKUP_NODES = [
|
||||||
"label": t("Anhang hochladen"),
|
"label": t("Anhang hochladen"),
|
||||||
"description": t("Datei an Task anhängen"),
|
"description": t("Datei an Task anhängen"),
|
||||||
"parameters": [
|
"parameters": [
|
||||||
{"name": "connectionReference", "type": "string", "required": True, "frontendType": "userConnection",
|
{"name": "connectionReference", "type": "str", "required": True, "frontendType": "userConnection",
|
||||||
"frontendOptions": {"authority": "clickup"},
|
"frontendOptions": {"authority": "clickup"},
|
||||||
"description": t("ClickUp-Verbindung")},
|
"description": t("ClickUp-Verbindung")},
|
||||||
{"name": "taskId", "type": "string", "required": False, "frontendType": "text",
|
{"name": "taskId", "type": "str", "required": False, "frontendType": "text",
|
||||||
"description": t("Task-ID")},
|
"description": t("Task-ID")},
|
||||||
{"name": "path", "type": "string", "required": False, "frontendType": "text",
|
{"name": "path", "type": "str", "required": False, "frontendType": "text",
|
||||||
"description": t("Oder Pfad")},
|
"description": t("Oder Pfad")},
|
||||||
{"name": "fileName", "type": "string", "required": False, "frontendType": "text",
|
{"name": "fileName", "type": "str", "required": False, "frontendType": "text",
|
||||||
"description": t("Dateiname")},
|
"description": t("Dateiname")},
|
||||||
|
{"name": "content", "type": "str", "required": True, "frontendType": "hidden",
|
||||||
|
"description": t("Datei-Inhalt aus Upstream-Node (via Wire oder DataRef)"), "default": ""},
|
||||||
],
|
],
|
||||||
"inputs": 1,
|
"inputs": 1,
|
||||||
"outputs": 1,
|
"outputs": 1,
|
||||||
"inputPorts": {0: {"accepts": ["DocumentList", "Transit"]}},
|
"inputPorts": {0: {"accepts": ["DocumentList", "Transit"]}},
|
||||||
"outputPorts": {0: {"schema": "ActionResult"}},
|
"outputPorts": {0: {"schema": "ActionResult", "dataPickOptions": ACTION_RESULT_DATA_PICK_OPTIONS}},
|
||||||
"meta": {"icon": "mdi-attachment", "color": "#7B68EE", "usesAi": False},
|
"meta": {"icon": "mdi-attachment", "color": "#7B68EE", "usesAi": False},
|
||||||
"_method": "clickup",
|
"_method": "clickup",
|
||||||
"_action": "uploadAttachment",
|
"_action": "uploadAttachment",
|
||||||
|
|
|
||||||
|
|
@ -1,30 +1,376 @@
|
||||||
# Copyright (c) 2025 Patrick Motsch
|
# Copyright (c) 2025 Patrick Motsch
|
||||||
# Context node definitions — structural extraction without AI.
|
# Context node definitions — structural extraction without AI plus
|
||||||
|
# generic key/value, merge, filter and transform helpers.
|
||||||
|
|
||||||
from modules.shared.i18nRegistry import t
|
from modules.shared.i18nRegistry import t
|
||||||
|
|
||||||
|
_CONTEXT_INPUT_SCHEMAS = [
|
||||||
|
"Transit",
|
||||||
|
"ActionResult",
|
||||||
|
"AiResult",
|
||||||
|
"MergeResult",
|
||||||
|
"FormPayload",
|
||||||
|
"DocumentList",
|
||||||
|
"EmailList",
|
||||||
|
"TaskList",
|
||||||
|
"FileList",
|
||||||
|
"LoopItem",
|
||||||
|
"UdmDocument",
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
_MERGE_RESULT_DATA_PICK_OPTIONS = [
|
||||||
|
{
|
||||||
|
"path": ["merged"],
|
||||||
|
"pickerLabel": t("Zusammengeführt"),
|
||||||
|
"detail": t("Zusammengeführtes Objekt nach gewählter Strategie."),
|
||||||
|
"recommended": True,
|
||||||
|
"type": "Dict",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"path": ["first"],
|
||||||
|
"pickerLabel": t("Erster Zweig"),
|
||||||
|
"detail": t("Daten vom ersten verbundenen Eingang."),
|
||||||
|
"recommended": False,
|
||||||
|
"type": "Any",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"path": ["inputs"],
|
||||||
|
"pickerLabel": t("Alle Eingänge"),
|
||||||
|
"detail": t("Dict der Eingabeobjekte nach Port-Index."),
|
||||||
|
"recommended": False,
|
||||||
|
"type": "Dict[int,Any]",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"path": ["conflicts"],
|
||||||
|
"pickerLabel": t("Konflikte"),
|
||||||
|
"detail": t("Liste der Schlüssel mit Konflikt (nur bei errorOnConflict)."),
|
||||||
|
"recommended": False,
|
||||||
|
"type": "List[str]",
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
CONTEXT_NODES = [
|
CONTEXT_NODES = [
|
||||||
{
|
{
|
||||||
"id": "context.extractContent",
|
"id": "context.extractContent",
|
||||||
"category": "context",
|
"category": "context",
|
||||||
"label": t("Inhalt extrahieren"),
|
"label": t("Inhalt extrahieren"),
|
||||||
"description": t("Dokumentstruktur extrahieren ohne KI (Seiten, Abschnitte, Bilder, Tabellen)"),
|
"description": t(
|
||||||
|
"Extrahiert Inhalt ohne KI. Ergebnis einheitlich wie KI-Schritte: `response` "
|
||||||
|
"(gesammelter Klartext), strukturierte JSON-Unterlage in `documents[0]`, "
|
||||||
|
"einzelne Bilder als eigene Dokumente `extract_media_*` (nur im Workflow, ohne Eintrag unter „Meine Dateien“) — "
|
||||||
|
"Auswahl im Daten-Picker wie bei `ai.process`."
|
||||||
|
),
|
||||||
"parameters": [
|
"parameters": [
|
||||||
{"name": "outputDetail", "type": "string", "required": False, "frontendType": "select",
|
{"name": "documentList", "type": "str", "required": True, "frontendType": "hidden",
|
||||||
"frontendOptions": {"options": ["full", "structure", "references"]},
|
"description": t("Dokumentenliste (via Wire oder DataRef)"), "default": "",
|
||||||
"description": t("Detailgrad: full = alles, structure = Skelett, references = Dateireferenzen"),
|
"graphInherit": {"port": 0, "kind": "documentListWire"}},
|
||||||
"default": "full"},
|
|
||||||
{"name": "includeImages", "type": "boolean", "required": False, "frontendType": "checkbox",
|
|
||||||
"description": t("Bilder extrahieren"), "default": True},
|
|
||||||
{"name": "includeTables", "type": "boolean", "required": False, "frontendType": "checkbox",
|
|
||||||
"description": t("Tabellen extrahieren"), "default": True},
|
|
||||||
],
|
],
|
||||||
"inputs": 1,
|
"inputs": 1,
|
||||||
"outputs": 1,
|
"outputs": 1,
|
||||||
"inputPorts": {0: {"accepts": ["DocumentList", "Transit"]}},
|
"inputPorts": {0: {"accepts": ["DocumentList", "Transit", "LoopItem"]}},
|
||||||
"outputPorts": {0: {"schema": "UdmDocument"}},
|
"outputPorts": {
|
||||||
|
0: {
|
||||||
|
"schema": "ActionResult",
|
||||||
|
# Authoritative DataPicker paths (same idea as ``parameters`` for configuration).
|
||||||
|
# Frontend uses only this list — no schema expansion merge for this port.
|
||||||
|
"dataPickOptions": [
|
||||||
|
{
|
||||||
|
"path": ["documents", 0, "documentData"],
|
||||||
|
"pickerLabel": t("Gesamter Inhalt"),
|
||||||
|
"detail": t(
|
||||||
|
"Strukturiertes Handover als JSON inklusive aller Textteile "
|
||||||
|
"und Verweisen auf ausgelagerte Bilder."
|
||||||
|
),
|
||||||
|
"recommended": True,
|
||||||
|
"type": "Any",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"path": ["response"],
|
||||||
|
"pickerLabel": t("Nur Text"),
|
||||||
|
"detail": t(
|
||||||
|
"Verketteter Klartext aus allen erkannten Textteilen."
|
||||||
|
),
|
||||||
|
"recommended": True,
|
||||||
|
"type": "str",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"path": ["imageDocumentsOnly"],
|
||||||
|
"pickerLabel": t("Nur Bilder"),
|
||||||
|
"detail": t(
|
||||||
|
"Nur die extrahierten Bilddokumente als Liste, ohne JSON-Handover."
|
||||||
|
),
|
||||||
|
"recommended": False,
|
||||||
|
"type": "List[ActionDocument]",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"path": ["documents"],
|
||||||
|
"pickerLabel": t("Alle Dateitypen"),
|
||||||
|
"detail": t(
|
||||||
|
"Alle Ausgabedokumente nacheinander: JSON-Handover und Bilder."
|
||||||
|
),
|
||||||
|
"recommended": False,
|
||||||
|
"type": "List[ActionDocument]",
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}
|
||||||
|
},
|
||||||
"meta": {"icon": "mdi-file-tree-outline", "color": "#00897B", "usesAi": False},
|
"meta": {"icon": "mdi-file-tree-outline", "color": "#00897B", "usesAi": False},
|
||||||
"_method": "context",
|
"_method": "context",
|
||||||
"_action": "extractContent",
|
"_action": "extractContent",
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
"id": "context.setContext",
|
||||||
|
"category": "context",
|
||||||
|
"label": t("Kontext setzen"),
|
||||||
|
"description": t(
|
||||||
|
"Schreibt in den Workflow-Kontext. Pro Zeile: Ziel-Schlüssel, dann entweder einen "
|
||||||
|
"festen Wert, eine Datenquelle aus dem Graph (Kontext-Picker wie bei anderen Nodes), "
|
||||||
|
"oder eine Aufgabe für einen Benutzer (Human Task) zum Setzen des Werts."
|
||||||
|
),
|
||||||
|
"parameters": [
|
||||||
|
{
|
||||||
|
"name": "scope",
|
||||||
|
"type": "str",
|
||||||
|
"required": False,
|
||||||
|
"frontendType": "select",
|
||||||
|
"frontendOptions": {"options": ["local", "global", "session"]},
|
||||||
|
"default": "local",
|
||||||
|
"description": t("Speicherbereich"),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "assignments",
|
||||||
|
"type": "list",
|
||||||
|
"required": True,
|
||||||
|
"frontendType": "contextAssignments",
|
||||||
|
"default": [],
|
||||||
|
"description": t(
|
||||||
|
"Zuweisungen: Ziel-Schlüssel, Quelle (Picker / fester Wert / Human Task), "
|
||||||
|
"Modus (set, setIfEmpty, append, increment). Optionaler Experten-Pfad `sourcePath` unter der "
|
||||||
|
"gewählten Datenquelle (z. B. payload.status)."
|
||||||
|
),
|
||||||
|
"graphInherit": {"port": 0, "kind": "primaryTextRef"},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
"inputs": 1,
|
||||||
|
"outputs": 1,
|
||||||
|
"inputPorts": {0: {"accepts": _CONTEXT_INPUT_SCHEMAS}},
|
||||||
|
"outputPorts": {
|
||||||
|
0: {
|
||||||
|
"schema": "Transit",
|
||||||
|
"dynamic": True,
|
||||||
|
"deriveFrom": "assignments",
|
||||||
|
"deriveNameField": "contextKey",
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"injectUpstreamPayload": True,
|
||||||
|
"injectRunContext": True,
|
||||||
|
"surfaceDataAsTopLevel": True,
|
||||||
|
"meta": {"icon": "mdi-database-edit-outline", "color": "#5C6BC0", "usesAi": False},
|
||||||
|
"_method": "context",
|
||||||
|
"_action": "setContext",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "context.mergeContext",
|
||||||
|
"category": "context",
|
||||||
|
"label": t("Kontext zusammenführen"),
|
||||||
|
"description": t(
|
||||||
|
"Wartet auf alle verbundenen eingehenden Branches und führt deren "
|
||||||
|
"Kontext-Daten zu einem einheitlichen MergeResult zusammen. "
|
||||||
|
"Strategien: 'shallow' (oberste Ebene), 'deep' (rekursiv), "
|
||||||
|
"'firstWins' / 'lastWins' bei Konflikten, "
|
||||||
|
"'errorOnConflict' (bricht ab und listet Konflikte). "
|
||||||
|
"Der Node blockiert bis alle erwarteten Inputs eingetroffen sind."
|
||||||
|
),
|
||||||
|
"parameters": [
|
||||||
|
{
|
||||||
|
"name": "strategy",
|
||||||
|
"type": "str",
|
||||||
|
"required": False,
|
||||||
|
"frontendType": "select",
|
||||||
|
"frontendOptions": {
|
||||||
|
"options": ["shallow", "deep", "firstWins", "lastWins", "errorOnConflict"]
|
||||||
|
},
|
||||||
|
"default": "deep",
|
||||||
|
"description": t("Strategie bei gleichnamigen Keys aus verschiedenen Branches"),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "waitFor",
|
||||||
|
"type": "int",
|
||||||
|
"required": False,
|
||||||
|
"frontendType": "number",
|
||||||
|
"default": 0,
|
||||||
|
"description": t(
|
||||||
|
"Anzahl Inputs abwarten (0 = alle verbundenen Branches). "
|
||||||
|
"Hilfreich für optionale Branches mit Timeout."
|
||||||
|
),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "timeoutMs",
|
||||||
|
"type": "int",
|
||||||
|
"required": False,
|
||||||
|
"frontendType": "number",
|
||||||
|
"default": 30000,
|
||||||
|
"description": t(
|
||||||
|
"Maximale Wartezeit in ms — danach wird mit den vorhandenen Inputs fortgesetzt"
|
||||||
|
),
|
||||||
|
},
|
||||||
|
],
|
||||||
|
"inputs": 5,
|
||||||
|
"outputs": 1,
|
||||||
|
"inputPorts": {
|
||||||
|
0: {"accepts": _CONTEXT_INPUT_SCHEMAS},
|
||||||
|
1: {"accepts": _CONTEXT_INPUT_SCHEMAS},
|
||||||
|
2: {"accepts": _CONTEXT_INPUT_SCHEMAS},
|
||||||
|
3: {"accepts": _CONTEXT_INPUT_SCHEMAS},
|
||||||
|
4: {"accepts": _CONTEXT_INPUT_SCHEMAS},
|
||||||
|
},
|
||||||
|
"outputPorts": {
|
||||||
|
0: {"schema": "MergeResult", "dataPickOptions": _MERGE_RESULT_DATA_PICK_OPTIONS}
|
||||||
|
},
|
||||||
|
"waitsForAllPredecessors": True,
|
||||||
|
"injectBranchInputs": True,
|
||||||
|
"meta": {"icon": "mdi-call-merge", "color": "#7B1FA2", "usesAi": False},
|
||||||
|
"_method": "context",
|
||||||
|
"_action": "mergeContext",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "context.filterContext",
|
||||||
|
"category": "context",
|
||||||
|
"label": t("Kontext filtern"),
|
||||||
|
"description": t(
|
||||||
|
"Gibt nur bestimmte Felder des eingehenden Datenstroms weiter. "
|
||||||
|
"Modus 'allow': nur diese Keys passieren. "
|
||||||
|
"Modus 'block': diese Keys werden entfernt, alles andere bleibt. "
|
||||||
|
"Unterstützt Pfadausdrücke (z.B. 'user.*', '*.id') und tiefe Pfade ('address.city'). "
|
||||||
|
"Fehlende Keys werden je nach 'missingKeyBehavior' ignoriert, mit null befüllt oder als Fehler behandelt."
|
||||||
|
),
|
||||||
|
"parameters": [
|
||||||
|
{
|
||||||
|
"name": "mode",
|
||||||
|
"type": "str",
|
||||||
|
"required": False,
|
||||||
|
"frontendType": "select",
|
||||||
|
"frontendOptions": {"options": ["allow", "block"]},
|
||||||
|
"default": "allow",
|
||||||
|
"description": t("Allowlist (nur diese durch) oder Blocklist (diese entfernen)"),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "keys",
|
||||||
|
"type": "list",
|
||||||
|
"required": True,
|
||||||
|
"frontendType": "stringList",
|
||||||
|
"default": [],
|
||||||
|
"description": t(
|
||||||
|
"Key-Pfade oder Wildcard-Muster. "
|
||||||
|
"Beispiele: 'response', 'user.*', '*.id', 'address.city'."
|
||||||
|
),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "missingKeyBehavior",
|
||||||
|
"type": "str",
|
||||||
|
"required": False,
|
||||||
|
"frontendType": "select",
|
||||||
|
"frontendOptions": {"options": ["skip", "nullFill", "error"]},
|
||||||
|
"default": "skip",
|
||||||
|
"description": t("Verhalten wenn ein erlaubter Key im Input fehlt"),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "preserveMeta",
|
||||||
|
"type": "bool",
|
||||||
|
"required": False,
|
||||||
|
"frontendType": "checkbox",
|
||||||
|
"default": True,
|
||||||
|
"description": t("Interne Meta-Felder (_success, _error, _transit) immer durchlassen"),
|
||||||
|
},
|
||||||
|
],
|
||||||
|
"inputs": 1,
|
||||||
|
"outputs": 1,
|
||||||
|
"inputPorts": {0: {"accepts": _CONTEXT_INPUT_SCHEMAS}},
|
||||||
|
"outputPorts": {
|
||||||
|
0: {
|
||||||
|
"schema": "Transit",
|
||||||
|
"dynamic": True,
|
||||||
|
"deriveFrom": "keys",
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"injectUpstreamPayload": True,
|
||||||
|
"surfaceDataAsTopLevel": True,
|
||||||
|
"meta": {"icon": "mdi-filter-outline", "color": "#00838F", "usesAi": False},
|
||||||
|
"_method": "context",
|
||||||
|
"_action": "filterContext",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "context.transformContext",
|
||||||
|
"category": "context",
|
||||||
|
"label": t("Kontext transformieren"),
|
||||||
|
"description": t(
|
||||||
|
"Verändert die Struktur des eingehenden Datenstroms. "
|
||||||
|
"Operationen pro Mapping: 'rename' (Key umbenennen), 'cast' (Typ konvertieren), "
|
||||||
|
"'nest' (mehrere Felder unter neuem Objekt zusammenfassen), "
|
||||||
|
"'flatten' (verschachteltes Objekt auf oberste Ebene heben), "
|
||||||
|
"'compute' (neues Feld aus Template-/{{...}}-Ausdruck berechnen). "
|
||||||
|
"Jedes Mapping definiert: 'sourceField' (Eingangspfad / Ausdruck), "
|
||||||
|
"'outputField' (Ausgabe-Key), 'operation' und 'type' (Zieltyp). "
|
||||||
|
"Das Ergebnis ist ein neues Objekt — der ursprüngliche Datenstrom "
|
||||||
|
"wird nicht automatisch weitergegeben (ausser 'passthroughUnmapped: true')."
|
||||||
|
),
|
||||||
|
"parameters": [
|
||||||
|
{
|
||||||
|
"name": "mappings",
|
||||||
|
"type": "list",
|
||||||
|
"required": True,
|
||||||
|
"frontendType": "mappingTable",
|
||||||
|
"default": [],
|
||||||
|
"description": t(
|
||||||
|
"Liste von Mapping-Einträgen. Jeder Eintrag: "
|
||||||
|
"sourceField (DataRef-Pfad oder Ausdruck), "
|
||||||
|
"outputField (Ziel-Key im Output), "
|
||||||
|
"operation (rename | cast | nest | flatten | compute), "
|
||||||
|
"type (str | int | bool | float | object | list — für cast), "
|
||||||
|
"expression (für compute: Template oder Ausdruck, z.B. '{{firstName}} {{lastName}}')."
|
||||||
|
),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "passthroughUnmapped",
|
||||||
|
"type": "bool",
|
||||||
|
"required": False,
|
||||||
|
"frontendType": "checkbox",
|
||||||
|
"default": False,
|
||||||
|
"description": t(
|
||||||
|
"Alle nicht gemappten Felder des Eingangs zusätzlich in den Output übernehmen."
|
||||||
|
),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "flattenDepth",
|
||||||
|
"type": "int",
|
||||||
|
"required": False,
|
||||||
|
"frontendType": "number",
|
||||||
|
"default": 1,
|
||||||
|
"description": t("Tiefe für flatten-Operation (1 = eine Ebene, -1 = vollständig)"),
|
||||||
|
},
|
||||||
|
],
|
||||||
|
"inputs": 1,
|
||||||
|
"outputs": 1,
|
||||||
|
"inputPorts": {0: {"accepts": _CONTEXT_INPUT_SCHEMAS}},
|
||||||
|
"outputPorts": {
|
||||||
|
0: {
|
||||||
|
"schema": {
|
||||||
|
"kind": "fromGraph",
|
||||||
|
"parameter": "mappings",
|
||||||
|
"nameField": "outputField",
|
||||||
|
"schemaName": "Transform_dynamic",
|
||||||
|
},
|
||||||
|
"dynamic": True,
|
||||||
|
"deriveFrom": "mappings",
|
||||||
|
"deriveNameField": "outputField",
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"injectUpstreamPayload": True,
|
||||||
|
"surfaceDataAsTopLevel": True,
|
||||||
|
"meta": {"icon": "mdi-swap-horizontal", "color": "#EF6C00", "usesAi": False},
|
||||||
|
"_method": "context",
|
||||||
|
"_action": "transformContext",
|
||||||
|
},
|
||||||
]
|
]
|
||||||
|
|
|
||||||
|
|
@ -0,0 +1,22 @@
|
||||||
|
# Copyright (c) 2025 Patrick Motsch
|
||||||
|
# Shared parameter copy for ``contextBuilder`` fields (upstream data pick).
|
||||||
|
|
||||||
|
from modules.shared.i18nRegistry import t
|
||||||
|
|
||||||
|
CONTEXT_BUILDER_PARAM_DESCRIPTION = t(
|
||||||
|
"Inhalt aus vorherigen Schritten wählen (DataRef / Daten-Picker): z. B. „response“ für Klartext, "
|
||||||
|
"Handover-Pfade für strukturiertes JSON oder Medienlisten. "
|
||||||
|
"Die Auflösung erfolgt vollständig serverseitig (`resolveParameterReferences`). "
|
||||||
|
"Formular-Schritte speichern Antworten unter „payload“ — fehlt ein gewählter Pfad am Root, "
|
||||||
|
"wird derselbe Pfad automatisch unter „payload“ nachgeschlagen (Kompatibilität mit älteren "
|
||||||
|
"und neuen Picker-Pfaden). "
|
||||||
|
"In Freitext-/Template-Feldern werden weiterhin Platzhalter `{{KnotenId.feld.b.z.}}` ersetzt "
|
||||||
|
"(gleiche Semantik inkl. optionalem Nachschlagen unter „payload“)."
|
||||||
|
)
|
||||||
|
|
||||||
|
# Kurzreferenz für Node-Beschreibungen (optional einbinden): dieselbe Auflösungslogik
|
||||||
|
# wie bei DataRefs — kein separates Variablen-Subsystem.
|
||||||
|
REF_AND_TEMPLATE_COMPATIBILITY_SUMMARY = t(
|
||||||
|
"Verweise: typisierte DataRefs im Parameter; Zeichenketten-Templates mit {{…}}; "
|
||||||
|
"Formular-Felder unter output.payload."
|
||||||
|
)
|
||||||
|
|
@ -3,6 +3,25 @@
|
||||||
|
|
||||||
from modules.shared.i18nRegistry import t
|
from modules.shared.i18nRegistry import t
|
||||||
|
|
||||||
|
from modules.features.graphicalEditor.nodeDefinitions.ai import CONSOLIDATE_RESULT_DATA_PICK_OPTIONS
|
||||||
|
|
||||||
|
AGGREGATE_RESULT_DATA_PICK_OPTIONS = [
|
||||||
|
{
|
||||||
|
"path": ["items"],
|
||||||
|
"pickerLabel": t("Gesammelte Elemente"),
|
||||||
|
"detail": t("Alle aus der Schleife gesammelten Werte."),
|
||||||
|
"recommended": True,
|
||||||
|
"type": "List[Any]",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"path": ["count"],
|
||||||
|
"pickerLabel": t("Anzahl"),
|
||||||
|
"detail": t("Anzahl gesammelter Elemente."),
|
||||||
|
"recommended": False,
|
||||||
|
"type": "int",
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
DATA_NODES = [
|
DATA_NODES = [
|
||||||
{
|
{
|
||||||
"id": "data.aggregate",
|
"id": "data.aggregate",
|
||||||
|
|
@ -10,42 +29,26 @@ DATA_NODES = [
|
||||||
"label": t("Sammeln"),
|
"label": t("Sammeln"),
|
||||||
"description": t("Ergebnisse aus Schleifen-Iterationen sammeln"),
|
"description": t("Ergebnisse aus Schleifen-Iterationen sammeln"),
|
||||||
"parameters": [
|
"parameters": [
|
||||||
{"name": "mode", "type": "string", "required": False, "frontendType": "select",
|
{"name": "mode", "type": "str", "required": False, "frontendType": "select",
|
||||||
"frontendOptions": {"options": ["collect", "concat", "sum", "count"]},
|
"frontendOptions": {"options": ["collect", "concat", "sum", "count"]},
|
||||||
"description": t("Aggregationsmodus"), "default": "collect"},
|
"description": t("Aggregationsmodus"), "default": "collect"},
|
||||||
],
|
],
|
||||||
"inputs": 1,
|
"inputs": 1,
|
||||||
"outputs": 1,
|
"outputs": 1,
|
||||||
"inputPorts": {0: {"accepts": ["Transit"]}},
|
"inputPorts": {0: {"accepts": ["Transit", "AiResult", "LoopItem"]}},
|
||||||
"outputPorts": {0: {"schema": "AggregateResult"}},
|
"outputPorts": {0: {"schema": "AggregateResult", "dataPickOptions": AGGREGATE_RESULT_DATA_PICK_OPTIONS}},
|
||||||
"executor": "data",
|
"executor": "data",
|
||||||
"meta": {"icon": "mdi-playlist-plus", "color": "#607D8B", "usesAi": False},
|
"meta": {"icon": "mdi-playlist-plus", "color": "#607D8B", "usesAi": False},
|
||||||
},
|
},
|
||||||
{
|
|
||||||
"id": "data.transform",
|
|
||||||
"category": "data",
|
|
||||||
"label": t("Umwandeln"),
|
|
||||||
"description": t("Daten umstrukturieren"),
|
|
||||||
"parameters": [
|
|
||||||
{"name": "mappings", "type": "json", "required": True, "frontendType": "mappingTable",
|
|
||||||
"description": t("Feld-Zuordnungen"), "default": []},
|
|
||||||
],
|
|
||||||
"inputs": 1,
|
|
||||||
"outputs": 1,
|
|
||||||
"inputPorts": {0: {"accepts": ["Transit"]}},
|
|
||||||
"outputPorts": {0: {"schema": "ActionResult", "dynamic": True, "deriveFrom": "mappings"}},
|
|
||||||
"executor": "data",
|
|
||||||
"meta": {"icon": "mdi-swap-horizontal-bold", "color": "#607D8B", "usesAi": False},
|
|
||||||
},
|
|
||||||
{
|
{
|
||||||
"id": "data.filter",
|
"id": "data.filter",
|
||||||
"category": "data",
|
"category": "data",
|
||||||
"label": t("Filtern"),
|
"label": t("Filtern"),
|
||||||
"description": t("Elemente nach Bedingung filtern"),
|
"description": t("Elemente nach Bedingung filtern"),
|
||||||
"parameters": [
|
"parameters": [
|
||||||
{"name": "condition", "type": "string", "required": True, "frontendType": "filterExpression",
|
{"name": "condition", "type": "str", "required": True, "frontendType": "filterExpression",
|
||||||
"description": t("Filterbedingung")},
|
"description": t("Filterbedingung")},
|
||||||
{"name": "udmContentType", "type": "string", "required": False, "frontendType": "select",
|
{"name": "udmContentType", "type": "str", "required": False, "frontendType": "select",
|
||||||
"frontendOptions": {"options": ["", "text", "image", "table", "code", "media", "link", "formula"]},
|
"frontendOptions": {"options": ["", "text", "image", "table", "code", "media", "link", "formula"]},
|
||||||
"description": t("UDM-ContentType-Filter (optional, leer = kein UDM-Filter)"), "default": ""},
|
"description": t("UDM-ContentType-Filter (optional, leer = kein UDM-Filter)"), "default": ""},
|
||||||
],
|
],
|
||||||
|
|
@ -62,16 +65,16 @@ DATA_NODES = [
|
||||||
"label": t("Konsolidieren"),
|
"label": t("Konsolidieren"),
|
||||||
"description": t("Gesammelte Ergebnisse deterministisch zusammenführen (Tabelle, CSV, Merge)"),
|
"description": t("Gesammelte Ergebnisse deterministisch zusammenführen (Tabelle, CSV, Merge)"),
|
||||||
"parameters": [
|
"parameters": [
|
||||||
{"name": "mode", "type": "string", "required": False, "frontendType": "select",
|
{"name": "mode", "type": "str", "required": False, "frontendType": "select",
|
||||||
"frontendOptions": {"options": ["table", "concat", "merge", "csvJoin"]},
|
"frontendOptions": {"options": ["table", "concat", "merge", "csvJoin"]},
|
||||||
"description": t("Konsolidierungsmodus"), "default": "table"},
|
"description": t("Konsolidierungsmodus"), "default": "table"},
|
||||||
{"name": "separator", "type": "string", "required": False, "frontendType": "text",
|
{"name": "separator", "type": "str", "required": False, "frontendType": "text",
|
||||||
"description": t("Trennzeichen (für concat/csvJoin)"), "default": "\n"},
|
"description": t("Trennzeichen (für concat/csvJoin)"), "default": "\n"},
|
||||||
],
|
],
|
||||||
"inputs": 1,
|
"inputs": 1,
|
||||||
"outputs": 1,
|
"outputs": 1,
|
||||||
"inputPorts": {0: {"accepts": ["AggregateResult", "Transit"]}},
|
"inputPorts": {0: {"accepts": ["AggregateResult", "Transit"]}},
|
||||||
"outputPorts": {0: {"schema": "ConsolidateResult"}},
|
"outputPorts": {0: {"schema": "ConsolidateResult", "dataPickOptions": CONSOLIDATE_RESULT_DATA_PICK_OPTIONS}},
|
||||||
"executor": "data",
|
"executor": "data",
|
||||||
"meta": {"icon": "mdi-table-merge-cells", "color": "#607D8B", "usesAi": False},
|
"meta": {"icon": "mdi-table-merge-cells", "color": "#607D8B", "usesAi": False},
|
||||||
},
|
},
|
||||||
|
|
|
||||||
|
|
@ -3,6 +3,35 @@
|
||||||
|
|
||||||
from modules.shared.i18nRegistry import t
|
from modules.shared.i18nRegistry import t
|
||||||
|
|
||||||
|
from modules.features.graphicalEditor.nodeDefinitions.contextPickerHelp import (
|
||||||
|
CONTEXT_BUILDER_PARAM_DESCRIPTION,
|
||||||
|
)
|
||||||
|
from modules.features.graphicalEditor.nodeDefinitions.ai import ACTION_RESULT_DATA_PICK_OPTIONS
|
||||||
|
|
||||||
|
EMAIL_LIST_DATA_PICK_OPTIONS = [
|
||||||
|
{
|
||||||
|
"path": ["emails"],
|
||||||
|
"pickerLabel": t("Alle E-Mails"),
|
||||||
|
"detail": t("Die vollständige E-Mail-Liste des Schritts."),
|
||||||
|
"recommended": True,
|
||||||
|
"type": "List[EmailItem]",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"path": ["emails", 0],
|
||||||
|
"pickerLabel": t("Erste E-Mail"),
|
||||||
|
"detail": t("Das erste Element der Liste."),
|
||||||
|
"recommended": False,
|
||||||
|
"type": "EmailItem",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"path": ["count"],
|
||||||
|
"pickerLabel": t("Anzahl"),
|
||||||
|
"detail": t("Anzahl gefundener E-Mails."),
|
||||||
|
"recommended": False,
|
||||||
|
"type": "int",
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
EMAIL_NODES = [
|
EMAIL_NODES = [
|
||||||
{
|
{
|
||||||
"id": "email.checkEmail",
|
"id": "email.checkEmail",
|
||||||
|
|
@ -10,26 +39,21 @@ EMAIL_NODES = [
|
||||||
"label": t("E-Mail prüfen"),
|
"label": t("E-Mail prüfen"),
|
||||||
"description": t("Neue E-Mails prüfen"),
|
"description": t("Neue E-Mails prüfen"),
|
||||||
"parameters": [
|
"parameters": [
|
||||||
{"name": "connectionReference", "type": "string", "required": True, "frontendType": "userConnection",
|
{"name": "connectionReference", "type": "str", "required": True, "frontendType": "userConnection",
|
||||||
"frontendOptions": {"authority": "msft"},
|
"frontendOptions": {"authority": "msft"},
|
||||||
"description": t("E-Mail-Konto Verbindung")},
|
"description": t("E-Mail-Konto Verbindung")},
|
||||||
{"name": "folder", "type": "string", "required": False, "frontendType": "text",
|
{"name": "folder", "type": "str", "required": False, "frontendType": "text",
|
||||||
"description": t("Ordner"), "default": "Inbox"},
|
"description": t("Ordner"), "default": "Inbox"},
|
||||||
{"name": "limit", "type": "number", "required": False, "frontendType": "number",
|
{"name": "limit", "type": "int", "required": False, "frontendType": "number",
|
||||||
"description": t("Max E-Mails"), "default": 100},
|
"description": t("Max E-Mails"), "default": 100},
|
||||||
{"name": "fromAddress", "type": "string", "required": False, "frontendType": "text",
|
{"name": "filter", "type": "str", "required": False, "frontendType": "text",
|
||||||
"description": t("Nur von dieser Adresse"), "default": ""},
|
"description": t("Filter-Ausdruck (z.B. 'from:max@example.com hasAttachment:true betreff')"), "default": ""},
|
||||||
{"name": "subjectContains", "type": "string", "required": False, "frontendType": "text",
|
|
||||||
"description": t("Betreff muss enthalten"), "default": ""},
|
|
||||||
{"name": "hasAttachment", "type": "boolean", "required": False, "frontendType": "checkbox",
|
|
||||||
"description": t("Nur mit Anhängen"), "default": False},
|
|
||||||
{"name": "filter", "type": "string", "required": False, "frontendType": "text",
|
|
||||||
"description": t("Erweitert: Filter-Text"), "default": ""},
|
|
||||||
],
|
],
|
||||||
"inputs": 1,
|
"inputs": 1,
|
||||||
"outputs": 1,
|
"outputs": 1,
|
||||||
"inputPorts": {0: {"accepts": ["Transit"]}},
|
"inputPorts": {0: {"accepts": ["Transit"]}},
|
||||||
"outputPorts": {0: {"schema": "EmailList"}},
|
"outputPorts": {0: {"schema": "EmailList", "dataPickOptions": EMAIL_LIST_DATA_PICK_OPTIONS}},
|
||||||
|
"paramMappers": ["emailCheckFilter"],
|
||||||
"meta": {"icon": "mdi-email-check", "color": "#1976D2", "usesAi": False},
|
"meta": {"icon": "mdi-email-check", "color": "#1976D2", "usesAi": False},
|
||||||
"_method": "outlook",
|
"_method": "outlook",
|
||||||
"_action": "readEmails",
|
"_action": "readEmails",
|
||||||
|
|
@ -40,32 +64,21 @@ EMAIL_NODES = [
|
||||||
"label": t("E-Mail suchen"),
|
"label": t("E-Mail suchen"),
|
||||||
"description": t("E-Mails suchen"),
|
"description": t("E-Mails suchen"),
|
||||||
"parameters": [
|
"parameters": [
|
||||||
{"name": "connectionReference", "type": "string", "required": True, "frontendType": "userConnection",
|
{"name": "connectionReference", "type": "str", "required": True, "frontendType": "userConnection",
|
||||||
"frontendOptions": {"authority": "msft"},
|
"frontendOptions": {"authority": "msft"},
|
||||||
"description": t("E-Mail-Konto Verbindung")},
|
"description": t("E-Mail-Konto Verbindung")},
|
||||||
{"name": "query", "type": "string", "required": False, "frontendType": "text",
|
{"name": "query", "type": "str", "required": True, "frontendType": "text",
|
||||||
"description": t("Suchbegriff"), "default": ""},
|
"description": t("Suchausdruck (z.B. 'from:max@example.com hasAttachments:true Rechnung')")},
|
||||||
{"name": "folder", "type": "string", "required": False, "frontendType": "text",
|
{"name": "folder", "type": "str", "required": False, "frontendType": "text",
|
||||||
"description": t("Ordner"), "default": "Inbox"},
|
"description": t("Ordner"), "default": "All"},
|
||||||
{"name": "limit", "type": "number", "required": False, "frontendType": "number",
|
{"name": "limit", "type": "int", "required": False, "frontendType": "number",
|
||||||
"description": t("Max E-Mails"), "default": 100},
|
"description": t("Max E-Mails"), "default": 100},
|
||||||
{"name": "fromAddress", "type": "string", "required": False, "frontendType": "text",
|
|
||||||
"description": t("Von Adresse"), "default": ""},
|
|
||||||
{"name": "toAddress", "type": "string", "required": False, "frontendType": "text",
|
|
||||||
"description": t("An Adresse"), "default": ""},
|
|
||||||
{"name": "subjectContains", "type": "string", "required": False, "frontendType": "text",
|
|
||||||
"description": t("Betreff enthält"), "default": ""},
|
|
||||||
{"name": "bodyContains", "type": "string", "required": False, "frontendType": "text",
|
|
||||||
"description": t("Inhalt enthält"), "default": ""},
|
|
||||||
{"name": "hasAttachment", "type": "boolean", "required": False, "frontendType": "checkbox",
|
|
||||||
"description": t("Mit Anhängen"), "default": False},
|
|
||||||
{"name": "filter", "type": "string", "required": False, "frontendType": "text",
|
|
||||||
"description": t("Erweitert: KQL-Filter"), "default": ""},
|
|
||||||
],
|
],
|
||||||
"inputs": 1,
|
"inputs": 1,
|
||||||
"outputs": 1,
|
"outputs": 1,
|
||||||
"inputPorts": {0: {"accepts": ["Transit"]}},
|
"inputPorts": {0: {"accepts": ["Transit"]}},
|
||||||
"outputPorts": {0: {"schema": "EmailList"}},
|
"outputPorts": {0: {"schema": "EmailList", "dataPickOptions": EMAIL_LIST_DATA_PICK_OPTIONS}},
|
||||||
|
"paramMappers": ["emailSearchQuery"],
|
||||||
"meta": {"icon": "mdi-email-search", "color": "#1976D2", "usesAi": False},
|
"meta": {"icon": "mdi-email-search", "color": "#1976D2", "usesAi": False},
|
||||||
"_method": "outlook",
|
"_method": "outlook",
|
||||||
"_action": "searchEmails",
|
"_action": "searchEmails",
|
||||||
|
|
@ -74,27 +87,32 @@ EMAIL_NODES = [
|
||||||
"id": "email.draftEmail",
|
"id": "email.draftEmail",
|
||||||
"category": "email",
|
"category": "email",
|
||||||
"label": t("E-Mail entwerfen"),
|
"label": t("E-Mail entwerfen"),
|
||||||
"description": t("E-Mail-Entwurf erstellen"),
|
"description": t(
|
||||||
|
"AI-gestützt einen E-Mail-Entwurf aus Kontext und optionalen Dokumenten erstellen"),
|
||||||
"parameters": [
|
"parameters": [
|
||||||
{"name": "connectionReference", "type": "string", "required": True, "frontendType": "userConnection",
|
{"name": "connectionReference", "type": "str", "required": True, "frontendType": "userConnection",
|
||||||
"frontendOptions": {"authority": "msft"},
|
"frontendOptions": {"authority": "msft"},
|
||||||
"description": t("E-Mail-Konto")},
|
"description": t("E-Mail-Konto")},
|
||||||
{"name": "subject", "type": "string", "required": True, "frontendType": "text",
|
{"name": "context", "type": "Any", "required": False, "frontendType": "templateTextarea",
|
||||||
"description": t("Betreff")},
|
"description": CONTEXT_BUILDER_PARAM_DESCRIPTION, "default": "",
|
||||||
{"name": "body", "type": "string", "required": True, "frontendType": "textarea",
|
"graphInherit": {"port": 0, "kind": "primaryTextRef"}},
|
||||||
"description": t("Inhalt")},
|
{"name": "to", "type": "str", "required": False, "frontendType": "text",
|
||||||
{"name": "to", "type": "string", "required": False, "frontendType": "text",
|
"description": t("Empfänger (komma-separiert, optional für Entwurf)"), "default": ""},
|
||||||
"description": t("Empfänger"), "default": ""},
|
{"name": "documentList", "type": "str", "required": False, "frontendType": "hidden",
|
||||||
{"name": "attachments", "type": "json", "required": False, "frontendType": "attachmentBuilder",
|
"description": t("Anhang-Dokumente (via Wire oder DataRef)"), "default": "",
|
||||||
"description": t(
|
"graphInherit": {"port": 0, "kind": "documentListWire"}},
|
||||||
"Anhänge: Liste von { contentRef | csvFromVariable | base64Content, name, mimeType }. "
|
{"name": "emailContent", "type": "str", "required": False, "frontendType": "hidden",
|
||||||
"Per Wire befüllbar (z.B. CSV aus data.consolidate)."),
|
"description": t("Direkt vorbereiteter Inhalt {subject, body, to} (via Wire — überspringt KI)"),
|
||||||
"default": []},
|
"default": ""},
|
||||||
|
{"name": "emailStyle", "type": "str", "required": False, "frontendType": "select",
|
||||||
|
"frontendOptions": {"options": ["formal", "casual", "business"]},
|
||||||
|
"description": t("Stil"), "default": "business"},
|
||||||
],
|
],
|
||||||
"inputs": 1,
|
"inputs": 1,
|
||||||
"outputs": 1,
|
"outputs": 1,
|
||||||
"inputPorts": {0: {"accepts": ["EmailDraft", "AiResult", "Transit", "ConsolidateResult", "DocumentList"]}},
|
"inputPorts": {0: {"accepts": ["EmailDraft", "AiResult", "Transit", "ConsolidateResult", "DocumentList"]}},
|
||||||
"outputPorts": {0: {"schema": "ActionResult"}},
|
"outputPorts": {0: {"schema": "ActionResult", "dataPickOptions": ACTION_RESULT_DATA_PICK_OPTIONS}},
|
||||||
|
"paramMappers": ["emailDraftContextFromSubjectBody"],
|
||||||
"meta": {"icon": "mdi-email-edit", "color": "#1976D2", "usesAi": False},
|
"meta": {"icon": "mdi-email-edit", "color": "#1976D2", "usesAi": False},
|
||||||
"_method": "outlook",
|
"_method": "outlook",
|
||||||
"_action": "composeAndDraftEmailWithContext",
|
"_action": "composeAndDraftEmailWithContext",
|
||||||
|
|
|
||||||
|
|
@ -3,33 +3,38 @@
|
||||||
|
|
||||||
from modules.shared.i18nRegistry import t
|
from modules.shared.i18nRegistry import t
|
||||||
|
|
||||||
|
from modules.features.graphicalEditor.nodeDefinitions.contextPickerHelp import (
|
||||||
|
CONTEXT_BUILDER_PARAM_DESCRIPTION,
|
||||||
|
)
|
||||||
|
from modules.features.graphicalEditor.nodeDefinitions.ai import DOCUMENT_LIST_DATA_PICK_OPTIONS
|
||||||
|
|
||||||
FILE_NODES = [
|
FILE_NODES = [
|
||||||
{
|
{
|
||||||
"id": "file.create",
|
"id": "file.create",
|
||||||
"category": "file",
|
"category": "file",
|
||||||
"label": t("Datei erstellen"),
|
"label": t("Datei erstellen"),
|
||||||
"description": t("Erstellt eine Datei aus Kontext (Text/Markdown von KI)."),
|
"description": t(
|
||||||
|
"Erstellt eine Datei aus Kontext. Nach „Inhalt extrahieren“: „response“ für reinen Text; "
|
||||||
|
"„Nur Bilder“ liefert alle extrahierten Bilder — Datei erstellen fasst sie zu einer PDF oder DOCX "
|
||||||
|
"(Ausgabeformat pdf oder docx wählen)."
|
||||||
|
),
|
||||||
"parameters": [
|
"parameters": [
|
||||||
{"name": "contentSources", "type": "json", "required": False, "frontendType": "json",
|
{"name": "outputFormat", "type": "str", "required": True, "frontendType": "select",
|
||||||
"description": t("Kontext-Quellen"), "default": []},
|
|
||||||
{"name": "outputFormat", "type": "string", "required": True, "frontendType": "select",
|
|
||||||
"frontendOptions": {"options": ["docx", "pdf", "txt", "html", "md"]},
|
"frontendOptions": {"options": ["docx", "pdf", "txt", "html", "md"]},
|
||||||
"description": t("Ausgabeformat"), "default": "docx"},
|
"description": t("Ausgabeformat"), "default": "docx"},
|
||||||
{"name": "title", "type": "string", "required": False, "frontendType": "text",
|
{"name": "title", "type": "str", "required": False, "frontendType": "text",
|
||||||
"description": t("Dokumenttitel")},
|
"description": t("Dokumenttitel")},
|
||||||
{"name": "templateName", "type": "string", "required": False, "frontendType": "select",
|
{"name": "folderId", "type": "str", "required": False, "frontendType": "userFileFolder",
|
||||||
"frontendOptions": {"options": ["default", "corporate", "minimal"]},
|
"description": t("Zielordner in Meine Dateien"),
|
||||||
"description": t("Stil-Vorlage")},
|
"default": ""},
|
||||||
{"name": "language", "type": "string", "required": False, "frontendType": "select",
|
{"name": "context", "type": "Any", "required": False, "frontendType": "contextBuilder",
|
||||||
"frontendOptions": {"options": ["de", "en", "fr"]},
|
"description": CONTEXT_BUILDER_PARAM_DESCRIPTION, "default": "",
|
||||||
"description": t("Sprache"), "default": "de"},
|
"graphInherit": {"port": 0, "kind": "primaryTextRef"}},
|
||||||
{"name": "context", "type": "string", "required": False, "frontendType": "hidden",
|
|
||||||
"description": t("Inhalt (via Wire oder DataRef)"), "default": ""},
|
|
||||||
],
|
],
|
||||||
"inputs": 1,
|
"inputs": 1,
|
||||||
"outputs": 1,
|
"outputs": 1,
|
||||||
"inputPorts": {0: {"accepts": ["AiResult", "TextResult", "Transit"]}},
|
"inputPorts": {0: {"accepts": ["AiResult", "TextResult", "Transit", "FormPayload", "LoopItem", "ActionResult"]}},
|
||||||
"outputPorts": {0: {"schema": "DocumentList"}},
|
"outputPorts": {0: {"schema": "DocumentList", "dataPickOptions": DOCUMENT_LIST_DATA_PICK_OPTIONS}},
|
||||||
"meta": {"icon": "mdi-file-plus-outline", "color": "#2196F3", "usesAi": False},
|
"meta": {"icon": "mdi-file-plus-outline", "color": "#2196F3", "usesAi": False},
|
||||||
"_method": "file",
|
"_method": "file",
|
||||||
"_action": "create",
|
"_action": "create",
|
||||||
|
|
|
||||||
|
|
@ -3,25 +3,101 @@
|
||||||
|
|
||||||
from modules.shared.i18nRegistry import t
|
from modules.shared.i18nRegistry import t
|
||||||
|
|
||||||
|
LOOP_ITEM_DATA_PICK_OPTIONS = [
|
||||||
|
{
|
||||||
|
"path": ["currentItem"],
|
||||||
|
"pickerLabel": t("Aktuelles Element"),
|
||||||
|
"detail": t("Das aktuelle Iterationselement."),
|
||||||
|
"recommended": True,
|
||||||
|
"type": "Any",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"path": ["currentIndex"],
|
||||||
|
"pickerLabel": t("Aktueller Index"),
|
||||||
|
"detail": t("0-basierter Index der aktuellen Iteration."),
|
||||||
|
"recommended": False,
|
||||||
|
"type": "int",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"path": ["items"],
|
||||||
|
"pickerLabel": t("Alle Elemente"),
|
||||||
|
"detail": t("Die vollständige Quellliste."),
|
||||||
|
"recommended": False,
|
||||||
|
"type": "List[Any]",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"path": ["count"],
|
||||||
|
"pickerLabel": t("Gesamtanzahl"),
|
||||||
|
"detail": t("Anzahl der Elemente in der Schleife."),
|
||||||
|
"recommended": False,
|
||||||
|
"type": "int",
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
|
MERGE_RESULT_DATA_PICK_OPTIONS = [
|
||||||
|
{
|
||||||
|
"path": ["merged"],
|
||||||
|
"pickerLabel": t("Zusammengeführt"),
|
||||||
|
"detail": t("Zusammengeführtes Ergebnis (je nach Modus)."),
|
||||||
|
"recommended": True,
|
||||||
|
"type": "Dict",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"path": ["first"],
|
||||||
|
"pickerLabel": t("Erster Zweig"),
|
||||||
|
"detail": t("Daten vom ersten verbundenen Eingang (Modus „first“)."),
|
||||||
|
"recommended": False,
|
||||||
|
"type": "Any",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"path": ["inputs"],
|
||||||
|
"pickerLabel": t("Alle Eingänge"),
|
||||||
|
"detail": t("Dict der Eingabeobjekte nach Port-Index."),
|
||||||
|
"recommended": False,
|
||||||
|
"type": "Dict[int,Any]",
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
|
# Ports, die typische Schritt-Ausgaben durchreichen (nicht nur leerer Transit).
|
||||||
|
_FLOW_INPUT_SCHEMAS = [
|
||||||
|
"Transit",
|
||||||
|
"FormPayload",
|
||||||
|
"AiResult",
|
||||||
|
"TextResult",
|
||||||
|
"ActionResult",
|
||||||
|
"DocumentList",
|
||||||
|
"FileList",
|
||||||
|
"EmailList",
|
||||||
|
"TaskList",
|
||||||
|
"QueryResult",
|
||||||
|
"MergeResult",
|
||||||
|
"LoopItem",
|
||||||
|
"BoolResult",
|
||||||
|
"UdmDocument",
|
||||||
|
]
|
||||||
|
|
||||||
FLOW_NODES = [
|
FLOW_NODES = [
|
||||||
{
|
{
|
||||||
"id": "flow.ifElse",
|
"id": "flow.ifElse",
|
||||||
"category": "flow",
|
"category": "flow",
|
||||||
"label": t("Wenn / Sonst"),
|
"label": t("Wenn / Sonst"),
|
||||||
"description": t("Verzweigung nach Bedingung"),
|
"description": t(
|
||||||
|
"Verzweigt anhand einer Bedingung auf ein vorheriges Feld oder einen Ausdruck. "
|
||||||
|
"Die Daten vom Eingangskanal werden an den gewählten Ausgang durchgereicht."
|
||||||
|
),
|
||||||
"parameters": [
|
"parameters": [
|
||||||
{
|
{
|
||||||
"name": "condition",
|
"name": "condition",
|
||||||
"type": "string",
|
"type": "json",
|
||||||
"required": True,
|
"required": True,
|
||||||
"frontendType": "condition",
|
"frontendType": "condition",
|
||||||
"description": t("Bedingung"),
|
"description": t("Bedingung: Feld aus einem vorherigen Schritt und Vergleich"),
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
"inputs": 1,
|
"inputs": 1,
|
||||||
"outputs": 2,
|
"outputs": 2,
|
||||||
"outputLabels": [t("Ja"), t("Nein")],
|
"outputLabels": [t("Ja"), t("Nein")],
|
||||||
"inputPorts": {0: {"accepts": ["Transit"]}},
|
"inputPorts": {0: {"accepts": list(_FLOW_INPUT_SCHEMAS)}},
|
||||||
"outputPorts": {0: {"schema": "Transit"}, 1: {"schema": "Transit"}},
|
"outputPorts": {0: {"schema": "Transit"}, 1: {"schema": "Transit"}},
|
||||||
"executor": "flow",
|
"executor": "flow",
|
||||||
"meta": {"icon": "mdi-source-branch", "color": "#FF9800", "usesAi": False},
|
"meta": {"icon": "mdi-source-branch", "color": "#FF9800", "usesAi": False},
|
||||||
|
|
@ -30,26 +106,29 @@ FLOW_NODES = [
|
||||||
"id": "flow.switch",
|
"id": "flow.switch",
|
||||||
"category": "flow",
|
"category": "flow",
|
||||||
"label": t("Switch"),
|
"label": t("Switch"),
|
||||||
"description": t("Mehrere Zweige nach Wert"),
|
"description": t(
|
||||||
|
"Mehrere Zweige nach einem Wert aus einem vorherigen Schritt (Data Picker). "
|
||||||
|
"Definiere Fälle mit Vergleichsoperator; der Eingang wird an den ersten passenden Zweig durchgereicht."
|
||||||
|
),
|
||||||
"parameters": [
|
"parameters": [
|
||||||
{
|
{
|
||||||
"name": "value",
|
"name": "value",
|
||||||
"type": "string",
|
"type": "Any",
|
||||||
"required": True,
|
"required": True,
|
||||||
"frontendType": "text",
|
"frontendType": "dataRef",
|
||||||
"description": t("Zu vergleichender Wert"),
|
"description": t("Wert zum Vergleichen (Feld aus einem vorherigen Schritt)"),
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"name": "cases",
|
"name": "cases",
|
||||||
"type": "array",
|
"type": "array",
|
||||||
"required": False,
|
"required": False,
|
||||||
"frontendType": "caseList",
|
"frontendType": "caseList",
|
||||||
"description": t("Fälle"),
|
"description": t("Fälle: Operator und Vergleichswert"),
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
"inputs": 1,
|
"inputs": 1,
|
||||||
"outputs": 1,
|
"outputs": 1,
|
||||||
"inputPorts": {0: {"accepts": ["Transit"]}},
|
"inputPorts": {0: {"accepts": list(_FLOW_INPUT_SCHEMAS)}},
|
||||||
"outputPorts": {0: {"schema": "Transit"}},
|
"outputPorts": {0: {"schema": "Transit"}},
|
||||||
"executor": "flow",
|
"executor": "flow",
|
||||||
"meta": {"icon": "mdi-swap-horizontal", "color": "#FF9800", "usesAi": False},
|
"meta": {"icon": "mdi-swap-horizontal", "color": "#FF9800", "usesAi": False},
|
||||||
|
|
@ -57,39 +136,45 @@ FLOW_NODES = [
|
||||||
{
|
{
|
||||||
"id": "flow.loop",
|
"id": "flow.loop",
|
||||||
"category": "flow",
|
"category": "flow",
|
||||||
"label": t("Schleife / Für Jedes"),
|
"label": t("Schleife / Für jedes"),
|
||||||
"description": t("Über Array-Elemente oder UDM-Strukturebenen iterieren"),
|
"description": t(
|
||||||
|
"Iteriert über ein Array aus einem vorherigen Schritt (z. B. documente, Zeilen, Listeneinträge). "
|
||||||
|
"Optional: UDM-Ebene für strukturierte Dokumente."
|
||||||
|
),
|
||||||
"parameters": [
|
"parameters": [
|
||||||
{
|
{
|
||||||
"name": "items",
|
"name": "items",
|
||||||
"type": "string",
|
"type": "Any",
|
||||||
"required": True,
|
"required": True,
|
||||||
"frontendType": "text",
|
"frontendType": "dataRef",
|
||||||
"description": t("Pfad zum Array"),
|
"description": t("Liste oder Sammlung zum Durchlaufen (im Data Picker wählen)"),
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"name": "level",
|
"name": "level",
|
||||||
"type": "string",
|
"type": "str",
|
||||||
"required": False,
|
"required": False,
|
||||||
"frontendType": "select",
|
"frontendType": "select",
|
||||||
"frontendOptions": {"options": ["auto", "documents", "structuralNodes", "contentBlocks"]},
|
"frontendOptions": {"options": ["auto", "documents", "structuralNodes", "contentBlocks"]},
|
||||||
"description": t("UDM-Iterationsebene"),
|
"description": t("Nur bei UDM-Daten: welche Strukturebene als Elemente verwendet wird"),
|
||||||
"default": "auto",
|
"default": "auto",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"name": "concurrency",
|
"name": "concurrency",
|
||||||
"type": "number",
|
"type": "int",
|
||||||
"required": False,
|
"required": False,
|
||||||
"frontendType": "number",
|
"frontendType": "number",
|
||||||
"frontendOptions": {"min": 1, "max": 20},
|
"frontendOptions": {"min": 1, "max": 20},
|
||||||
"description": t("Parallele Iterationen (1 = sequentiell)"),
|
"description": t("Parallele Durchläufe (1 = nacheinander)"),
|
||||||
"default": 1,
|
"default": 1,
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
"inputs": 1,
|
"inputs": 1,
|
||||||
"outputs": 1,
|
"outputs": 1,
|
||||||
"inputPorts": {0: {"accepts": ["Transit", "UdmDocument"]}},
|
"inputPorts": {0: {"accepts": [
|
||||||
"outputPorts": {0: {"schema": "LoopItem"}},
|
"Transit", "UdmDocument", "EmailList", "DocumentList", "FileList", "TaskList",
|
||||||
|
"ActionResult", "AiResult", "QueryResult", "FormPayload",
|
||||||
|
]}},
|
||||||
|
"outputPorts": {0: {"schema": "LoopItem", "dataPickOptions": LOOP_ITEM_DATA_PICK_OPTIONS}},
|
||||||
"executor": "flow",
|
"executor": "flow",
|
||||||
"meta": {"icon": "mdi-repeat", "color": "#FF9800", "usesAi": False},
|
"meta": {"icon": "mdi-repeat", "color": "#FF9800", "usesAi": False},
|
||||||
},
|
},
|
||||||
|
|
@ -97,31 +182,37 @@ FLOW_NODES = [
|
||||||
"id": "flow.merge",
|
"id": "flow.merge",
|
||||||
"category": "flow",
|
"category": "flow",
|
||||||
"label": t("Zusammenführen"),
|
"label": t("Zusammenführen"),
|
||||||
"description": t("Mehrere Zweige zusammenführen (2-5 Eingänge)"),
|
"description": t(
|
||||||
|
"Führt 2–5 Zweige zusammen, wenn alle verbunden sind. "
|
||||||
|
"Modus legt fest, wie die Eingabeobjekte im Ergebnis kombiniert werden."
|
||||||
|
),
|
||||||
"parameters": [
|
"parameters": [
|
||||||
{
|
{
|
||||||
"name": "mode",
|
"name": "mode",
|
||||||
"type": "string",
|
"type": "str",
|
||||||
"required": False,
|
"required": False,
|
||||||
"frontendType": "select",
|
"frontendType": "select",
|
||||||
"frontendOptions": {"options": ["first", "all", "append"]},
|
"frontendOptions": {"options": ["first", "all", "append"]},
|
||||||
"description": t("Zusammenführungsmodus"),
|
"description": t("first: erster Zweig; all: Dict-Felder zusammenführen; append: Listen anhängen"),
|
||||||
"default": "first",
|
"default": "first",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"name": "inputCount",
|
"name": "inputCount",
|
||||||
"type": "number",
|
"type": "int",
|
||||||
"required": False,
|
"required": False,
|
||||||
"frontendType": "number",
|
"frontendType": "number",
|
||||||
"frontendOptions": {"min": 2, "max": 5},
|
"frontendOptions": {"min": 2, "max": 5},
|
||||||
"description": t("Anzahl Eingänge"),
|
"description": t("Anzahl Eingänge dieses Nodes (2–5)"),
|
||||||
"default": 2,
|
"default": 2,
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
"inputs": 2,
|
"inputs": 2,
|
||||||
"outputs": 1,
|
"outputs": 1,
|
||||||
"inputPorts": {0: {"accepts": ["Transit"]}, 1: {"accepts": ["Transit"]}},
|
"inputPorts": {
|
||||||
"outputPorts": {0: {"schema": "MergeResult"}},
|
0: {"accepts": list(_FLOW_INPUT_SCHEMAS)},
|
||||||
|
1: {"accepts": list(_FLOW_INPUT_SCHEMAS)},
|
||||||
|
},
|
||||||
|
"outputPorts": {0: {"schema": "MergeResult", "dataPickOptions": MERGE_RESULT_DATA_PICK_OPTIONS}},
|
||||||
"executor": "flow",
|
"executor": "flow",
|
||||||
"meta": {"icon": "mdi-call-merge", "color": "#FF9800", "usesAi": False},
|
"meta": {"icon": "mdi-call-merge", "color": "#FF9800", "usesAi": False},
|
||||||
},
|
},
|
||||||
|
|
|
||||||
|
|
@ -3,6 +3,47 @@
|
||||||
|
|
||||||
from modules.shared.i18nRegistry import t
|
from modules.shared.i18nRegistry import t
|
||||||
|
|
||||||
|
from modules.features.graphicalEditor.nodeDefinitions.ai import DOCUMENT_LIST_DATA_PICK_OPTIONS
|
||||||
|
|
||||||
|
BOOL_RESULT_DATA_PICK_OPTIONS = [
|
||||||
|
{
|
||||||
|
"path": ["result"],
|
||||||
|
"pickerLabel": t("Ergebnis"),
|
||||||
|
"detail": t("Boolesches Ergebnis (z. B. Genehmigung ja/nein)."),
|
||||||
|
"recommended": True,
|
||||||
|
"type": "bool",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"path": ["reason"],
|
||||||
|
"pickerLabel": t("Begründung"),
|
||||||
|
"detail": t("Optionale textuelle Begründung."),
|
||||||
|
"recommended": False,
|
||||||
|
"type": "str",
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
|
TEXT_RESULT_DATA_PICK_OPTIONS = [
|
||||||
|
{
|
||||||
|
"path": ["text"],
|
||||||
|
"pickerLabel": t("Text"),
|
||||||
|
"detail": t("Vom Benutzer eingegebener oder gewählter Text."),
|
||||||
|
"recommended": True,
|
||||||
|
"type": "str",
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
|
# Canonical form field types — single source of truth.
|
||||||
|
# portType maps to the PORT_TYPE_CATALOG primitive used by DataPicker / validateGraph.
|
||||||
|
FORM_FIELD_TYPES = [
|
||||||
|
{"id": "text", "label": "Text (einzeilig)", "portType": "str"},
|
||||||
|
{"id": "textarea", "label": "Text (mehrzeilig)", "portType": "str"},
|
||||||
|
{"id": "number", "label": "Zahl", "portType": "int"},
|
||||||
|
{"id": "boolean", "label": "Ja/Nein", "portType": "bool"},
|
||||||
|
{"id": "date", "label": "Datum", "portType": "str"},
|
||||||
|
{"id": "email", "label": "E-Mail", "portType": "str"},
|
||||||
|
{"id": "select", "label": "Auswahl", "portType": "str"},
|
||||||
|
]
|
||||||
|
|
||||||
INPUT_NODES = [
|
INPUT_NODES = [
|
||||||
{
|
{
|
||||||
"id": "input.form",
|
"id": "input.form",
|
||||||
|
|
@ -22,7 +63,7 @@ INPUT_NODES = [
|
||||||
"inputs": 1,
|
"inputs": 1,
|
||||||
"outputs": 1,
|
"outputs": 1,
|
||||||
"inputPorts": {0: {"accepts": ["Transit"]}},
|
"inputPorts": {0: {"accepts": ["Transit"]}},
|
||||||
"outputPorts": {0: {"schema": "FormPayload", "dynamic": True, "deriveFrom": "fields"}},
|
"outputPorts": {0: {"schema": {"kind": "fromGraph", "parameter": "fields"}}},
|
||||||
"executor": "input",
|
"executor": "input",
|
||||||
"meta": {"icon": "mdi-form-textbox", "color": "#9C27B0", "usesAi": False},
|
"meta": {"icon": "mdi-form-textbox", "color": "#9C27B0", "usesAi": False},
|
||||||
},
|
},
|
||||||
|
|
@ -32,18 +73,18 @@ INPUT_NODES = [
|
||||||
"label": t("Genehmigung"),
|
"label": t("Genehmigung"),
|
||||||
"description": t("Benutzer genehmigt oder lehnt ab"),
|
"description": t("Benutzer genehmigt oder lehnt ab"),
|
||||||
"parameters": [
|
"parameters": [
|
||||||
{"name": "title", "type": "string", "required": True, "frontendType": "text",
|
{"name": "title", "type": "str", "required": True, "frontendType": "text",
|
||||||
"description": t("Genehmigungstitel")},
|
"description": t("Genehmigungstitel")},
|
||||||
{"name": "description", "type": "string", "required": False, "frontendType": "textarea",
|
{"name": "description", "type": "str", "required": False, "frontendType": "textarea",
|
||||||
"description": t("Was genehmigt werden soll")},
|
"description": t("Was genehmigt werden soll")},
|
||||||
{"name": "approvalType", "type": "string", "required": False, "frontendType": "select",
|
{"name": "approvalType", "type": "str", "required": False, "frontendType": "select",
|
||||||
"frontendOptions": {"options": ["generic", "document"]},
|
"frontendOptions": {"options": ["generic", "document"]},
|
||||||
"description": t("Typ: document oder generic"), "default": "generic"},
|
"description": t("Typ: document oder generic"), "default": "generic"},
|
||||||
],
|
],
|
||||||
"inputs": 1,
|
"inputs": 1,
|
||||||
"outputs": 1,
|
"outputs": 1,
|
||||||
"inputPorts": {0: {"accepts": ["Transit"]}},
|
"inputPorts": {0: {"accepts": ["Transit"]}},
|
||||||
"outputPorts": {0: {"schema": "BoolResult"}},
|
"outputPorts": {0: {"schema": "BoolResult", "dataPickOptions": BOOL_RESULT_DATA_PICK_OPTIONS}},
|
||||||
"executor": "input",
|
"executor": "input",
|
||||||
"meta": {"icon": "mdi-check-decagram", "color": "#4CAF50", "usesAi": False},
|
"meta": {"icon": "mdi-check-decagram", "color": "#4CAF50", "usesAi": False},
|
||||||
},
|
},
|
||||||
|
|
@ -53,20 +94,20 @@ INPUT_NODES = [
|
||||||
"label": t("Upload"),
|
"label": t("Upload"),
|
||||||
"description": t("Benutzer lädt Datei(en) hoch"),
|
"description": t("Benutzer lädt Datei(en) hoch"),
|
||||||
"parameters": [
|
"parameters": [
|
||||||
{"name": "accept", "type": "string", "required": False, "frontendType": "text",
|
{"name": "accept", "type": "str", "required": False, "frontendType": "text",
|
||||||
"description": t("Accept-String"), "default": ""},
|
"description": t("Accept-String"), "default": ""},
|
||||||
{"name": "allowedTypes", "type": "json", "required": False, "frontendType": "multiselect",
|
{"name": "allowedTypes", "type": "json", "required": False, "frontendType": "multiselect",
|
||||||
"frontendOptions": {"options": ["pdf", "docx", "xlsx", "pptx", "txt", "csv", "jpg", "png", "gif"]},
|
"frontendOptions": {"options": ["pdf", "docx", "xlsx", "pptx", "txt", "csv", "jpg", "png", "gif"]},
|
||||||
"description": t("Ausgewählte Dateitypen"), "default": []},
|
"description": t("Ausgewählte Dateitypen"), "default": []},
|
||||||
{"name": "maxSize", "type": "number", "required": False, "frontendType": "number",
|
{"name": "maxSize", "type": "int", "required": False, "frontendType": "number",
|
||||||
"description": t("Max. Dateigröße in MB"), "default": 10},
|
"description": t("Max. Dateigröße in MB"), "default": 10},
|
||||||
{"name": "multiple", "type": "boolean", "required": False, "frontendType": "checkbox",
|
{"name": "multiple", "type": "bool", "required": False, "frontendType": "checkbox",
|
||||||
"description": t("Mehrere Dateien erlauben"), "default": False},
|
"description": t("Mehrere Dateien erlauben"), "default": False},
|
||||||
],
|
],
|
||||||
"inputs": 1,
|
"inputs": 1,
|
||||||
"outputs": 1,
|
"outputs": 1,
|
||||||
"inputPorts": {0: {"accepts": ["Transit"]}},
|
"inputPorts": {0: {"accepts": ["Transit"]}},
|
||||||
"outputPorts": {0: {"schema": "DocumentList"}},
|
"outputPorts": {0: {"schema": "DocumentList", "dataPickOptions": DOCUMENT_LIST_DATA_PICK_OPTIONS}},
|
||||||
"executor": "input",
|
"executor": "input",
|
||||||
"meta": {"icon": "mdi-upload", "color": "#2196F3", "usesAi": False},
|
"meta": {"icon": "mdi-upload", "color": "#2196F3", "usesAi": False},
|
||||||
},
|
},
|
||||||
|
|
@ -76,15 +117,15 @@ INPUT_NODES = [
|
||||||
"label": t("Kommentar"),
|
"label": t("Kommentar"),
|
||||||
"description": t("Benutzer fügt einen Kommentar hinzu"),
|
"description": t("Benutzer fügt einen Kommentar hinzu"),
|
||||||
"parameters": [
|
"parameters": [
|
||||||
{"name": "placeholder", "type": "string", "required": False, "frontendType": "text",
|
{"name": "placeholder", "type": "str", "required": False, "frontendType": "text",
|
||||||
"description": t("Platzhalter"), "default": ""},
|
"description": t("Platzhalter"), "default": ""},
|
||||||
{"name": "required", "type": "boolean", "required": False, "frontendType": "checkbox",
|
{"name": "required", "type": "bool", "required": False, "frontendType": "checkbox",
|
||||||
"description": t("Kommentar erforderlich"), "default": True},
|
"description": t("Kommentar erforderlich"), "default": True},
|
||||||
],
|
],
|
||||||
"inputs": 1,
|
"inputs": 1,
|
||||||
"outputs": 1,
|
"outputs": 1,
|
||||||
"inputPorts": {0: {"accepts": ["Transit"]}},
|
"inputPorts": {0: {"accepts": ["Transit"]}},
|
||||||
"outputPorts": {0: {"schema": "TextResult"}},
|
"outputPorts": {0: {"schema": "TextResult", "dataPickOptions": TEXT_RESULT_DATA_PICK_OPTIONS}},
|
||||||
"executor": "input",
|
"executor": "input",
|
||||||
"meta": {"icon": "mdi-comment-text", "color": "#FF9800", "usesAi": False},
|
"meta": {"icon": "mdi-comment-text", "color": "#FF9800", "usesAi": False},
|
||||||
},
|
},
|
||||||
|
|
@ -94,16 +135,16 @@ INPUT_NODES = [
|
||||||
"label": t("Prüfung"),
|
"label": t("Prüfung"),
|
||||||
"description": t("Benutzer prüft Inhalt"),
|
"description": t("Benutzer prüft Inhalt"),
|
||||||
"parameters": [
|
"parameters": [
|
||||||
{"name": "contentRef", "type": "string", "required": True, "frontendType": "text",
|
{"name": "contentRef", "type": "str", "required": True, "frontendType": "text",
|
||||||
"description": t("Referenz auf Inhalt")},
|
"description": t("Referenz auf Inhalt")},
|
||||||
{"name": "reviewType", "type": "string", "required": False, "frontendType": "select",
|
{"name": "reviewType", "type": "str", "required": False, "frontendType": "select",
|
||||||
"frontendOptions": {"options": ["generic", "document"]},
|
"frontendOptions": {"options": ["generic", "document"]},
|
||||||
"description": t("Art der Prüfung"), "default": "generic"},
|
"description": t("Art der Prüfung"), "default": "generic"},
|
||||||
],
|
],
|
||||||
"inputs": 1,
|
"inputs": 1,
|
||||||
"outputs": 1,
|
"outputs": 1,
|
||||||
"inputPorts": {0: {"accepts": ["Transit"]}},
|
"inputPorts": {0: {"accepts": ["Transit"]}},
|
||||||
"outputPorts": {0: {"schema": "BoolResult"}},
|
"outputPorts": {0: {"schema": "BoolResult", "dataPickOptions": BOOL_RESULT_DATA_PICK_OPTIONS}},
|
||||||
"executor": "input",
|
"executor": "input",
|
||||||
"meta": {"icon": "mdi-magnify-scan", "color": "#673AB7", "usesAi": False},
|
"meta": {"icon": "mdi-magnify-scan", "color": "#673AB7", "usesAi": False},
|
||||||
},
|
},
|
||||||
|
|
@ -115,13 +156,13 @@ INPUT_NODES = [
|
||||||
"parameters": [
|
"parameters": [
|
||||||
{"name": "options", "type": "json", "required": True, "frontendType": "keyValueRows",
|
{"name": "options", "type": "json", "required": True, "frontendType": "keyValueRows",
|
||||||
"description": t("Optionen"), "default": []},
|
"description": t("Optionen"), "default": []},
|
||||||
{"name": "multiple", "type": "boolean", "required": False, "frontendType": "checkbox",
|
{"name": "multiple", "type": "bool", "required": False, "frontendType": "checkbox",
|
||||||
"description": t("Mehrfachauswahl erlauben"), "default": False},
|
"description": t("Mehrfachauswahl erlauben"), "default": False},
|
||||||
],
|
],
|
||||||
"inputs": 1,
|
"inputs": 1,
|
||||||
"outputs": 1,
|
"outputs": 1,
|
||||||
"inputPorts": {0: {"accepts": ["Transit"]}},
|
"inputPorts": {0: {"accepts": ["Transit"]}},
|
||||||
"outputPorts": {0: {"schema": "TextResult"}},
|
"outputPorts": {0: {"schema": "TextResult", "dataPickOptions": TEXT_RESULT_DATA_PICK_OPTIONS}},
|
||||||
"executor": "input",
|
"executor": "input",
|
||||||
"meta": {"icon": "mdi-format-list-checks", "color": "#009688", "usesAi": False},
|
"meta": {"icon": "mdi-format-list-checks", "color": "#009688", "usesAi": False},
|
||||||
},
|
},
|
||||||
|
|
@ -131,17 +172,17 @@ INPUT_NODES = [
|
||||||
"label": t("Bestätigung"),
|
"label": t("Bestätigung"),
|
||||||
"description": t("Benutzer bestätigt Ja/Nein"),
|
"description": t("Benutzer bestätigt Ja/Nein"),
|
||||||
"parameters": [
|
"parameters": [
|
||||||
{"name": "question", "type": "string", "required": True, "frontendType": "text",
|
{"name": "question", "type": "str", "required": True, "frontendType": "text",
|
||||||
"description": t("Zu bestätigende Frage")},
|
"description": t("Zu bestätigende Frage")},
|
||||||
{"name": "confirmLabel", "type": "string", "required": False, "frontendType": "text",
|
{"name": "confirmLabel", "type": "str", "required": False, "frontendType": "text",
|
||||||
"description": t("Label für Bestätigen-Button"), "default": "Confirm"},
|
"description": t("Label für Bestätigen-Button"), "default": "Confirm"},
|
||||||
{"name": "rejectLabel", "type": "string", "required": False, "frontendType": "text",
|
{"name": "rejectLabel", "type": "str", "required": False, "frontendType": "text",
|
||||||
"description": t("Label für Ablehnen-Button"), "default": "Reject"},
|
"description": t("Label für Ablehnen-Button"), "default": "Reject"},
|
||||||
],
|
],
|
||||||
"inputs": 1,
|
"inputs": 1,
|
||||||
"outputs": 1,
|
"outputs": 1,
|
||||||
"inputPorts": {0: {"accepts": ["Transit"]}},
|
"inputPorts": {0: {"accepts": ["Transit"]}},
|
||||||
"outputPorts": {0: {"schema": "BoolResult"}},
|
"outputPorts": {0: {"schema": "BoolResult", "dataPickOptions": BOOL_RESULT_DATA_PICK_OPTIONS}},
|
||||||
"executor": "input",
|
"executor": "input",
|
||||||
"meta": {"icon": "mdi-checkbox-marked-circle", "color": "#8BC34A", "usesAi": False},
|
"meta": {"icon": "mdi-checkbox-marked-circle", "color": "#8BC34A", "usesAi": False},
|
||||||
},
|
},
|
||||||
|
|
|
||||||
|
|
@ -4,6 +4,21 @@
|
||||||
|
|
||||||
from modules.shared.i18nRegistry import t
|
from modules.shared.i18nRegistry import t
|
||||||
|
|
||||||
|
from modules.features.graphicalEditor.nodeDefinitions.ai import ACTION_RESULT_DATA_PICK_OPTIONS
|
||||||
|
|
||||||
|
# Typed FeatureInstance binding (replaces legacy `string, hidden`).
|
||||||
|
# - type FeatureInstanceRef[redmine] is filtered by the DataPicker.
|
||||||
|
# - frontendType "featureInstance" is rendered by FeatureInstancePicker which
|
||||||
|
# loads /options/feature.instance?featureCode=redmine for the current mandate.
|
||||||
|
_REDMINE_INSTANCE_PARAM = {
|
||||||
|
"name": "featureInstanceId",
|
||||||
|
"type": "FeatureInstanceRef[redmine]",
|
||||||
|
"required": True,
|
||||||
|
"frontendType": "featureInstance",
|
||||||
|
"frontendOptions": {"featureCode": "redmine"},
|
||||||
|
"description": t("Redmine-Mandant"),
|
||||||
|
}
|
||||||
|
|
||||||
REDMINE_NODES = [
|
REDMINE_NODES = [
|
||||||
{
|
{
|
||||||
"id": "redmine.readTicket",
|
"id": "redmine.readTicket",
|
||||||
|
|
@ -11,15 +26,14 @@ REDMINE_NODES = [
|
||||||
"label": t("Ticket lesen"),
|
"label": t("Ticket lesen"),
|
||||||
"description": t("Einzelnes Redmine-Ticket aus dem Mirror laden."),
|
"description": t("Einzelnes Redmine-Ticket aus dem Mirror laden."),
|
||||||
"parameters": [
|
"parameters": [
|
||||||
{"name": "featureInstanceId", "type": "string", "required": True, "frontendType": "hidden",
|
dict(_REDMINE_INSTANCE_PARAM),
|
||||||
"description": t("Redmine Feature-Instanz-ID")},
|
{"name": "ticketId", "type": "int", "required": True, "frontendType": "number",
|
||||||
{"name": "ticketId", "type": "number", "required": True, "frontendType": "number",
|
|
||||||
"description": t("Redmine-Ticket-ID")},
|
"description": t("Redmine-Ticket-ID")},
|
||||||
],
|
],
|
||||||
"inputs": 1,
|
"inputs": 1,
|
||||||
"outputs": 1,
|
"outputs": 1,
|
||||||
"inputPorts": {0: {"accepts": ["Transit"]}},
|
"inputPorts": {0: {"accepts": ["Transit"]}},
|
||||||
"outputPorts": {0: {"schema": "ActionResult"}},
|
"outputPorts": {0: {"schema": "ActionResult", "dataPickOptions": ACTION_RESULT_DATA_PICK_OPTIONS}},
|
||||||
"meta": {"icon": "mdi-ticket-outline", "color": "#4A6FA5", "usesAi": False},
|
"meta": {"icon": "mdi-ticket-outline", "color": "#4A6FA5", "usesAi": False},
|
||||||
"_method": "redmine",
|
"_method": "redmine",
|
||||||
"_action": "readTicket",
|
"_action": "readTicket",
|
||||||
|
|
@ -30,25 +44,24 @@ REDMINE_NODES = [
|
||||||
"label": t("Tickets auflisten"),
|
"label": t("Tickets auflisten"),
|
||||||
"description": t("Tickets aus dem lokalen Mirror mit Filtern (Tracker, Status, Zeitraum, Zuweisung)."),
|
"description": t("Tickets aus dem lokalen Mirror mit Filtern (Tracker, Status, Zeitraum, Zuweisung)."),
|
||||||
"parameters": [
|
"parameters": [
|
||||||
{"name": "featureInstanceId", "type": "string", "required": True, "frontendType": "hidden",
|
dict(_REDMINE_INSTANCE_PARAM),
|
||||||
"description": t("Redmine Feature-Instanz-ID")},
|
{"name": "trackerIds", "type": "str", "required": False, "frontendType": "text",
|
||||||
{"name": "trackerIds", "type": "string", "required": False, "frontendType": "text",
|
|
||||||
"description": t("Tracker-IDs (Komma-separiert)"), "default": ""},
|
"description": t("Tracker-IDs (Komma-separiert)"), "default": ""},
|
||||||
{"name": "status", "type": "string", "required": False, "frontendType": "text",
|
{"name": "status", "type": "str", "required": False, "frontendType": "text",
|
||||||
"description": t("Status-Filter: open | closed | *"), "default": "*"},
|
"description": t("Status-Filter: open | closed | *"), "default": "*"},
|
||||||
{"name": "dateFrom", "type": "string", "required": False, "frontendType": "date",
|
{"name": "dateFrom", "type": "str", "required": False, "frontendType": "date",
|
||||||
"description": t("Zeitraum ab (ISO-Datum)"), "default": ""},
|
"description": t("Zeitraum ab (ISO-Datum)"), "default": ""},
|
||||||
{"name": "dateTo", "type": "string", "required": False, "frontendType": "date",
|
{"name": "dateTo", "type": "str", "required": False, "frontendType": "date",
|
||||||
"description": t("Zeitraum bis (ISO-Datum)"), "default": ""},
|
"description": t("Zeitraum bis (ISO-Datum)"), "default": ""},
|
||||||
{"name": "assignedToId", "type": "number", "required": False, "frontendType": "number",
|
{"name": "assignedToId", "type": "int", "required": False, "frontendType": "number",
|
||||||
"description": t("Nur Tickets dieses Benutzers (ID)")},
|
"description": t("Nur Tickets dieses Benutzers (ID)")},
|
||||||
{"name": "limit", "type": "number", "required": False, "frontendType": "number",
|
{"name": "limit", "type": "int", "required": False, "frontendType": "number",
|
||||||
"description": t("Max. Anzahl Tickets (1-500)"), "default": 100},
|
"description": t("Max. Anzahl Tickets (1-500)"), "default": 100},
|
||||||
],
|
],
|
||||||
"inputs": 1,
|
"inputs": 1,
|
||||||
"outputs": 1,
|
"outputs": 1,
|
||||||
"inputPorts": {0: {"accepts": ["Transit"]}},
|
"inputPorts": {0: {"accepts": ["Transit"]}},
|
||||||
"outputPorts": {0: {"schema": "ActionResult"}},
|
"outputPorts": {0: {"schema": "ActionResult", "dataPickOptions": ACTION_RESULT_DATA_PICK_OPTIONS}},
|
||||||
"meta": {"icon": "mdi-format-list-bulleted", "color": "#4A6FA5", "usesAi": False},
|
"meta": {"icon": "mdi-format-list-bulleted", "color": "#4A6FA5", "usesAi": False},
|
||||||
"_method": "redmine",
|
"_method": "redmine",
|
||||||
"_action": "listTickets",
|
"_action": "listTickets",
|
||||||
|
|
@ -59,29 +72,28 @@ REDMINE_NODES = [
|
||||||
"label": t("Ticket erstellen"),
|
"label": t("Ticket erstellen"),
|
||||||
"description": t("Neues Ticket in Redmine anlegen. Mirror wird sofort aktualisiert."),
|
"description": t("Neues Ticket in Redmine anlegen. Mirror wird sofort aktualisiert."),
|
||||||
"parameters": [
|
"parameters": [
|
||||||
{"name": "featureInstanceId", "type": "string", "required": True, "frontendType": "hidden",
|
dict(_REDMINE_INSTANCE_PARAM),
|
||||||
"description": t("Redmine Feature-Instanz-ID")},
|
{"name": "subject", "type": "str", "required": True, "frontendType": "text",
|
||||||
{"name": "subject", "type": "string", "required": True, "frontendType": "text",
|
|
||||||
"description": t("Ticket-Titel")},
|
"description": t("Ticket-Titel")},
|
||||||
{"name": "trackerId", "type": "number", "required": True, "frontendType": "number",
|
{"name": "trackerId", "type": "int", "required": True, "frontendType": "number",
|
||||||
"description": t("Tracker-ID (Userstory, Feature, Task, ...)")},
|
"description": t("Tracker-ID (Userstory, Feature, Task, ...)")},
|
||||||
{"name": "description", "type": "string", "required": False, "frontendType": "textarea",
|
{"name": "description", "type": "str", "required": False, "frontendType": "textarea",
|
||||||
"description": t("Ticket-Beschreibung"), "default": ""},
|
"description": t("Ticket-Beschreibung"), "default": ""},
|
||||||
{"name": "statusId", "type": "number", "required": False, "frontendType": "number",
|
{"name": "statusId", "type": "int", "required": False, "frontendType": "number",
|
||||||
"description": t("Status-ID (optional)")},
|
"description": t("Status-ID (optional)")},
|
||||||
{"name": "priorityId", "type": "number", "required": False, "frontendType": "number",
|
{"name": "priorityId", "type": "int", "required": False, "frontendType": "number",
|
||||||
"description": t("Prioritaet-ID (optional)")},
|
"description": t("Prioritaet-ID (optional)")},
|
||||||
{"name": "assignedToId", "type": "number", "required": False, "frontendType": "number",
|
{"name": "assignedToId", "type": "int", "required": False, "frontendType": "number",
|
||||||
"description": t("Zugewiesene Benutzer-ID (optional)")},
|
"description": t("Zugewiesene Benutzer-ID (optional)")},
|
||||||
{"name": "parentIssueId", "type": "number", "required": False, "frontendType": "number",
|
{"name": "parentIssueId", "type": "int", "required": False, "frontendType": "number",
|
||||||
"description": t("Uebergeordnetes Ticket (optional)")},
|
"description": t("Uebergeordnetes Ticket (optional)")},
|
||||||
{"name": "customFields", "type": "string", "required": False, "frontendType": "textarea",
|
{"name": "customFields", "type": "str", "required": False, "frontendType": "textarea",
|
||||||
"description": t("Custom Fields als JSON {id: value}"), "default": ""},
|
"description": t("Custom Fields als JSON {id: value}"), "default": ""},
|
||||||
],
|
],
|
||||||
"inputs": 1,
|
"inputs": 1,
|
||||||
"outputs": 1,
|
"outputs": 1,
|
||||||
"inputPorts": {0: {"accepts": ["Transit"]}},
|
"inputPorts": {0: {"accepts": ["Transit"]}},
|
||||||
"outputPorts": {0: {"schema": "ActionResult"}},
|
"outputPorts": {0: {"schema": "ActionResult", "dataPickOptions": ACTION_RESULT_DATA_PICK_OPTIONS}},
|
||||||
"meta": {"icon": "mdi-ticket-plus-outline", "color": "#4A6FA5", "usesAi": False},
|
"meta": {"icon": "mdi-ticket-plus-outline", "color": "#4A6FA5", "usesAi": False},
|
||||||
"_method": "redmine",
|
"_method": "redmine",
|
||||||
"_action": "createTicket",
|
"_action": "createTicket",
|
||||||
|
|
@ -92,33 +104,32 @@ REDMINE_NODES = [
|
||||||
"label": t("Ticket bearbeiten"),
|
"label": t("Ticket bearbeiten"),
|
||||||
"description": t("Felder eines Redmine-Tickets aktualisieren. Nur gesetzte Felder werden uebertragen."),
|
"description": t("Felder eines Redmine-Tickets aktualisieren. Nur gesetzte Felder werden uebertragen."),
|
||||||
"parameters": [
|
"parameters": [
|
||||||
{"name": "featureInstanceId", "type": "string", "required": True, "frontendType": "hidden",
|
dict(_REDMINE_INSTANCE_PARAM),
|
||||||
"description": t("Redmine Feature-Instanz-ID")},
|
{"name": "ticketId", "type": "int", "required": True, "frontendType": "number",
|
||||||
{"name": "ticketId", "type": "number", "required": True, "frontendType": "number",
|
|
||||||
"description": t("Ticket-ID")},
|
"description": t("Ticket-ID")},
|
||||||
{"name": "subject", "type": "string", "required": False, "frontendType": "text",
|
{"name": "subject", "type": "str", "required": False, "frontendType": "text",
|
||||||
"description": t("Neuer Titel")},
|
"description": t("Neuer Titel")},
|
||||||
{"name": "description", "type": "string", "required": False, "frontendType": "textarea",
|
{"name": "description", "type": "str", "required": False, "frontendType": "textarea",
|
||||||
"description": t("Neue Beschreibung")},
|
"description": t("Neue Beschreibung")},
|
||||||
{"name": "trackerId", "type": "number", "required": False, "frontendType": "number",
|
{"name": "trackerId", "type": "int", "required": False, "frontendType": "number",
|
||||||
"description": t("Neuer Tracker")},
|
"description": t("Neuer Tracker")},
|
||||||
{"name": "statusId", "type": "number", "required": False, "frontendType": "number",
|
{"name": "statusId", "type": "int", "required": False, "frontendType": "number",
|
||||||
"description": t("Neuer Status")},
|
"description": t("Neuer Status")},
|
||||||
{"name": "priorityId", "type": "number", "required": False, "frontendType": "number",
|
{"name": "priorityId", "type": "int", "required": False, "frontendType": "number",
|
||||||
"description": t("Neue Prioritaet")},
|
"description": t("Neue Prioritaet")},
|
||||||
{"name": "assignedToId", "type": "number", "required": False, "frontendType": "number",
|
{"name": "assignedToId", "type": "int", "required": False, "frontendType": "number",
|
||||||
"description": t("Neue Zuweisung")},
|
"description": t("Neue Zuweisung")},
|
||||||
{"name": "parentIssueId", "type": "number", "required": False, "frontendType": "number",
|
{"name": "parentIssueId", "type": "int", "required": False, "frontendType": "number",
|
||||||
"description": t("Neues Parent-Ticket")},
|
"description": t("Neues Parent-Ticket")},
|
||||||
{"name": "notes", "type": "string", "required": False, "frontendType": "textarea",
|
{"name": "notes", "type": "str", "required": False, "frontendType": "textarea",
|
||||||
"description": t("Kommentar (Journal-Eintrag)"), "default": ""},
|
"description": t("Kommentar (Journal-Eintrag)"), "default": ""},
|
||||||
{"name": "customFields", "type": "string", "required": False, "frontendType": "textarea",
|
{"name": "customFields", "type": "str", "required": False, "frontendType": "textarea",
|
||||||
"description": t("Custom Fields als JSON {id: value}"), "default": ""},
|
"description": t("Custom Fields als JSON {id: value}"), "default": ""},
|
||||||
],
|
],
|
||||||
"inputs": 1,
|
"inputs": 1,
|
||||||
"outputs": 1,
|
"outputs": 1,
|
||||||
"inputPorts": {0: {"accepts": ["Transit"]}},
|
"inputPorts": {0: {"accepts": ["Transit"]}},
|
||||||
"outputPorts": {0: {"schema": "ActionResult"}},
|
"outputPorts": {0: {"schema": "ActionResult", "dataPickOptions": ACTION_RESULT_DATA_PICK_OPTIONS}},
|
||||||
"meta": {"icon": "mdi-ticket-confirmation-outline", "color": "#4A6FA5", "usesAi": False},
|
"meta": {"icon": "mdi-ticket-confirmation-outline", "color": "#4A6FA5", "usesAi": False},
|
||||||
"_method": "redmine",
|
"_method": "redmine",
|
||||||
"_action": "updateTicket",
|
"_action": "updateTicket",
|
||||||
|
|
@ -129,21 +140,20 @@ REDMINE_NODES = [
|
||||||
"label": t("Statistik laden"),
|
"label": t("Statistik laden"),
|
||||||
"description": t("Aggregierte Kennzahlen (KPIs, Durchsatz, Status-Verteilung, Backlog) aus dem Mirror."),
|
"description": t("Aggregierte Kennzahlen (KPIs, Durchsatz, Status-Verteilung, Backlog) aus dem Mirror."),
|
||||||
"parameters": [
|
"parameters": [
|
||||||
{"name": "featureInstanceId", "type": "string", "required": True, "frontendType": "hidden",
|
dict(_REDMINE_INSTANCE_PARAM),
|
||||||
"description": t("Redmine Feature-Instanz-ID")},
|
{"name": "dateFrom", "type": "str", "required": False, "frontendType": "date",
|
||||||
{"name": "dateFrom", "type": "string", "required": False, "frontendType": "date",
|
|
||||||
"description": t("Zeitraum ab")},
|
"description": t("Zeitraum ab")},
|
||||||
{"name": "dateTo", "type": "string", "required": False, "frontendType": "date",
|
{"name": "dateTo", "type": "str", "required": False, "frontendType": "date",
|
||||||
"description": t("Zeitraum bis")},
|
"description": t("Zeitraum bis")},
|
||||||
{"name": "bucket", "type": "string", "required": False, "frontendType": "text",
|
{"name": "bucket", "type": "str", "required": False, "frontendType": "text",
|
||||||
"description": t("Bucket: day | week | month"), "default": "week"},
|
"description": t("Bucket: day | week | month"), "default": "week"},
|
||||||
{"name": "trackerIds", "type": "string", "required": False, "frontendType": "text",
|
{"name": "trackerIds", "type": "str", "required": False, "frontendType": "text",
|
||||||
"description": t("Tracker-IDs (Komma-separiert)"), "default": ""},
|
"description": t("Tracker-IDs (Komma-separiert)"), "default": ""},
|
||||||
],
|
],
|
||||||
"inputs": 1,
|
"inputs": 1,
|
||||||
"outputs": 1,
|
"outputs": 1,
|
||||||
"inputPorts": {0: {"accepts": ["Transit"]}},
|
"inputPorts": {0: {"accepts": ["Transit"]}},
|
||||||
"outputPorts": {0: {"schema": "ActionResult"}},
|
"outputPorts": {0: {"schema": "ActionResult", "dataPickOptions": ACTION_RESULT_DATA_PICK_OPTIONS}},
|
||||||
"meta": {"icon": "mdi-chart-bar", "color": "#4A6FA5", "usesAi": False},
|
"meta": {"icon": "mdi-chart-bar", "color": "#4A6FA5", "usesAi": False},
|
||||||
"_method": "redmine",
|
"_method": "redmine",
|
||||||
"_action": "getStats",
|
"_action": "getStats",
|
||||||
|
|
@ -154,15 +164,14 @@ REDMINE_NODES = [
|
||||||
"label": t("Mirror synchronisieren"),
|
"label": t("Mirror synchronisieren"),
|
||||||
"description": t("Tickets und Beziehungen aus Redmine in den lokalen Mirror uebernehmen."),
|
"description": t("Tickets und Beziehungen aus Redmine in den lokalen Mirror uebernehmen."),
|
||||||
"parameters": [
|
"parameters": [
|
||||||
{"name": "featureInstanceId", "type": "string", "required": True, "frontendType": "hidden",
|
dict(_REDMINE_INSTANCE_PARAM),
|
||||||
"description": t("Redmine Feature-Instanz-ID")},
|
{"name": "force", "type": "bool", "required": False, "frontendType": "checkbox",
|
||||||
{"name": "force", "type": "boolean", "required": False, "frontendType": "checkbox",
|
|
||||||
"description": t("Vollsync erzwingen (ignoriert lastSyncAt)"), "default": False},
|
"description": t("Vollsync erzwingen (ignoriert lastSyncAt)"), "default": False},
|
||||||
],
|
],
|
||||||
"inputs": 1,
|
"inputs": 1,
|
||||||
"outputs": 1,
|
"outputs": 1,
|
||||||
"inputPorts": {0: {"accepts": ["Transit"]}},
|
"inputPorts": {0: {"accepts": ["Transit"]}},
|
||||||
"outputPorts": {0: {"schema": "ActionResult"}},
|
"outputPorts": {0: {"schema": "ActionResult", "dataPickOptions": ACTION_RESULT_DATA_PICK_OPTIONS}},
|
||||||
"meta": {"icon": "mdi-database-sync", "color": "#4A6FA5", "usesAi": False},
|
"meta": {"icon": "mdi-database-sync", "color": "#4A6FA5", "usesAi": False},
|
||||||
"_method": "redmine",
|
"_method": "redmine",
|
||||||
"_action": "runSync",
|
"_action": "runSync",
|
||||||
|
|
|
||||||
|
|
@ -3,6 +3,35 @@
|
||||||
|
|
||||||
from modules.shared.i18nRegistry import t
|
from modules.shared.i18nRegistry import t
|
||||||
|
|
||||||
|
from modules.features.graphicalEditor.nodeDefinitions.ai import (
|
||||||
|
ACTION_RESULT_DATA_PICK_OPTIONS,
|
||||||
|
DOCUMENT_LIST_DATA_PICK_OPTIONS,
|
||||||
|
)
|
||||||
|
|
||||||
|
FILE_LIST_DATA_PICK_OPTIONS = [
|
||||||
|
{
|
||||||
|
"path": ["files"],
|
||||||
|
"pickerLabel": t("Alle Dateien"),
|
||||||
|
"detail": t("Die vollständige Dateiliste."),
|
||||||
|
"recommended": True,
|
||||||
|
"type": "List[FileItem]",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"path": ["files", 0],
|
||||||
|
"pickerLabel": t("Erste Datei"),
|
||||||
|
"detail": t("Das erste Listenelement."),
|
||||||
|
"recommended": False,
|
||||||
|
"type": "FileItem",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"path": ["count"],
|
||||||
|
"pickerLabel": t("Anzahl"),
|
||||||
|
"detail": t("Anzahl der Dateien."),
|
||||||
|
"recommended": False,
|
||||||
|
"type": "int",
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
SHAREPOINT_NODES = [
|
SHAREPOINT_NODES = [
|
||||||
{
|
{
|
||||||
"id": "sharepoint.findFile",
|
"id": "sharepoint.findFile",
|
||||||
|
|
@ -10,20 +39,20 @@ SHAREPOINT_NODES = [
|
||||||
"label": t("Datei finden"),
|
"label": t("Datei finden"),
|
||||||
"description": t("Datei nach Pfad oder Suche finden"),
|
"description": t("Datei nach Pfad oder Suche finden"),
|
||||||
"parameters": [
|
"parameters": [
|
||||||
{"name": "connectionReference", "type": "string", "required": True, "frontendType": "userConnection",
|
{"name": "connectionReference", "type": "str", "required": True, "frontendType": "userConnection",
|
||||||
"frontendOptions": {"authority": "msft"},
|
"frontendOptions": {"authority": "msft"},
|
||||||
"description": t("SharePoint-Verbindung")},
|
"description": t("SharePoint-Verbindung")},
|
||||||
{"name": "searchQuery", "type": "string", "required": True, "frontendType": "text",
|
{"name": "searchQuery", "type": "str", "required": True, "frontendType": "text",
|
||||||
"description": t("Suchanfrage oder Pfad")},
|
"description": t("Suchanfrage oder Pfad")},
|
||||||
{"name": "site", "type": "string", "required": False, "frontendType": "text",
|
{"name": "site", "type": "str", "required": False, "frontendType": "text",
|
||||||
"description": t("Optionaler Site-Hinweis"), "default": ""},
|
"description": t("Optionaler Site-Hinweis"), "default": ""},
|
||||||
{"name": "maxResults", "type": "number", "required": False, "frontendType": "number",
|
{"name": "maxResults", "type": "int", "required": False, "frontendType": "number",
|
||||||
"description": t("Max Ergebnisse"), "default": 1000},
|
"description": t("Max Ergebnisse"), "default": 1000},
|
||||||
],
|
],
|
||||||
"inputs": 1,
|
"inputs": 1,
|
||||||
"outputs": 1,
|
"outputs": 1,
|
||||||
"inputPorts": {0: {"accepts": ["Transit"]}},
|
"inputPorts": {0: {"accepts": ["Transit"]}},
|
||||||
"outputPorts": {0: {"schema": "FileList"}},
|
"outputPorts": {0: {"schema": "FileList", "dataPickOptions": FILE_LIST_DATA_PICK_OPTIONS}},
|
||||||
"meta": {"icon": "mdi-file-search", "color": "#0078D4", "usesAi": False},
|
"meta": {"icon": "mdi-file-search", "color": "#0078D4", "usesAi": False},
|
||||||
"_method": "sharepoint",
|
"_method": "sharepoint",
|
||||||
"_action": "findDocumentPath",
|
"_action": "findDocumentPath",
|
||||||
|
|
@ -34,17 +63,17 @@ SHAREPOINT_NODES = [
|
||||||
"label": t("Datei lesen"),
|
"label": t("Datei lesen"),
|
||||||
"description": t("Inhalt aus Datei extrahieren"),
|
"description": t("Inhalt aus Datei extrahieren"),
|
||||||
"parameters": [
|
"parameters": [
|
||||||
{"name": "connectionReference", "type": "string", "required": True, "frontendType": "userConnection",
|
{"name": "connectionReference", "type": "str", "required": True, "frontendType": "userConnection",
|
||||||
"frontendOptions": {"authority": "msft"},
|
"frontendOptions": {"authority": "msft"},
|
||||||
"description": t("SharePoint-Verbindung")},
|
"description": t("SharePoint-Verbindung")},
|
||||||
{"name": "pathQuery", "type": "string", "required": True, "frontendType": "sharepointFile",
|
{"name": "pathQuery", "type": "str", "required": True, "frontendType": "sharepointFile",
|
||||||
"frontendOptions": {"dependsOn": "connectionReference"},
|
"frontendOptions": {"dependsOn": "connectionReference"},
|
||||||
"description": t("Dateipfad")},
|
"description": t("Dateipfad")},
|
||||||
],
|
],
|
||||||
"inputs": 1,
|
"inputs": 1,
|
||||||
"outputs": 1,
|
"outputs": 1,
|
||||||
"inputPorts": {0: {"accepts": ["FileList", "Transit"]}},
|
"inputPorts": {0: {"accepts": ["FileList", "Transit", "LoopItem"]}},
|
||||||
"outputPorts": {0: {"schema": "DocumentList"}},
|
"outputPorts": {0: {"schema": "DocumentList", "dataPickOptions": DOCUMENT_LIST_DATA_PICK_OPTIONS}},
|
||||||
"meta": {"icon": "mdi-file-document", "color": "#0078D4", "usesAi": False},
|
"meta": {"icon": "mdi-file-document", "color": "#0078D4", "usesAi": False},
|
||||||
"_method": "sharepoint",
|
"_method": "sharepoint",
|
||||||
"_action": "readDocuments",
|
"_action": "readDocuments",
|
||||||
|
|
@ -55,17 +84,19 @@ SHAREPOINT_NODES = [
|
||||||
"label": t("Datei hochladen"),
|
"label": t("Datei hochladen"),
|
||||||
"description": t("Datei zu SharePoint hochladen"),
|
"description": t("Datei zu SharePoint hochladen"),
|
||||||
"parameters": [
|
"parameters": [
|
||||||
{"name": "connectionReference", "type": "string", "required": True, "frontendType": "userConnection",
|
{"name": "connectionReference", "type": "str", "required": True, "frontendType": "userConnection",
|
||||||
"frontendOptions": {"authority": "msft"},
|
"frontendOptions": {"authority": "msft"},
|
||||||
"description": t("SharePoint-Verbindung")},
|
"description": t("SharePoint-Verbindung")},
|
||||||
{"name": "pathQuery", "type": "string", "required": True, "frontendType": "sharepointFolder",
|
{"name": "pathQuery", "type": "str", "required": True, "frontendType": "sharepointFolder",
|
||||||
"frontendOptions": {"dependsOn": "connectionReference"},
|
"frontendOptions": {"dependsOn": "connectionReference"},
|
||||||
"description": t("Zielordner-Pfad")},
|
"description": t("Zielordner-Pfad")},
|
||||||
|
{"name": "content", "type": "str", "required": True, "frontendType": "hidden",
|
||||||
|
"description": t("Datei-Inhalt aus Upstream-Node (via Wire oder DataRef)"), "default": ""},
|
||||||
],
|
],
|
||||||
"inputs": 1,
|
"inputs": 1,
|
||||||
"outputs": 1,
|
"outputs": 1,
|
||||||
"inputPorts": {0: {"accepts": ["DocumentList", "Transit"]}},
|
"inputPorts": {0: {"accepts": ["DocumentList", "Transit"]}},
|
||||||
"outputPorts": {0: {"schema": "ActionResult"}},
|
"outputPorts": {0: {"schema": "ActionResult", "dataPickOptions": ACTION_RESULT_DATA_PICK_OPTIONS}},
|
||||||
"meta": {"icon": "mdi-upload", "color": "#0078D4", "usesAi": False},
|
"meta": {"icon": "mdi-upload", "color": "#0078D4", "usesAi": False},
|
||||||
"_method": "sharepoint",
|
"_method": "sharepoint",
|
||||||
"_action": "uploadFile",
|
"_action": "uploadFile",
|
||||||
|
|
@ -76,17 +107,17 @@ SHAREPOINT_NODES = [
|
||||||
"label": t("Dateien auflisten"),
|
"label": t("Dateien auflisten"),
|
||||||
"description": t("Dateien in Ordner auflisten"),
|
"description": t("Dateien in Ordner auflisten"),
|
||||||
"parameters": [
|
"parameters": [
|
||||||
{"name": "connectionReference", "type": "string", "required": True, "frontendType": "userConnection",
|
{"name": "connectionReference", "type": "str", "required": True, "frontendType": "userConnection",
|
||||||
"frontendOptions": {"authority": "msft"},
|
"frontendOptions": {"authority": "msft"},
|
||||||
"description": t("SharePoint-Verbindung")},
|
"description": t("SharePoint-Verbindung")},
|
||||||
{"name": "pathQuery", "type": "string", "required": False, "frontendType": "sharepointFolder",
|
{"name": "pathQuery", "type": "str", "required": False, "frontendType": "sharepointFolder",
|
||||||
"frontendOptions": {"dependsOn": "connectionReference"},
|
"frontendOptions": {"dependsOn": "connectionReference"},
|
||||||
"description": t("Ordnerpfad"), "default": "/"},
|
"description": t("Ordnerpfad"), "default": "/"},
|
||||||
],
|
],
|
||||||
"inputs": 1,
|
"inputs": 1,
|
||||||
"outputs": 1,
|
"outputs": 1,
|
||||||
"inputPorts": {0: {"accepts": ["Transit"]}},
|
"inputPorts": {0: {"accepts": ["Transit"]}},
|
||||||
"outputPorts": {0: {"schema": "FileList"}},
|
"outputPorts": {0: {"schema": "FileList", "dataPickOptions": FILE_LIST_DATA_PICK_OPTIONS}},
|
||||||
"meta": {"icon": "mdi-folder-open", "color": "#0078D4", "usesAi": False},
|
"meta": {"icon": "mdi-folder-open", "color": "#0078D4", "usesAi": False},
|
||||||
"_method": "sharepoint",
|
"_method": "sharepoint",
|
||||||
"_action": "listDocuments",
|
"_action": "listDocuments",
|
||||||
|
|
@ -97,17 +128,17 @@ SHAREPOINT_NODES = [
|
||||||
"label": t("Datei herunterladen"),
|
"label": t("Datei herunterladen"),
|
||||||
"description": t("Datei vom Pfad herunterladen"),
|
"description": t("Datei vom Pfad herunterladen"),
|
||||||
"parameters": [
|
"parameters": [
|
||||||
{"name": "connectionReference", "type": "string", "required": True, "frontendType": "userConnection",
|
{"name": "connectionReference", "type": "str", "required": True, "frontendType": "userConnection",
|
||||||
"frontendOptions": {"authority": "msft"},
|
"frontendOptions": {"authority": "msft"},
|
||||||
"description": t("SharePoint-Verbindung")},
|
"description": t("SharePoint-Verbindung")},
|
||||||
{"name": "pathQuery", "type": "string", "required": True, "frontendType": "sharepointFile",
|
{"name": "pathQuery", "type": "str", "required": True, "frontendType": "sharepointFile",
|
||||||
"frontendOptions": {"dependsOn": "connectionReference"},
|
"frontendOptions": {"dependsOn": "connectionReference"},
|
||||||
"description": t("Vollständiger Dateipfad")},
|
"description": t("Vollständiger Dateipfad")},
|
||||||
],
|
],
|
||||||
"inputs": 1,
|
"inputs": 1,
|
||||||
"outputs": 1,
|
"outputs": 1,
|
||||||
"inputPorts": {0: {"accepts": ["FileList", "Transit"]}},
|
"inputPorts": {0: {"accepts": ["FileList", "Transit", "LoopItem"]}},
|
||||||
"outputPorts": {0: {"schema": "DocumentList"}},
|
"outputPorts": {0: {"schema": "DocumentList", "dataPickOptions": DOCUMENT_LIST_DATA_PICK_OPTIONS}},
|
||||||
"meta": {"icon": "mdi-download", "color": "#0078D4", "usesAi": False},
|
"meta": {"icon": "mdi-download", "color": "#0078D4", "usesAi": False},
|
||||||
"_method": "sharepoint",
|
"_method": "sharepoint",
|
||||||
"_action": "downloadFileByPath",
|
"_action": "downloadFileByPath",
|
||||||
|
|
@ -118,20 +149,20 @@ SHAREPOINT_NODES = [
|
||||||
"label": t("Datei kopieren"),
|
"label": t("Datei kopieren"),
|
||||||
"description": t("Datei an Ziel kopieren"),
|
"description": t("Datei an Ziel kopieren"),
|
||||||
"parameters": [
|
"parameters": [
|
||||||
{"name": "connectionReference", "type": "string", "required": True, "frontendType": "userConnection",
|
{"name": "connectionReference", "type": "str", "required": True, "frontendType": "userConnection",
|
||||||
"frontendOptions": {"authority": "msft"},
|
"frontendOptions": {"authority": "msft"},
|
||||||
"description": t("SharePoint-Verbindung")},
|
"description": t("SharePoint-Verbindung")},
|
||||||
{"name": "sourcePath", "type": "string", "required": True, "frontendType": "sharepointFile",
|
{"name": "sourcePath", "type": "str", "required": True, "frontendType": "sharepointFile",
|
||||||
"frontendOptions": {"dependsOn": "connectionReference"},
|
"frontendOptions": {"dependsOn": "connectionReference"},
|
||||||
"description": t("Quelldatei-Pfad")},
|
"description": t("Quelldatei-Pfad")},
|
||||||
{"name": "destPath", "type": "string", "required": True, "frontendType": "sharepointFolder",
|
{"name": "destPath", "type": "str", "required": True, "frontendType": "sharepointFolder",
|
||||||
"frontendOptions": {"dependsOn": "connectionReference"},
|
"frontendOptions": {"dependsOn": "connectionReference"},
|
||||||
"description": t("Zielordner")},
|
"description": t("Zielordner")},
|
||||||
],
|
],
|
||||||
"inputs": 1,
|
"inputs": 1,
|
||||||
"outputs": 1,
|
"outputs": 1,
|
||||||
"inputPorts": {0: {"accepts": ["Transit"]}},
|
"inputPorts": {0: {"accepts": ["Transit"]}},
|
||||||
"outputPorts": {0: {"schema": "ActionResult"}},
|
"outputPorts": {0: {"schema": "ActionResult", "dataPickOptions": ACTION_RESULT_DATA_PICK_OPTIONS}},
|
||||||
"meta": {"icon": "mdi-content-copy", "color": "#0078D4", "usesAi": False},
|
"meta": {"icon": "mdi-content-copy", "color": "#0078D4", "usesAi": False},
|
||||||
"_method": "sharepoint",
|
"_method": "sharepoint",
|
||||||
"_action": "copyFile",
|
"_action": "copyFile",
|
||||||
|
|
|
||||||
|
|
@ -3,6 +3,8 @@
|
||||||
|
|
||||||
from modules.shared.i18nRegistry import t
|
from modules.shared.i18nRegistry import t
|
||||||
|
|
||||||
|
from modules.features.graphicalEditor.nodeDefinitions.ai import ACTION_RESULT_DATA_PICK_OPTIONS
|
||||||
|
|
||||||
TRIGGER_NODES = [
|
TRIGGER_NODES = [
|
||||||
{
|
{
|
||||||
"id": "trigger.manual",
|
"id": "trigger.manual",
|
||||||
|
|
@ -13,7 +15,7 @@ TRIGGER_NODES = [
|
||||||
"inputs": 0,
|
"inputs": 0,
|
||||||
"outputs": 1,
|
"outputs": 1,
|
||||||
"inputPorts": {},
|
"inputPorts": {},
|
||||||
"outputPorts": {0: {"schema": "ActionResult"}},
|
"outputPorts": {0: {"schema": "ActionResult", "dataPickOptions": ACTION_RESULT_DATA_PICK_OPTIONS}},
|
||||||
"executor": "trigger",
|
"executor": "trigger",
|
||||||
"meta": {"icon": "mdi-play", "color": "#4CAF50", "usesAi": False},
|
"meta": {"icon": "mdi-play", "color": "#4CAF50", "usesAi": False},
|
||||||
},
|
},
|
||||||
|
|
@ -34,7 +36,7 @@ TRIGGER_NODES = [
|
||||||
"inputs": 0,
|
"inputs": 0,
|
||||||
"outputs": 1,
|
"outputs": 1,
|
||||||
"inputPorts": {},
|
"inputPorts": {},
|
||||||
"outputPorts": {0: {"schema": "FormPayload", "dynamic": True, "deriveFrom": "formFields"}},
|
"outputPorts": {0: {"schema": {"kind": "fromGraph", "parameter": "formFields"}}},
|
||||||
"executor": "trigger",
|
"executor": "trigger",
|
||||||
"meta": {"icon": "mdi-form-select", "color": "#9C27B0", "usesAi": False},
|
"meta": {"icon": "mdi-form-select", "color": "#9C27B0", "usesAi": False},
|
||||||
},
|
},
|
||||||
|
|
@ -46,7 +48,7 @@ TRIGGER_NODES = [
|
||||||
"parameters": [
|
"parameters": [
|
||||||
{
|
{
|
||||||
"name": "cron",
|
"name": "cron",
|
||||||
"type": "string",
|
"type": "str",
|
||||||
"required": False,
|
"required": False,
|
||||||
"frontendType": "cron",
|
"frontendType": "cron",
|
||||||
"description": t("Cron-Ausdruck"),
|
"description": t("Cron-Ausdruck"),
|
||||||
|
|
@ -55,7 +57,7 @@ TRIGGER_NODES = [
|
||||||
"inputs": 0,
|
"inputs": 0,
|
||||||
"outputs": 1,
|
"outputs": 1,
|
||||||
"inputPorts": {},
|
"inputPorts": {},
|
||||||
"outputPorts": {0: {"schema": "ActionResult"}},
|
"outputPorts": {0: {"schema": "ActionResult", "dataPickOptions": ACTION_RESULT_DATA_PICK_OPTIONS}},
|
||||||
"executor": "trigger",
|
"executor": "trigger",
|
||||||
"meta": {"icon": "mdi-clock", "color": "#2196F3", "usesAi": False},
|
"meta": {"icon": "mdi-clock", "color": "#2196F3", "usesAi": False},
|
||||||
},
|
},
|
||||||
|
|
|
||||||
|
|
@ -3,6 +3,22 @@
|
||||||
|
|
||||||
from modules.shared.i18nRegistry import t
|
from modules.shared.i18nRegistry import t
|
||||||
|
|
||||||
|
from modules.features.graphicalEditor.nodeDefinitions.ai import ACTION_RESULT_DATA_PICK_OPTIONS
|
||||||
|
|
||||||
|
# Typed FeatureInstance binding (replaces legacy `string, hidden`).
|
||||||
|
# - type uses the discriminator notation `FeatureInstanceRef[<code>]` so the
|
||||||
|
# DataPicker / RequiredAttributePicker can filter compatible upstream paths.
|
||||||
|
# - frontendType "featureInstance" is rendered by FeatureInstancePicker which
|
||||||
|
# loads /options/feature.instance?featureCode=trustee for the current mandate.
|
||||||
|
_TRUSTEE_INSTANCE_PARAM = {
|
||||||
|
"name": "featureInstanceId",
|
||||||
|
"type": "FeatureInstanceRef[trustee]",
|
||||||
|
"required": True,
|
||||||
|
"frontendType": "featureInstance",
|
||||||
|
"frontendOptions": {"featureCode": "trustee"},
|
||||||
|
"description": t("Trustee-Mandant"),
|
||||||
|
}
|
||||||
|
|
||||||
TRUSTEE_NODES = [
|
TRUSTEE_NODES = [
|
||||||
{
|
{
|
||||||
"id": "trustee.refreshAccountingData",
|
"id": "trustee.refreshAccountingData",
|
||||||
|
|
@ -10,19 +26,18 @@ TRUSTEE_NODES = [
|
||||||
"label": t("Buchhaltungsdaten aktualisieren"),
|
"label": t("Buchhaltungsdaten aktualisieren"),
|
||||||
"description": t("Buchhaltungsdaten aus externem System importieren/aktualisieren."),
|
"description": t("Buchhaltungsdaten aus externem System importieren/aktualisieren."),
|
||||||
"parameters": [
|
"parameters": [
|
||||||
{"name": "featureInstanceId", "type": "string", "required": True, "frontendType": "hidden",
|
dict(_TRUSTEE_INSTANCE_PARAM),
|
||||||
"description": t("Trustee Feature-Instanz-ID")},
|
{"name": "forceRefresh", "type": "bool", "required": False, "frontendType": "checkbox",
|
||||||
{"name": "forceRefresh", "type": "boolean", "required": False, "frontendType": "checkbox",
|
|
||||||
"description": t("Import erzwingen"), "default": False},
|
"description": t("Import erzwingen"), "default": False},
|
||||||
{"name": "dateFrom", "type": "string", "required": False, "frontendType": "date",
|
{"name": "dateFrom", "type": "str", "required": False, "frontendType": "date",
|
||||||
"description": t("Startdatum"), "default": ""},
|
"description": t("Startdatum"), "default": ""},
|
||||||
{"name": "dateTo", "type": "string", "required": False, "frontendType": "date",
|
{"name": "dateTo", "type": "str", "required": False, "frontendType": "date",
|
||||||
"description": t("Enddatum"), "default": ""},
|
"description": t("Enddatum"), "default": ""},
|
||||||
],
|
],
|
||||||
"inputs": 1,
|
"inputs": 1,
|
||||||
"outputs": 1,
|
"outputs": 1,
|
||||||
"inputPorts": {0: {"accepts": ["Transit"]}},
|
"inputPorts": {0: {"accepts": ["Transit"]}},
|
||||||
"outputPorts": {0: {"schema": "ActionResult"}},
|
"outputPorts": {0: {"schema": "ActionResult", "dataPickOptions": ACTION_RESULT_DATA_PICK_OPTIONS}},
|
||||||
"meta": {"icon": "mdi-database-refresh", "color": "#4CAF50", "usesAi": False},
|
"meta": {"icon": "mdi-database-refresh", "color": "#4CAF50", "usesAi": False},
|
||||||
"_method": "trustee",
|
"_method": "trustee",
|
||||||
"_action": "refreshAccountingData",
|
"_action": "refreshAccountingData",
|
||||||
|
|
@ -33,21 +48,23 @@ TRUSTEE_NODES = [
|
||||||
"label": t("Dokumente extrahieren"),
|
"label": t("Dokumente extrahieren"),
|
||||||
"description": t("Dokumenttyp und Daten aus PDF/JPG per AI extrahieren."),
|
"description": t("Dokumenttyp und Daten aus PDF/JPG per AI extrahieren."),
|
||||||
"parameters": [
|
"parameters": [
|
||||||
{"name": "connectionReference", "type": "string", "required": False, "frontendType": "userConnection",
|
{"name": "connectionReference", "type": "str", "required": False, "frontendType": "userConnection",
|
||||||
"frontendOptions": {"authority": "msft"},
|
"frontendOptions": {"authority": "msft"},
|
||||||
"description": t("SharePoint-Verbindung"), "default": ""},
|
"description": t("SharePoint-Verbindung"), "default": ""},
|
||||||
{"name": "sharepointFolder", "type": "string", "required": False, "frontendType": "sharepointFolder",
|
{"name": "sharepointFolder", "type": "str", "required": False, "frontendType": "sharepointFolder",
|
||||||
"frontendOptions": {"dependsOn": "connectionReference"},
|
"frontendOptions": {"dependsOn": "connectionReference"},
|
||||||
"description": t("SharePoint-Ordnerpfad"), "default": ""},
|
"description": t("SharePoint-Ordnerpfad"), "default": ""},
|
||||||
{"name": "featureInstanceId", "type": "string", "required": True, "frontendType": "hidden",
|
dict(_TRUSTEE_INSTANCE_PARAM),
|
||||||
"description": t("Trustee Feature-Instanz-ID")},
|
{"name": "prompt", "type": "str", "required": False, "frontendType": "textarea",
|
||||||
{"name": "prompt", "type": "string", "required": False, "frontendType": "textarea",
|
|
||||||
"description": t("AI-Prompt für Extraktion"), "default": ""},
|
"description": t("AI-Prompt für Extraktion"), "default": ""},
|
||||||
],
|
],
|
||||||
"inputs": 1,
|
"inputs": 1,
|
||||||
"outputs": 1,
|
"outputs": 1,
|
||||||
"inputPorts": {0: {"accepts": ["DocumentList", "Transit"]}},
|
"inputPorts": {0: {"accepts": ["DocumentList", "Transit", "AiResult", "LoopItem", "ActionResult"]}},
|
||||||
"outputPorts": {0: {"schema": "DocumentList"}},
|
# Runtime returns ActionResult.isSuccess(documents=[...]) — see
|
||||||
|
# actions/extractFromFiles.py. Declaring DocumentList here was adapter
|
||||||
|
# drift and broke the DataPicker for downstream nodes.
|
||||||
|
"outputPorts": {0: {"schema": "ActionResult", "dataPickOptions": ACTION_RESULT_DATA_PICK_OPTIONS}},
|
||||||
"meta": {"icon": "mdi-file-document-scan", "color": "#4CAF50", "usesAi": True},
|
"meta": {"icon": "mdi-file-document-scan", "color": "#4CAF50", "usesAi": True},
|
||||||
"_method": "trustee",
|
"_method": "trustee",
|
||||||
"_action": "extractFromFiles",
|
"_action": "extractFromFiles",
|
||||||
|
|
@ -58,15 +75,18 @@ TRUSTEE_NODES = [
|
||||||
"label": t("Dokumente verarbeiten"),
|
"label": t("Dokumente verarbeiten"),
|
||||||
"description": t("TrusteeDocument + TrusteePosition aus Extraktionsergebnis erstellen."),
|
"description": t("TrusteeDocument + TrusteePosition aus Extraktionsergebnis erstellen."),
|
||||||
"parameters": [
|
"parameters": [
|
||||||
{"name": "documentList", "type": "string", "required": False, "frontendType": "hidden",
|
# Type matches what producers actually emit: ActionResult.documents
|
||||||
"description": t("Automatisch via Wire-Verbindung befüllt")},
|
# is List[ActionDocument] (see datamodelChat.ActionResult). The
|
||||||
{"name": "featureInstanceId", "type": "string", "required": True, "frontendType": "hidden",
|
# DataPicker uses this string to filter compatible upstream paths.
|
||||||
"description": t("Trustee Feature-Instanz-ID")},
|
{"name": "documentList", "type": "List[ActionDocument]", "required": True, "frontendType": "dataRef",
|
||||||
|
"description": t("Dokumente aus vorherigen Schritten"),
|
||||||
|
"graphInherit": {"port": 0, "kind": "documentListWire"}},
|
||||||
|
dict(_TRUSTEE_INSTANCE_PARAM),
|
||||||
],
|
],
|
||||||
"inputs": 1,
|
"inputs": 1,
|
||||||
"outputs": 1,
|
"outputs": 1,
|
||||||
"inputPorts": {0: {"accepts": ["DocumentList", "Transit"]}},
|
"inputPorts": {0: {"accepts": ["ActionResult", "DocumentList", "Transit"]}},
|
||||||
"outputPorts": {0: {"schema": "ActionResult"}},
|
"outputPorts": {0: {"schema": "ActionResult", "dataPickOptions": ACTION_RESULT_DATA_PICK_OPTIONS}},
|
||||||
"meta": {"icon": "mdi-file-document-check", "color": "#4CAF50", "usesAi": False},
|
"meta": {"icon": "mdi-file-document-check", "color": "#4CAF50", "usesAi": False},
|
||||||
"_method": "trustee",
|
"_method": "trustee",
|
||||||
"_action": "processDocuments",
|
"_action": "processDocuments",
|
||||||
|
|
@ -77,15 +97,15 @@ TRUSTEE_NODES = [
|
||||||
"label": t("In Buchhaltung synchronisieren"),
|
"label": t("In Buchhaltung synchronisieren"),
|
||||||
"description": t("Trustee-Positionen in Buchhaltungssystem übertragen."),
|
"description": t("Trustee-Positionen in Buchhaltungssystem übertragen."),
|
||||||
"parameters": [
|
"parameters": [
|
||||||
{"name": "documentList", "type": "string", "required": False, "frontendType": "hidden",
|
{"name": "documentList", "type": "List[ActionDocument]", "required": True, "frontendType": "dataRef",
|
||||||
"description": t("Automatisch via Wire-Verbindung befüllt")},
|
"description": t("Dokumente aus vorherigen Schritten"),
|
||||||
{"name": "featureInstanceId", "type": "string", "required": True, "frontendType": "hidden",
|
"graphInherit": {"port": 0, "kind": "documentListWire"}},
|
||||||
"description": t("Trustee Feature-Instanz-ID")},
|
dict(_TRUSTEE_INSTANCE_PARAM),
|
||||||
],
|
],
|
||||||
"inputs": 1,
|
"inputs": 1,
|
||||||
"outputs": 1,
|
"outputs": 1,
|
||||||
"inputPorts": {0: {"accepts": ["Transit"]}},
|
"inputPorts": {0: {"accepts": ["ActionResult", "DocumentList", "Transit"]}},
|
||||||
"outputPorts": {0: {"schema": "ActionResult"}},
|
"outputPorts": {0: {"schema": "ActionResult", "dataPickOptions": ACTION_RESULT_DATA_PICK_OPTIONS}},
|
||||||
"meta": {"icon": "mdi-calculator", "color": "#4CAF50", "usesAi": False},
|
"meta": {"icon": "mdi-calculator", "color": "#4CAF50", "usesAi": False},
|
||||||
"_method": "trustee",
|
"_method": "trustee",
|
||||||
"_action": "syncToAccounting",
|
"_action": "syncToAccounting",
|
||||||
|
|
@ -96,34 +116,33 @@ TRUSTEE_NODES = [
|
||||||
"label": t("Treuhand-Daten abfragen"),
|
"label": t("Treuhand-Daten abfragen"),
|
||||||
"description": t("Daten aus der Trustee-DB lesen (Lookup, Aggregation, Roh-Export). Pendant zu refreshAccountingData ohne externen Sync."),
|
"description": t("Daten aus der Trustee-DB lesen (Lookup, Aggregation, Roh-Export). Pendant zu refreshAccountingData ohne externen Sync."),
|
||||||
"parameters": [
|
"parameters": [
|
||||||
{"name": "featureInstanceId", "type": "string", "required": True, "frontendType": "hidden",
|
dict(_TRUSTEE_INSTANCE_PARAM),
|
||||||
"description": t("Trustee Feature-Instanz-ID")},
|
{"name": "mode", "type": "str", "required": True, "frontendType": "select",
|
||||||
{"name": "mode", "type": "string", "required": True, "frontendType": "select",
|
|
||||||
"frontendOptions": {"options": ["lookup", "raw", "aggregate"]},
|
"frontendOptions": {"options": ["lookup", "raw", "aggregate"]},
|
||||||
"description": t("Abfragemodus"), "default": "lookup"},
|
"description": t("Abfragemodus"), "default": "lookup"},
|
||||||
{"name": "entity", "type": "string", "required": True, "frontendType": "select",
|
{"name": "entity", "type": "str", "required": True, "frontendType": "select",
|
||||||
"frontendOptions": {"options": ["tenantWithRent", "contact", "journalLines", "accounts", "balances"]},
|
"frontendOptions": {"options": ["tenantWithRent", "contact", "journalLines", "accounts", "balances"]},
|
||||||
"description": t("Entität, die gelesen werden soll"), "default": "tenantWithRent"},
|
"description": t("Entität, die gelesen werden soll"), "default": "tenantWithRent"},
|
||||||
{"name": "tenantNameRef", "type": "string", "required": False, "frontendType": "text",
|
{"name": "tenantNameRef", "type": "str", "required": False, "frontendType": "text",
|
||||||
"frontendOptions": {"dependsOn": "entity", "showWhen": ["tenantWithRent", "contact"]},
|
"frontendOptions": {"dependsOn": "entity", "showWhen": ["tenantWithRent", "contact"]},
|
||||||
"description": t("Mietername (oder {{wire.feld}} aus Upstream)"), "default": ""},
|
"description": t("Mietername (oder {{wire.feld}} aus Upstream)"), "default": ""},
|
||||||
{"name": "tenantAddressRef", "type": "string", "required": False, "frontendType": "text",
|
{"name": "tenantAddressRef", "type": "str", "required": False, "frontendType": "text",
|
||||||
"frontendOptions": {"dependsOn": "entity", "showWhen": ["tenantWithRent", "contact"]},
|
"frontendOptions": {"dependsOn": "entity", "showWhen": ["tenantWithRent", "contact"]},
|
||||||
"description": t("Mieteradresse (Toleranz für Tippfehler)"), "default": ""},
|
"description": t("Mieteradresse (Toleranz für Tippfehler)"), "default": ""},
|
||||||
{"name": "period", "type": "string", "required": False, "frontendType": "text",
|
{"name": "period", "type": "str", "required": False, "frontendType": "text",
|
||||||
"frontendOptions": {"dependsOn": "entity", "showWhen": ["tenantWithRent", "journalLines", "balances"]},
|
"frontendOptions": {"dependsOn": "entity", "showWhen": ["tenantWithRent", "journalLines", "balances"]},
|
||||||
"description": t("Zeitraum (YYYY oder YYYY-MM-DD/YYYY-MM-DD)"), "default": ""},
|
"description": t("Zeitraum (YYYY oder YYYY-MM-DD/YYYY-MM-DD)"), "default": ""},
|
||||||
{"name": "rentAccountPattern", "type": "string", "required": False, "frontendType": "text",
|
{"name": "rentAccountPattern", "type": "str", "required": False, "frontendType": "text",
|
||||||
"frontendOptions": {"dependsOn": "entity", "showWhen": ["tenantWithRent"]},
|
"frontendOptions": {"dependsOn": "entity", "showWhen": ["tenantWithRent"]},
|
||||||
"description": t("Konto-Filter für Mietzins (z.B. '6000-6099' oder '6*')"), "default": ""},
|
"description": t("Konto-Filter für Mietzins (z.B. '6000-6099' oder '6*')"), "default": ""},
|
||||||
{"name": "filterJson", "type": "string", "required": False, "frontendType": "textarea",
|
{"name": "filterJson", "type": "str", "required": False, "frontendType": "textarea",
|
||||||
"frontendOptions": {"dependsOn": "mode", "showWhen": ["raw", "aggregate"]},
|
"frontendOptions": {"dependsOn": "mode", "showWhen": ["raw", "aggregate"]},
|
||||||
"description": t("Optionaler JSON-Filter für mode=raw/aggregate"), "default": ""},
|
"description": t("Optionaler JSON-Filter für mode=raw/aggregate"), "default": ""},
|
||||||
],
|
],
|
||||||
"inputs": 1,
|
"inputs": 1,
|
||||||
"outputs": 1,
|
"outputs": 1,
|
||||||
"inputPorts": {0: {"accepts": ["Transit", "AiResult", "ConsolidateResult"]}},
|
"inputPorts": {0: {"accepts": ["Transit", "AiResult", "ConsolidateResult", "UdmDocument"]}},
|
||||||
"outputPorts": {0: {"schema": "ActionResult"}},
|
"outputPorts": {0: {"schema": "ActionResult", "dataPickOptions": ACTION_RESULT_DATA_PICK_OPTIONS}},
|
||||||
"meta": {"icon": "mdi-database-search", "color": "#4CAF50", "usesAi": False},
|
"meta": {"icon": "mdi-database-search", "color": "#4CAF50", "usesAi": False},
|
||||||
"_method": "trustee",
|
"_method": "trustee",
|
||||||
"_action": "queryData",
|
"_action": "queryData",
|
||||||
|
|
|
||||||
|
|
@ -6,9 +6,11 @@ Nodes are defined first; IO/method actions are used at execution time.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
from typing import Dict, List, Any
|
from typing import Dict, List, Any, Optional
|
||||||
|
|
||||||
from modules.features.graphicalEditor.nodeDefinitions import STATIC_NODE_TYPES
|
from modules.features.graphicalEditor.nodeDefinitions import STATIC_NODE_TYPES
|
||||||
|
from modules.features.graphicalEditor.nodeDefinitions.input import FORM_FIELD_TYPES
|
||||||
|
from modules.features.graphicalEditor.nodeAdapter import bindsActionFromLegacy
|
||||||
from modules.features.graphicalEditor.portTypes import PORT_TYPE_CATALOG, SYSTEM_VARIABLES
|
from modules.features.graphicalEditor.portTypes import PORT_TYPE_CATALOG, SYSTEM_VARIABLES
|
||||||
from modules.shared.i18nRegistry import normalizePrimaryLanguageTag, resolveText
|
from modules.shared.i18nRegistry import normalizePrimaryLanguageTag, resolveText
|
||||||
|
|
||||||
|
|
@ -41,12 +43,21 @@ def _pickFromLangMap(d: Any, lang: str) -> Any:
|
||||||
|
|
||||||
|
|
||||||
def _localizeNode(node: Dict[str, Any], language: str) -> Dict[str, Any]:
|
def _localizeNode(node: Dict[str, Any], language: str) -> Dict[str, Any]:
|
||||||
"""Apply request language via resolveText (t() keys + multilingual dicts)."""
|
"""Apply request language via resolveText (t() keys + multilingual dicts).
|
||||||
|
|
||||||
|
Also exposes Schicht-3 metadata (`bindsAction`) derived from the legacy
|
||||||
|
`_method`/`_action` pair, so frontend consumers can resolve back to the
|
||||||
|
Schicht-2 Action signature without parsing internal underscore-prefixed
|
||||||
|
fields.
|
||||||
|
"""
|
||||||
lang = normalizePrimaryLanguageTag(language, "en")
|
lang = normalizePrimaryLanguageTag(language, "en")
|
||||||
|
bindsAction = bindsActionFromLegacy(node)
|
||||||
out = dict(node)
|
out = dict(node)
|
||||||
for key in list(out.keys()):
|
for key in list(out.keys()):
|
||||||
if key.startswith("_"):
|
if key.startswith("_"):
|
||||||
del out[key]
|
del out[key]
|
||||||
|
if bindsAction:
|
||||||
|
out["bindsAction"] = bindsAction
|
||||||
lbl = node.get("label")
|
lbl = node.get("label")
|
||||||
if lbl is not None:
|
if lbl is not None:
|
||||||
out["label"] = resolveText(lbl, lang) or node.get("id", "")
|
out["label"] = resolveText(lbl, lang) or node.get("id", "")
|
||||||
|
|
@ -71,6 +82,34 @@ def _localizeNode(node: Dict[str, Any], language: str) -> Dict[str, Any]:
|
||||||
pc["description"] = resolveText(pd, lang)
|
pc["description"] = resolveText(pd, lang)
|
||||||
params.append(pc)
|
params.append(pc)
|
||||||
out["parameters"] = params
|
out["parameters"] = params
|
||||||
|
|
||||||
|
out_ports: Dict[Any, Dict[str, Any]] = {}
|
||||||
|
for idx, po in (node.get("outputPorts") or {}).items():
|
||||||
|
if not isinstance(po, dict):
|
||||||
|
continue
|
||||||
|
port_copy = dict(po)
|
||||||
|
opts = port_copy.get("dataPickOptions")
|
||||||
|
if isinstance(opts, list):
|
||||||
|
loc_opts: List[Dict[str, Any]] = []
|
||||||
|
for o in opts:
|
||||||
|
if not isinstance(o, dict):
|
||||||
|
continue
|
||||||
|
oc = dict(o)
|
||||||
|
pl = oc.get("pickerLabel")
|
||||||
|
if pl is not None:
|
||||||
|
oc["pickerLabel"] = resolveText(pl, lang)
|
||||||
|
dt = oc.get("detail")
|
||||||
|
if dt is not None:
|
||||||
|
oc["detail"] = resolveText(dt, lang)
|
||||||
|
loc_opts.append(oc)
|
||||||
|
port_copy["dataPickOptions"] = loc_opts
|
||||||
|
out_ports[idx] = port_copy
|
||||||
|
if isinstance(node.get("outputPorts"), dict):
|
||||||
|
out["outputPorts"] = out_ports
|
||||||
|
|
||||||
|
# Legacy node-level key no longer used — do not expose.
|
||||||
|
out.pop("outputPickHints", None)
|
||||||
|
|
||||||
return out
|
return out
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -101,7 +140,7 @@ def getNodeTypesForApi(
|
||||||
for name, schema in PORT_TYPE_CATALOG.items():
|
for name, schema in PORT_TYPE_CATALOG.items():
|
||||||
catalogSerialized[name] = {
|
catalogSerialized[name] = {
|
||||||
"name": schema.name,
|
"name": schema.name,
|
||||||
"fields": [f.model_dump() for f in schema.fields],
|
"fields": [f.model_dump(by_alias=True, exclude_none=True) for f in schema.fields],
|
||||||
}
|
}
|
||||||
|
|
||||||
return {
|
return {
|
||||||
|
|
@ -109,6 +148,7 @@ def getNodeTypesForApi(
|
||||||
"categories": categories,
|
"categories": categories,
|
||||||
"portTypeCatalog": catalogSerialized,
|
"portTypeCatalog": catalogSerialized,
|
||||||
"systemVariables": SYSTEM_VARIABLES,
|
"systemVariables": SYSTEM_VARIABLES,
|
||||||
|
"formFieldTypes": FORM_FIELD_TYPES,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -124,3 +164,46 @@ def getNodeTypeToMethodAction() -> Dict[str, tuple]:
|
||||||
if method and action:
|
if method and action:
|
||||||
mapping[node["id"]] = (method, action)
|
mapping[node["id"]] = (method, action)
|
||||||
return mapping
|
return mapping
|
||||||
|
|
||||||
|
|
||||||
|
def validateAdaptersAgainstMethods(methodInstances: Optional[Dict[str, Any]] = None) -> Optional[str]:
|
||||||
|
"""Run the Schicht-3 Adapter validator (5 drift rules) against the live methods.
|
||||||
|
|
||||||
|
Intended to be called once at startup after methodDiscovery has populated
|
||||||
|
the methods registry. Returns a human-readable report (None when healthy)
|
||||||
|
so the caller decides whether to log, raise, or surface to operators.
|
||||||
|
|
||||||
|
Pass `methodInstances` directly for testability; defaults to importing
|
||||||
|
the live registry from `methodDiscovery.methods`.
|
||||||
|
"""
|
||||||
|
from modules.features.graphicalEditor.adapterValidator import (
|
||||||
|
_buildActionsRegistryFromMethods,
|
||||||
|
_formatAdapterReport,
|
||||||
|
_validateAllAdapters,
|
||||||
|
)
|
||||||
|
|
||||||
|
if methodInstances is None:
|
||||||
|
try:
|
||||||
|
from modules.workflows.processing.shared.methodDiscovery import methods
|
||||||
|
except Exception as exc:
|
||||||
|
logger.warning("Adapter validator skipped: cannot import methodDiscovery (%s)", exc)
|
||||||
|
return None
|
||||||
|
|
||||||
|
methodInstances = {}
|
||||||
|
for fullName, info in (methods or {}).items():
|
||||||
|
shortName = fullName.replace("Method", "").lower() if fullName[:1].isupper() else fullName
|
||||||
|
instance = info.get("instance") if isinstance(info, dict) else None
|
||||||
|
if instance is not None:
|
||||||
|
methodInstances[shortName] = instance
|
||||||
|
|
||||||
|
if not methodInstances:
|
||||||
|
return None
|
||||||
|
|
||||||
|
actionsRegistry = _buildActionsRegistryFromMethods(methodInstances)
|
||||||
|
report = _validateAllAdapters(list(STATIC_NODE_TYPES), actionsRegistry)
|
||||||
|
formatted = _formatAdapterReport(report)
|
||||||
|
if not report.isHealthy:
|
||||||
|
logger.warning("[adapterValidator] %s", formatted)
|
||||||
|
elif report.warnings:
|
||||||
|
logger.info("[adapterValidator] %s", formatted)
|
||||||
|
return formatted
|
||||||
|
|
|
||||||
File diff suppressed because it is too large
Load diff
|
|
@ -14,7 +14,7 @@ from fastapi import APIRouter, Depends, Path, Query, Body, Request, HTTPExceptio
|
||||||
from fastapi.responses import JSONResponse, StreamingResponse, Response
|
from fastapi.responses import JSONResponse, StreamingResponse, Response
|
||||||
from modules.auth import limiter, getRequestContext, RequestContext
|
from modules.auth import limiter, getRequestContext, RequestContext
|
||||||
from modules.datamodels.datamodelPagination import PaginationParams, PaginationMetadata, normalize_pagination_dict
|
from modules.datamodels.datamodelPagination import PaginationParams, PaginationMetadata, normalize_pagination_dict
|
||||||
from modules.routes.routeHelpers import _applyFiltersAndSort
|
from modules.routes.routeHelpers import applyFiltersAndSort
|
||||||
|
|
||||||
from modules.features.graphicalEditor.mainGraphicalEditor import getGraphicalEditorServices
|
from modules.features.graphicalEditor.mainGraphicalEditor import getGraphicalEditorServices
|
||||||
from modules.features.graphicalEditor.nodeRegistry import getNodeTypesForApi
|
from modules.features.graphicalEditor.nodeRegistry import getNodeTypesForApi
|
||||||
|
|
@ -26,6 +26,7 @@ from modules.workflows.automation2.runEnvelope import (
|
||||||
normalize_run_envelope,
|
normalize_run_envelope,
|
||||||
)
|
)
|
||||||
from modules.features.graphicalEditor.entryPoints import find_invocation
|
from modules.features.graphicalEditor.entryPoints import find_invocation
|
||||||
|
from modules.features.graphicalEditor.upstreamPathsService import compute_upstream_paths
|
||||||
from modules.shared.i18nRegistry import apiRouteContext, resolveText
|
from modules.shared.i18nRegistry import apiRouteContext, resolveText
|
||||||
routeApiMsg = apiRouteContext("routeFeatureGraphicalEditor")
|
routeApiMsg = apiRouteContext("routeFeatureGraphicalEditor")
|
||||||
|
|
||||||
|
|
@ -110,6 +111,44 @@ def _validateInstanceAccess(instanceId: str, context: RequestContext) -> str:
|
||||||
return str(instance.mandateId) if instance.mandateId else ""
|
return str(instance.mandateId) if instance.mandateId else ""
|
||||||
|
|
||||||
|
|
||||||
|
def _validateTargetInstance(
|
||||||
|
workflowData: Dict[str, Any],
|
||||||
|
ownerInstanceId: str,
|
||||||
|
context: RequestContext,
|
||||||
|
) -> None:
|
||||||
|
"""Enforce targetFeatureInstanceId rules for non-template workflows.
|
||||||
|
|
||||||
|
- Templates (isTemplate=True) may omit targetFeatureInstanceId.
|
||||||
|
- Non-templates MUST have a non-empty targetFeatureInstanceId.
|
||||||
|
- If the targetFeatureInstanceId differs from the GE owner instance,
|
||||||
|
the user must also have FeatureAccess on that target instance.
|
||||||
|
"""
|
||||||
|
if workflowData.get("isTemplate"):
|
||||||
|
return
|
||||||
|
|
||||||
|
targetId = workflowData.get("targetFeatureInstanceId")
|
||||||
|
if not targetId:
|
||||||
|
return
|
||||||
|
|
||||||
|
if targetId == ownerInstanceId:
|
||||||
|
return
|
||||||
|
|
||||||
|
from modules.interfaces.interfaceDbApp import getRootInterface
|
||||||
|
rootInterface = getRootInterface()
|
||||||
|
targetInstance = rootInterface.getFeatureInstance(targetId)
|
||||||
|
if not targetInstance:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=400,
|
||||||
|
detail=routeApiMsg("targetFeatureInstanceId refers to a non-existent feature instance"),
|
||||||
|
)
|
||||||
|
targetAccess = rootInterface.getFeatureAccess(str(context.user.id), targetId)
|
||||||
|
if not targetAccess or not targetAccess.enabled:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=403,
|
||||||
|
detail=routeApiMsg("Access denied to target feature instance"),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
@router.get("/{instanceId}/node-types")
|
@router.get("/{instanceId}/node-types")
|
||||||
@limiter.limit("60/minute")
|
@limiter.limit("60/minute")
|
||||||
def get_node_types(
|
def get_node_types(
|
||||||
|
|
@ -135,6 +174,48 @@ def get_node_types(
|
||||||
return result
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/{instanceId}/upstream-paths")
|
||||||
|
@limiter.limit("60/minute")
|
||||||
|
def post_upstream_paths(
|
||||||
|
request: Request,
|
||||||
|
instanceId: str = Path(..., description="Feature instance ID"),
|
||||||
|
body: Dict[str, Any] = Body(...),
|
||||||
|
context: RequestContext = Depends(getRequestContext),
|
||||||
|
) -> dict:
|
||||||
|
"""Return pickable upstream DataRef paths for a node (draft graph in body)."""
|
||||||
|
_validateInstanceAccess(instanceId, context)
|
||||||
|
graph = body.get("graph")
|
||||||
|
node_id = body.get("nodeId")
|
||||||
|
if not isinstance(graph, dict) or not node_id:
|
||||||
|
raise HTTPException(status_code=400, detail=routeApiMsg("graph and nodeId are required"))
|
||||||
|
paths = compute_upstream_paths(graph, str(node_id))
|
||||||
|
return {"paths": paths}
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/{instanceId}/upstream-paths/{node_id}")
|
||||||
|
@limiter.limit("60/minute")
|
||||||
|
def get_upstream_paths_saved(
|
||||||
|
request: Request,
|
||||||
|
instanceId: str = Path(..., description="Feature instance ID"),
|
||||||
|
node_id: str = Path(..., description="Target node id"),
|
||||||
|
workflowId: str = Query(..., description="Workflow id whose saved graph is used"),
|
||||||
|
context: RequestContext = Depends(getRequestContext),
|
||||||
|
) -> dict:
|
||||||
|
"""Return upstream paths using the persisted workflow graph (same payload as POST variant)."""
|
||||||
|
mandate_id = _validateInstanceAccess(instanceId, context)
|
||||||
|
if not workflowId:
|
||||||
|
raise HTTPException(status_code=400, detail=routeApiMsg("workflowId is required"))
|
||||||
|
from modules.features.graphicalEditor.interfaceFeatureGraphicalEditor import getGraphicalEditorInterface
|
||||||
|
|
||||||
|
iface = getGraphicalEditorInterface(context.user, mandate_id, featureInstanceId=instanceId)
|
||||||
|
wf = iface.getWorkflow(workflowId)
|
||||||
|
if not wf:
|
||||||
|
raise HTTPException(status_code=404, detail=routeApiMsg("Workflow not found"))
|
||||||
|
graph = wf.get("graph") or {}
|
||||||
|
paths = compute_upstream_paths(graph if isinstance(graph, dict) else {}, str(node_id))
|
||||||
|
return {"paths": paths}
|
||||||
|
|
||||||
|
|
||||||
@router.get("/{instanceId}/options/user.connection")
|
@router.get("/{instanceId}/options/user.connection")
|
||||||
@limiter.limit("60/minute")
|
@limiter.limit("60/minute")
|
||||||
def get_user_connection_options(
|
def get_user_connection_options(
|
||||||
|
|
@ -187,6 +268,65 @@ def get_user_connection_options(
|
||||||
return {"options": options}
|
return {"options": options}
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/{instanceId}/options/feature.instance")
|
||||||
|
@limiter.limit("60/minute")
|
||||||
|
def get_feature_instance_options(
|
||||||
|
request: Request,
|
||||||
|
instanceId: str = Path(..., description="GraphicalEditor feature instance ID (workflow context)"),
|
||||||
|
featureCode: str = Query(..., description="Feature code to filter by (e.g. 'trustee', 'redmine', 'clickup')"),
|
||||||
|
enabledOnly: bool = Query(True, description="If true (default), only enabled feature instances are returned"),
|
||||||
|
context: RequestContext = Depends(getRequestContext),
|
||||||
|
) -> dict:
|
||||||
|
"""Return mandate-scoped FeatureInstances for the given featureCode.
|
||||||
|
|
||||||
|
Used by node parameters with frontendType='featureInstance' (e.g. Trustee
|
||||||
|
or Redmine nodes that need to bind to a specific tenant FeatureInstance).
|
||||||
|
Always restricted to the calling user's mandate (derived from the workflow
|
||||||
|
feature instance) so the picker never leaks foreign-mandate instances.
|
||||||
|
|
||||||
|
Response: { options: [ { value: "<id>", label: "<displayName> ([code])" } ] }
|
||||||
|
"""
|
||||||
|
mandateId = _validateInstanceAccess(instanceId, context)
|
||||||
|
if not context.user:
|
||||||
|
raise HTTPException(status_code=401, detail=routeApiMsg("Authentication required"))
|
||||||
|
code = (featureCode or "").strip().lower()
|
||||||
|
if not code:
|
||||||
|
raise HTTPException(status_code=400, detail=routeApiMsg("featureCode query parameter is required"))
|
||||||
|
if not mandateId:
|
||||||
|
return {"options": []}
|
||||||
|
|
||||||
|
from modules.interfaces.interfaceDbApp import getRootInterface
|
||||||
|
rootInterface = getRootInterface()
|
||||||
|
try:
|
||||||
|
instances = rootInterface.getFeatureInstancesByMandate(
|
||||||
|
mandateId, enabledOnly=bool(enabledOnly)
|
||||||
|
) or []
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(
|
||||||
|
"get_feature_instance_options: failed to load instances mandateId=%s: %s",
|
||||||
|
mandateId, e, exc_info=True,
|
||||||
|
)
|
||||||
|
return {"options": []}
|
||||||
|
|
||||||
|
options: List[Dict[str, str]] = []
|
||||||
|
for fi in instances:
|
||||||
|
fiCode = (getattr(fi, "featureCode", "") or "").strip().lower()
|
||||||
|
if fiCode != code:
|
||||||
|
continue
|
||||||
|
fiId = str(getattr(fi, "id", "") or "")
|
||||||
|
if not fiId:
|
||||||
|
continue
|
||||||
|
rawLabel = getattr(fi, "label", None) or getattr(fi, "name", None) or fiId
|
||||||
|
options.append({"value": fiId, "label": f"{rawLabel} ({fiCode})"})
|
||||||
|
|
||||||
|
logger.info(
|
||||||
|
"graphicalEditor feature.instance options: instanceId=%s mandateId=%s "
|
||||||
|
"featureCode=%s enabledOnly=%s -> %d options",
|
||||||
|
instanceId, mandateId, code, enabledOnly, len(options),
|
||||||
|
)
|
||||||
|
return {"options": options}
|
||||||
|
|
||||||
|
|
||||||
@router.post("/{instanceId}/execute")
|
@router.post("/{instanceId}/execute")
|
||||||
@limiter.limit("30/minute")
|
@limiter.limit("30/minute")
|
||||||
async def post_execute(
|
async def post_execute(
|
||||||
|
|
@ -216,9 +356,12 @@ async def post_execute(
|
||||||
workflowId = body.get("workflowId")
|
workflowId = body.get("workflowId")
|
||||||
req_nodes = graph.get("nodes") or []
|
req_nodes = graph.get("nodes") or []
|
||||||
workflow_for_envelope: Optional[Dict[str, Any]] = None
|
workflow_for_envelope: Optional[Dict[str, Any]] = None
|
||||||
|
targetFeatureInstanceId: Optional[str] = None
|
||||||
if workflowId and not str(workflowId).startswith("transient-"):
|
if workflowId and not str(workflowId).startswith("transient-"):
|
||||||
iface = getGraphicalEditorInterface(context.user, mandateId, instanceId)
|
iface = getGraphicalEditorInterface(context.user, mandateId, instanceId)
|
||||||
workflow_for_envelope = iface.getWorkflow(workflowId)
|
workflow_for_envelope = iface.getWorkflow(workflowId)
|
||||||
|
if workflow_for_envelope:
|
||||||
|
targetFeatureInstanceId = workflow_for_envelope.get("targetFeatureInstanceId")
|
||||||
if workflowId and len(req_nodes) == 0:
|
if workflowId and len(req_nodes) == 0:
|
||||||
iface = getGraphicalEditorInterface(context.user, mandateId, instanceId)
|
iface = getGraphicalEditorInterface(context.user, mandateId, instanceId)
|
||||||
wf = iface.getWorkflow(workflowId)
|
wf = iface.getWorkflow(workflowId)
|
||||||
|
|
@ -226,10 +369,18 @@ async def post_execute(
|
||||||
graph = wf["graph"]
|
graph = wf["graph"]
|
||||||
logger.info("graphicalEditor execute: loaded graph from workflow %s", workflowId)
|
logger.info("graphicalEditor execute: loaded graph from workflow %s", workflowId)
|
||||||
workflow_for_envelope = wf
|
workflow_for_envelope = wf
|
||||||
|
targetFeatureInstanceId = wf.get("targetFeatureInstanceId")
|
||||||
if not workflowId:
|
if not workflowId:
|
||||||
import uuid
|
import uuid
|
||||||
workflowId = f"transient-{uuid.uuid4().hex[:12]}"
|
workflowId = f"transient-{uuid.uuid4().hex[:12]}"
|
||||||
logger.info("graphicalEditor execute: using transient workflowId=%s", workflowId)
|
logger.info("graphicalEditor execute: using transient workflowId=%s", workflowId)
|
||||||
|
|
||||||
|
if targetFeatureInstanceId and targetFeatureInstanceId != instanceId:
|
||||||
|
_validateTargetInstance(
|
||||||
|
{"targetFeatureInstanceId": targetFeatureInstanceId},
|
||||||
|
instanceId,
|
||||||
|
context,
|
||||||
|
)
|
||||||
nodes_count = len(graph.get("nodes") or [])
|
nodes_count = len(graph.get("nodes") or [])
|
||||||
connections_count = len(graph.get("connections") or [])
|
connections_count = len(graph.get("connections") or [])
|
||||||
logger.info(
|
logger.info(
|
||||||
|
|
@ -261,6 +412,7 @@ async def post_execute(
|
||||||
automation2_interface=ge_interface,
|
automation2_interface=ge_interface,
|
||||||
run_envelope=run_env,
|
run_envelope=run_env,
|
||||||
label=_wfLabel,
|
label=_wfLabel,
|
||||||
|
targetFeatureInstanceId=targetFeatureInstanceId,
|
||||||
)
|
)
|
||||||
logger.info(
|
logger.info(
|
||||||
"graphicalEditor execute result: success=%s error=%s nodeOutputs_keys=%s failedNode=%s paused=%s",
|
"graphicalEditor execute result: success=%s error=%s nodeOutputs_keys=%s failedNode=%s paused=%s",
|
||||||
|
|
@ -424,13 +576,35 @@ def get_templates(
|
||||||
instanceId: str = Path(..., description="Feature instance ID"),
|
instanceId: str = Path(..., description="Feature instance ID"),
|
||||||
scope: Optional[str] = Query(None, description="Filter by scope: user, instance, mandate, system"),
|
scope: Optional[str] = Query(None, description="Filter by scope: user, instance, mandate, system"),
|
||||||
pagination: Optional[str] = Query(None, description="JSON-encoded PaginationParams object"),
|
pagination: Optional[str] = Query(None, description="JSON-encoded PaginationParams object"),
|
||||||
|
mode: Optional[str] = Query(None, description="'filterValues' for distinct column values, 'ids' for all filtered IDs"),
|
||||||
|
column: Optional[str] = Query(None, description="Column key (required when mode=filterValues)"),
|
||||||
context: RequestContext = Depends(getRequestContext),
|
context: RequestContext = Depends(getRequestContext),
|
||||||
):
|
):
|
||||||
"""List workflow templates with optional pagination."""
|
"""List workflow templates with optional pagination.
|
||||||
|
|
||||||
|
Supports the FormGeneratorTable backend pattern:
|
||||||
|
- default: paginated/filtered/sorted ``{items, pagination}`` response
|
||||||
|
- ``mode=filterValues&column=X``: distinct values for column X (cross-filtered)
|
||||||
|
- ``mode=ids``: all IDs matching current filters
|
||||||
|
"""
|
||||||
mandateId = _validateInstanceAccess(instanceId, context)
|
mandateId = _validateInstanceAccess(instanceId, context)
|
||||||
iface = getGraphicalEditorInterface(context.user, mandateId, instanceId)
|
iface = getGraphicalEditorInterface(context.user, mandateId, instanceId)
|
||||||
templates = iface.getTemplates(scope=scope)
|
templates = iface.getTemplates(scope=scope)
|
||||||
|
|
||||||
|
from modules.routes.routeHelpers import enrichRowsWithFkLabels
|
||||||
|
from modules.features.graphicalEditor.datamodelFeatureGraphicalEditor import AutoWorkflow
|
||||||
|
enrichRowsWithFkLabels(templates, AutoWorkflow)
|
||||||
|
|
||||||
|
if mode == "filterValues":
|
||||||
|
if not column:
|
||||||
|
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
|
||||||
|
from modules.routes.routeHelpers import handleFilterValuesInMemory
|
||||||
|
return handleFilterValuesInMemory(templates, column, pagination)
|
||||||
|
|
||||||
|
if mode == "ids":
|
||||||
|
from modules.routes.routeHelpers import handleIdsInMemory
|
||||||
|
return handleIdsInMemory(templates, pagination)
|
||||||
|
|
||||||
paginationParams = None
|
paginationParams = None
|
||||||
if pagination:
|
if pagination:
|
||||||
try:
|
try:
|
||||||
|
|
@ -442,7 +616,7 @@ def get_templates(
|
||||||
raise HTTPException(status_code=400, detail=f"Invalid pagination parameter: {str(e)}")
|
raise HTTPException(status_code=400, detail=f"Invalid pagination parameter: {str(e)}")
|
||||||
|
|
||||||
if paginationParams:
|
if paginationParams:
|
||||||
filtered = _applyFiltersAndSort(templates, paginationParams)
|
filtered = applyFiltersAndSort(templates, paginationParams)
|
||||||
totalItems = len(filtered)
|
totalItems = len(filtered)
|
||||||
totalPages = math.ceil(totalItems / paginationParams.pageSize) if totalItems > 0 else 0
|
totalPages = math.ceil(totalItems / paginationParams.pageSize) if totalItems > 0 else 0
|
||||||
startIdx = (paginationParams.page - 1) * paginationParams.pageSize
|
startIdx = (paginationParams.page - 1) * paginationParams.pageSize
|
||||||
|
|
@ -813,6 +987,7 @@ async def _runEditorAgent(
|
||||||
"\n\nAvailable tools (all valid — use whichever the user's intent calls for):"
|
"\n\nAvailable tools (all valid — use whichever the user's intent calls for):"
|
||||||
"\n Graph-mutating: readWorkflowGraph, listAvailableNodeTypes, "
|
"\n Graph-mutating: readWorkflowGraph, listAvailableNodeTypes, "
|
||||||
"describeNodeType, addNode, removeNode, connectNodes, setNodeParameter, "
|
"describeNodeType, addNode, removeNode, connectNodes, setNodeParameter, "
|
||||||
|
"listUpstreamPaths, bindNodeParameter, "
|
||||||
"autoLayoutWorkflow, validateGraph."
|
"autoLayoutWorkflow, validateGraph."
|
||||||
"\n Workflow lifecycle: createWorkflow (new empty workflow), "
|
"\n Workflow lifecycle: createWorkflow (new empty workflow), "
|
||||||
"updateWorkflowMetadata (rename / change description / tags / activate), "
|
"updateWorkflowMetadata (rename / change description / tags / activate), "
|
||||||
|
|
@ -844,6 +1019,8 @@ async def _runEditorAgent(
|
||||||
"description, sane defaults, or — for required user-connection fields — "
|
"description, sane defaults, or — for required user-connection fields — "
|
||||||
"an actual connectionId). Do NOT pass position; the layout step handles it."
|
"an actual connectionId). Do NOT pass position; the layout step handles it."
|
||||||
"\n6. connectNodes — wire the nodes consistent with port schemas from describeNodeType."
|
"\n6. connectNodes — wire the nodes consistent with port schemas from describeNodeType."
|
||||||
|
"\n6b. When a parameter must take data from an upstream node, call listUpstreamPaths(nodeId=target) "
|
||||||
|
"then bindNodeParameter(producerNodeId, path, parameterName) — do not rely on implicit wire fill."
|
||||||
"\n7. autoLayoutWorkflow — call exactly once as the LAST graph-mutating step so the "
|
"\n7. autoLayoutWorkflow — call exactly once as the LAST graph-mutating step so the "
|
||||||
"canvas shows a readable top-down layout instead of overlapping boxes."
|
"canvas shows a readable top-down layout instead of overlapping boxes."
|
||||||
"\n8. validateGraph — sanity check, then answer the user."
|
"\n8. validateGraph — sanity check, then answer the user."
|
||||||
|
|
@ -860,15 +1037,15 @@ async def _runEditorAgent(
|
||||||
|
|
||||||
enrichedPrompt = prompt
|
enrichedPrompt = prompt
|
||||||
if dataSourceIds:
|
if dataSourceIds:
|
||||||
from modules.features.workspace.routeFeatureWorkspace import _buildDataSourceContext
|
from modules.features.workspace.routeFeatureWorkspace import buildDataSourceContext
|
||||||
chatSvc = getService("chat", ctx)
|
chatSvc = getService("chat", ctx)
|
||||||
dsInfo = _buildDataSourceContext(chatSvc, dataSourceIds)
|
dsInfo = buildDataSourceContext(chatSvc, dataSourceIds)
|
||||||
if dsInfo:
|
if dsInfo:
|
||||||
enrichedPrompt = f"{prompt}\n\n[Active Data Sources]\n{dsInfo}"
|
enrichedPrompt = f"{prompt}\n\n[Active Data Sources]\n{dsInfo}"
|
||||||
|
|
||||||
if featureDataSourceIds:
|
if featureDataSourceIds:
|
||||||
from modules.features.workspace.routeFeatureWorkspace import _buildFeatureDataSourceContext
|
from modules.features.workspace.routeFeatureWorkspace import buildFeatureDataSourceContext
|
||||||
fdsInfo = _buildFeatureDataSourceContext(featureDataSourceIds)
|
fdsInfo = buildFeatureDataSourceContext(featureDataSourceIds)
|
||||||
if fdsInfo:
|
if fdsInfo:
|
||||||
enrichedPrompt = f"{enrichedPrompt}\n\n[Attached Feature Data Sources]\n{fdsInfo}"
|
enrichedPrompt = f"{enrichedPrompt}\n\n[Attached Feature Data Sources]\n{fdsInfo}"
|
||||||
|
|
||||||
|
|
@ -1033,6 +1210,9 @@ async def list_connection_services(
|
||||||
"drive": "Google Drive",
|
"drive": "Google Drive",
|
||||||
"gmail": "Gmail",
|
"gmail": "Gmail",
|
||||||
"files": "Files (FTP)",
|
"files": "Files (FTP)",
|
||||||
|
"kdrive": "kDrive",
|
||||||
|
"calendar": "Calendar",
|
||||||
|
"contact": "Contacts",
|
||||||
}
|
}
|
||||||
_serviceIcons = {
|
_serviceIcons = {
|
||||||
"sharepoint": "sharepoint",
|
"sharepoint": "sharepoint",
|
||||||
|
|
@ -1043,6 +1223,9 @@ async def list_connection_services(
|
||||||
"drive": "cloud",
|
"drive": "cloud",
|
||||||
"gmail": "mail",
|
"gmail": "mail",
|
||||||
"files": "folder",
|
"files": "folder",
|
||||||
|
"kdrive": "cloud",
|
||||||
|
"calendar": "calendar",
|
||||||
|
"contact": "contact",
|
||||||
}
|
}
|
||||||
items = [
|
items = [
|
||||||
{"service": s, "label": _serviceLabels.get(s, s), "icon": _serviceIcons.get(s, "folder")}
|
{"service": s, "label": _serviceLabels.get(s, s), "icon": _serviceIcons.get(s, "folder")}
|
||||||
|
|
@ -1133,9 +1316,17 @@ def get_workflows(
|
||||||
instanceId: str = Path(..., description="Feature instance ID"),
|
instanceId: str = Path(..., description="Feature instance ID"),
|
||||||
active: Optional[bool] = Query(None, description="Filter by active: true|false"),
|
active: Optional[bool] = Query(None, description="Filter by active: true|false"),
|
||||||
pagination: Optional[str] = Query(None, description="JSON-encoded PaginationParams object"),
|
pagination: Optional[str] = Query(None, description="JSON-encoded PaginationParams object"),
|
||||||
|
mode: Optional[str] = Query(None, description="'filterValues' for distinct column values, 'ids' for all filtered IDs"),
|
||||||
|
column: Optional[str] = Query(None, description="Column key (required when mode=filterValues)"),
|
||||||
context: RequestContext = Depends(getRequestContext),
|
context: RequestContext = Depends(getRequestContext),
|
||||||
):
|
):
|
||||||
"""List all workflows for this feature instance."""
|
"""List all workflows for this feature instance.
|
||||||
|
|
||||||
|
Supports the FormGeneratorTable backend pattern:
|
||||||
|
- default: paginated/filtered/sorted ``{items, pagination}`` response
|
||||||
|
- ``mode=filterValues&column=X``: distinct values for column X (cross-filtered)
|
||||||
|
- ``mode=ids``: all IDs matching current filters (for "select all")
|
||||||
|
"""
|
||||||
mandateId = _validateInstanceAccess(instanceId, context)
|
mandateId = _validateInstanceAccess(instanceId, context)
|
||||||
iface = getGraphicalEditorInterface(context.user, mandateId, instanceId)
|
iface = getGraphicalEditorInterface(context.user, mandateId, instanceId)
|
||||||
items = iface.getWorkflows(active=active)
|
items = iface.getWorkflows(active=active)
|
||||||
|
|
@ -1163,10 +1354,19 @@ def get_workflows(
|
||||||
"runStatus": active_run.get("status") if active_run else None,
|
"runStatus": active_run.get("status") if active_run else None,
|
||||||
"stuckAtNodeId": stuck_at_node_id,
|
"stuckAtNodeId": stuck_at_node_id,
|
||||||
"stuckAtNodeLabel": stuck_at_node_label or stuck_at_node_id or "",
|
"stuckAtNodeLabel": stuck_at_node_label or stuck_at_node_id or "",
|
||||||
"createdAt": wf.get("sysCreatedAt"),
|
|
||||||
"lastStartedAt": last_started_at,
|
"lastStartedAt": last_started_at,
|
||||||
})
|
})
|
||||||
|
|
||||||
|
if mode == "filterValues":
|
||||||
|
if not column:
|
||||||
|
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
|
||||||
|
from modules.routes.routeHelpers import handleFilterValuesInMemory
|
||||||
|
return handleFilterValuesInMemory(enriched, column, pagination)
|
||||||
|
|
||||||
|
if mode == "ids":
|
||||||
|
from modules.routes.routeHelpers import handleIdsInMemory
|
||||||
|
return handleIdsInMemory(enriched, pagination)
|
||||||
|
|
||||||
paginationParams = None
|
paginationParams = None
|
||||||
if pagination:
|
if pagination:
|
||||||
try:
|
try:
|
||||||
|
|
@ -1178,7 +1378,7 @@ def get_workflows(
|
||||||
raise HTTPException(status_code=400, detail=f"Invalid pagination parameter: {str(e)}")
|
raise HTTPException(status_code=400, detail=f"Invalid pagination parameter: {str(e)}")
|
||||||
|
|
||||||
if paginationParams:
|
if paginationParams:
|
||||||
filtered = _applyFiltersAndSort(enriched, paginationParams)
|
filtered = applyFiltersAndSort(enriched, paginationParams)
|
||||||
totalItems = len(filtered)
|
totalItems = len(filtered)
|
||||||
totalPages = math.ceil(totalItems / paginationParams.pageSize) if totalItems > 0 else 0
|
totalPages = math.ceil(totalItems / paginationParams.pageSize) if totalItems > 0 else 0
|
||||||
startIdx = (paginationParams.page - 1) * paginationParams.pageSize
|
startIdx = (paginationParams.page - 1) * paginationParams.pageSize
|
||||||
|
|
@ -1221,6 +1421,7 @@ def create_workflow(
|
||||||
) -> dict:
|
) -> dict:
|
||||||
"""Create a new workflow."""
|
"""Create a new workflow."""
|
||||||
mandateId = _validateInstanceAccess(instanceId, context)
|
mandateId = _validateInstanceAccess(instanceId, context)
|
||||||
|
_validateTargetInstance(body, instanceId, context)
|
||||||
iface = getGraphicalEditorInterface(context.user, mandateId, instanceId)
|
iface = getGraphicalEditorInterface(context.user, mandateId, instanceId)
|
||||||
created = iface.createWorkflow(body)
|
created = iface.createWorkflow(body)
|
||||||
return created
|
return created
|
||||||
|
|
@ -1238,6 +1439,11 @@ def update_workflow(
|
||||||
"""Update a workflow."""
|
"""Update a workflow."""
|
||||||
mandateId = _validateInstanceAccess(instanceId, context)
|
mandateId = _validateInstanceAccess(instanceId, context)
|
||||||
iface = getGraphicalEditorInterface(context.user, mandateId, instanceId)
|
iface = getGraphicalEditorInterface(context.user, mandateId, instanceId)
|
||||||
|
existing = iface.getWorkflow(workflowId)
|
||||||
|
if not existing:
|
||||||
|
raise HTTPException(status_code=404, detail=routeApiMsg("Workflow not found"))
|
||||||
|
merged = {**existing, **body}
|
||||||
|
_validateTargetInstance(merged, instanceId, context)
|
||||||
updated = iface.updateWorkflow(workflowId, body)
|
updated = iface.updateWorkflow(workflowId, body)
|
||||||
if not updated:
|
if not updated:
|
||||||
raise HTTPException(status_code=404, detail=routeApiMsg("Workflow not found"))
|
raise HTTPException(status_code=404, detail=routeApiMsg("Workflow not found"))
|
||||||
|
|
|
||||||
162
modules/features/graphicalEditor/upstreamPathsService.py
Normal file
162
modules/features/graphicalEditor/upstreamPathsService.py
Normal file
|
|
@ -0,0 +1,162 @@
|
||||||
|
# Copyright (c) 2025 Patrick Motsch
|
||||||
|
"""Compute pickable upstream paths for DataPicker / AI workflow tools."""
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from typing import Any, Dict, List, Set
|
||||||
|
|
||||||
|
from modules.features.graphicalEditor.nodeDefinitions import STATIC_NODE_TYPES
|
||||||
|
from modules.features.graphicalEditor.portTypes import PORT_TYPE_CATALOG, PortSchema, parse_graph_defined_output_schema
|
||||||
|
from modules.workflows.automation2.graphUtils import buildConnectionMap
|
||||||
|
|
||||||
|
_NODE_BY_TYPE = {n["id"]: n for n in STATIC_NODE_TYPES}
|
||||||
|
|
||||||
|
|
||||||
|
def _paths_for_port_schema(schema: PortSchema, producer_node_id: str) -> List[Dict[str, Any]]:
|
||||||
|
out: List[Dict[str, Any]] = []
|
||||||
|
for field in schema.fields:
|
||||||
|
path = [field.name]
|
||||||
|
out.append(
|
||||||
|
{
|
||||||
|
"producerNodeId": producer_node_id,
|
||||||
|
"path": path,
|
||||||
|
"type": field.type,
|
||||||
|
"label": ".".join(str(p) for p in path),
|
||||||
|
"scopeOrigin": "data",
|
||||||
|
}
|
||||||
|
)
|
||||||
|
out.append(
|
||||||
|
{
|
||||||
|
"producerNodeId": producer_node_id,
|
||||||
|
"path": [],
|
||||||
|
"type": schema.name,
|
||||||
|
"label": "(whole output)",
|
||||||
|
"scopeOrigin": "data",
|
||||||
|
}
|
||||||
|
)
|
||||||
|
return out
|
||||||
|
|
||||||
|
|
||||||
|
def _paths_for_data_pick_options(
|
||||||
|
options: List[Dict[str, Any]],
|
||||||
|
producer_node_id: str,
|
||||||
|
) -> List[Dict[str, Any]]:
|
||||||
|
"""Explicit per-port pick list from node definition (authoritative; no catalog expansion)."""
|
||||||
|
out: List[Dict[str, Any]] = []
|
||||||
|
for o in options:
|
||||||
|
if not isinstance(o, dict):
|
||||||
|
continue
|
||||||
|
path = o.get("path")
|
||||||
|
if not isinstance(path, list):
|
||||||
|
continue
|
||||||
|
label = o.get("pickerLabel")
|
||||||
|
out.append(
|
||||||
|
{
|
||||||
|
"producerNodeId": producer_node_id,
|
||||||
|
"path": path,
|
||||||
|
"type": o.get("type") or "Any",
|
||||||
|
"label": label if isinstance(label, str) else ".".join(str(p) for p in path),
|
||||||
|
"scopeOrigin": "data",
|
||||||
|
}
|
||||||
|
)
|
||||||
|
return out
|
||||||
|
|
||||||
|
|
||||||
|
def _paths_for_schema(schema_name: str, producer_node_id: str) -> List[Dict[str, Any]]:
|
||||||
|
if not schema_name or schema_name == "Transit":
|
||||||
|
return []
|
||||||
|
schema = PORT_TYPE_CATALOG.get(schema_name)
|
||||||
|
if not schema:
|
||||||
|
return []
|
||||||
|
return _paths_for_port_schema(schema, producer_node_id)
|
||||||
|
|
||||||
|
|
||||||
|
def compute_upstream_paths(graph: Dict[str, Any], target_node_id: str) -> List[Dict[str, Any]]:
|
||||||
|
"""
|
||||||
|
Return flattened first-level paths for every ancestor node's primary output schema.
|
||||||
|
"""
|
||||||
|
nodes = graph.get("nodes") or []
|
||||||
|
connections = graph.get("connections") or []
|
||||||
|
node_by_id = {n["id"]: n for n in nodes if n.get("id")}
|
||||||
|
if target_node_id not in node_by_id:
|
||||||
|
return []
|
||||||
|
|
||||||
|
conn_map = buildConnectionMap(connections)
|
||||||
|
# predecessors: walk backwards along edges (target -> source)
|
||||||
|
preds: Dict[str, Set[str]] = {}
|
||||||
|
for tgt, pairs in conn_map.items():
|
||||||
|
for src, _, _ in pairs:
|
||||||
|
preds.setdefault(tgt, set()).add(src)
|
||||||
|
|
||||||
|
seen: Set[str] = set()
|
||||||
|
stack = [target_node_id]
|
||||||
|
ancestors: Set[str] = set()
|
||||||
|
while stack:
|
||||||
|
cur = stack.pop()
|
||||||
|
for p in preds.get(cur, ()):
|
||||||
|
if p not in seen:
|
||||||
|
seen.add(p)
|
||||||
|
ancestors.add(p)
|
||||||
|
stack.append(p)
|
||||||
|
|
||||||
|
paths: List[Dict[str, Any]] = []
|
||||||
|
for aid in sorted(ancestors):
|
||||||
|
anode = node_by_id.get(aid)
|
||||||
|
if not anode:
|
||||||
|
continue
|
||||||
|
nt = anode.get("type", "")
|
||||||
|
ndef = _NODE_BY_TYPE.get(nt)
|
||||||
|
if not ndef:
|
||||||
|
continue
|
||||||
|
out0 = (ndef.get("outputPorts") or {}).get(0, {})
|
||||||
|
out0 = out0 if isinstance(out0, dict) else {}
|
||||||
|
dpo = out0.get("dataPickOptions")
|
||||||
|
if isinstance(dpo, list) and len(dpo) > 0:
|
||||||
|
plab = (anode.get("title") or "").strip() or aid
|
||||||
|
for entry in _paths_for_data_pick_options(dpo, aid):
|
||||||
|
entry["producerLabel"] = plab
|
||||||
|
paths.append(entry)
|
||||||
|
continue
|
||||||
|
|
||||||
|
derived = parse_graph_defined_output_schema(anode, out0)
|
||||||
|
if derived:
|
||||||
|
for entry in _paths_for_port_schema(derived, aid):
|
||||||
|
entry["producerLabel"] = (anode.get("title") or "").strip() or aid
|
||||||
|
paths.append(entry)
|
||||||
|
else:
|
||||||
|
raw_schema = out0.get("schema") if isinstance(out0, dict) else None
|
||||||
|
schema_name = raw_schema if isinstance(raw_schema, str) and raw_schema else "ActionResult"
|
||||||
|
for entry in _paths_for_schema(schema_name, aid):
|
||||||
|
entry["producerLabel"] = (anode.get("title") or "").strip() or aid
|
||||||
|
paths.append(entry)
|
||||||
|
|
||||||
|
# Lexical loop hints (flow.loop): any loop node in ancestors adds synthetic paths
|
||||||
|
for aid in ancestors:
|
||||||
|
anode = node_by_id.get(aid) or {}
|
||||||
|
if anode.get("type") == "flow.loop":
|
||||||
|
paths.extend(
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"producerNodeId": aid,
|
||||||
|
"path": ["currentItem"],
|
||||||
|
"type": "Any",
|
||||||
|
"label": "loop.currentItem",
|
||||||
|
"scopeOrigin": "loop",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"producerNodeId": aid,
|
||||||
|
"path": ["currentIndex"],
|
||||||
|
"type": "int",
|
||||||
|
"label": "loop.currentIndex",
|
||||||
|
"scopeOrigin": "loop",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"producerNodeId": aid,
|
||||||
|
"path": ["count"],
|
||||||
|
"type": "int",
|
||||||
|
"label": "loop.count",
|
||||||
|
"scopeOrigin": "loop",
|
||||||
|
},
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
return paths
|
||||||
|
|
@ -32,7 +32,7 @@ class DataNeutraliserConfig(PowerOnModel):
|
||||||
"frontend_type": "text",
|
"frontend_type": "text",
|
||||||
"frontend_readonly": True,
|
"frontend_readonly": True,
|
||||||
"frontend_required": True,
|
"frontend_required": True,
|
||||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
featureInstanceId: str = Field(
|
featureInstanceId: str = Field(
|
||||||
|
|
@ -42,7 +42,7 @@ class DataNeutraliserConfig(PowerOnModel):
|
||||||
"frontend_type": "text",
|
"frontend_type": "text",
|
||||||
"frontend_readonly": True,
|
"frontend_readonly": True,
|
||||||
"frontend_required": True,
|
"frontend_required": True,
|
||||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
|
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
userId: str = Field(
|
userId: str = Field(
|
||||||
|
|
@ -52,7 +52,7 @@ class DataNeutraliserConfig(PowerOnModel):
|
||||||
"frontend_type": "text",
|
"frontend_type": "text",
|
||||||
"frontend_readonly": True,
|
"frontend_readonly": True,
|
||||||
"frontend_required": True,
|
"frontend_required": True,
|
||||||
"fk_target": {"db": "poweron_app", "table": "User"},
|
"fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
enabled: bool = Field(
|
enabled: bool = Field(
|
||||||
|
|
@ -107,7 +107,7 @@ class DataNeutralizerAttributes(BaseModel):
|
||||||
"frontend_type": "text",
|
"frontend_type": "text",
|
||||||
"frontend_readonly": True,
|
"frontend_readonly": True,
|
||||||
"frontend_required": True,
|
"frontend_required": True,
|
||||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
featureInstanceId: str = Field(
|
featureInstanceId: str = Field(
|
||||||
|
|
@ -117,7 +117,7 @@ class DataNeutralizerAttributes(BaseModel):
|
||||||
"frontend_type": "text",
|
"frontend_type": "text",
|
||||||
"frontend_readonly": True,
|
"frontend_readonly": True,
|
||||||
"frontend_required": True,
|
"frontend_required": True,
|
||||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
|
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
userId: str = Field(
|
userId: str = Field(
|
||||||
|
|
@ -127,7 +127,7 @@ class DataNeutralizerAttributes(BaseModel):
|
||||||
"frontend_type": "text",
|
"frontend_type": "text",
|
||||||
"frontend_readonly": True,
|
"frontend_readonly": True,
|
||||||
"frontend_required": True,
|
"frontend_required": True,
|
||||||
"fk_target": {"db": "poweron_app", "table": "User"},
|
"fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
originalText: str = Field(
|
originalText: str = Field(
|
||||||
|
|
@ -142,7 +142,7 @@ class DataNeutralizerAttributes(BaseModel):
|
||||||
"frontend_type": "text",
|
"frontend_type": "text",
|
||||||
"frontend_readonly": True,
|
"frontend_readonly": True,
|
||||||
"frontend_required": False,
|
"frontend_required": False,
|
||||||
"fk_target": {"db": "poweron_management", "table": "FileItem"},
|
"fk_target": {"db": "poweron_management", "table": "FileItem", "labelField": "fileName"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
patternType: str = Field(
|
patternType: str = Field(
|
||||||
|
|
@ -160,16 +160,16 @@ class DataNeutralizationSnapshot(BaseModel):
|
||||||
)
|
)
|
||||||
mandateId: str = Field(
|
mandateId: str = Field(
|
||||||
description="Mandate scope",
|
description="Mandate scope",
|
||||||
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate"}},
|
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}},
|
||||||
)
|
)
|
||||||
featureInstanceId: str = Field(
|
featureInstanceId: str = Field(
|
||||||
default="",
|
default="",
|
||||||
description="Feature instance scope",
|
description="Feature instance scope",
|
||||||
json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
|
json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}},
|
||||||
)
|
)
|
||||||
userId: str = Field(
|
userId: str = Field(
|
||||||
description="User who triggered neutralization",
|
description="User who triggered neutralization",
|
||||||
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "User"}},
|
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"}},
|
||||||
)
|
)
|
||||||
sourceLabel: str = Field(
|
sourceLabel: str = Field(
|
||||||
description="Human label, e.g. 'Prompt', 'Kontext', 'Nachricht 3'",
|
description="Human label, e.g. 'Prompt', 'Kontext', 'Nachricht 3'",
|
||||||
|
|
|
||||||
|
|
@ -288,7 +288,7 @@ class Kanton(PowerOnModel):
|
||||||
"frontend_type": "text",
|
"frontend_type": "text",
|
||||||
"frontend_readonly": False,
|
"frontend_readonly": False,
|
||||||
"frontend_required": False,
|
"frontend_required": False,
|
||||||
"fk_target": {"db": "poweron_realestate", "table": "Land"},
|
"fk_target": {"db": "poweron_realestate", "table": "Land", "labelField": "label"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
abk: Optional[str] = Field(
|
abk: Optional[str] = Field(
|
||||||
|
|
@ -348,7 +348,7 @@ class Gemeinde(BaseModel):
|
||||||
"frontend_type": "text",
|
"frontend_type": "text",
|
||||||
"frontend_readonly": False,
|
"frontend_readonly": False,
|
||||||
"frontend_required": False,
|
"frontend_required": False,
|
||||||
"fk_target": {"db": "poweron_realestate", "table": "Kanton"},
|
"fk_target": {"db": "poweron_realestate", "table": "Kanton", "labelField": "label"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
plz: Optional[str] = Field(
|
plz: Optional[str] = Field(
|
||||||
|
|
@ -398,7 +398,7 @@ class Parzelle(PowerOnModel):
|
||||||
"frontend_readonly": True,
|
"frontend_readonly": True,
|
||||||
"frontend_required": False,
|
"frontend_required": False,
|
||||||
"label": "Mandats-ID",
|
"label": "Mandats-ID",
|
||||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
featureInstanceId: str = Field(
|
featureInstanceId: str = Field(
|
||||||
|
|
@ -408,7 +408,7 @@ class Parzelle(PowerOnModel):
|
||||||
"frontend_readonly": True,
|
"frontend_readonly": True,
|
||||||
"frontend_required": False,
|
"frontend_required": False,
|
||||||
"label": "Feature-Instanz-ID",
|
"label": "Feature-Instanz-ID",
|
||||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
|
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
@ -472,7 +472,7 @@ class Parzelle(PowerOnModel):
|
||||||
"frontend_type": "text",
|
"frontend_type": "text",
|
||||||
"frontend_readonly": False,
|
"frontend_readonly": False,
|
||||||
"frontend_required": False,
|
"frontend_required": False,
|
||||||
"fk_target": {"db": "poweron_realestate", "table": "Gemeinde"},
|
"fk_target": {"db": "poweron_realestate", "table": "Gemeinde", "labelField": "label"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
@ -638,7 +638,7 @@ class Projekt(PowerOnModel):
|
||||||
"frontend_readonly": True,
|
"frontend_readonly": True,
|
||||||
"frontend_required": False,
|
"frontend_required": False,
|
||||||
"label": "Mandats-ID",
|
"label": "Mandats-ID",
|
||||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
featureInstanceId: str = Field(
|
featureInstanceId: str = Field(
|
||||||
|
|
@ -648,7 +648,7 @@ class Projekt(PowerOnModel):
|
||||||
"frontend_readonly": True,
|
"frontend_readonly": True,
|
||||||
"frontend_required": False,
|
"frontend_required": False,
|
||||||
"label": "Feature-Instanz-ID",
|
"label": "Feature-Instanz-ID",
|
||||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
|
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
label: str = Field(
|
label: str = Field(
|
||||||
|
|
|
||||||
|
|
@ -228,31 +228,27 @@ def get_projects(
|
||||||
recordFilter = {"featureInstanceId": instanceId}
|
recordFilter = {"featureInstanceId": instanceId}
|
||||||
|
|
||||||
if mode in ("filterValues", "ids"):
|
if mode in ("filterValues", "ids"):
|
||||||
from modules.routes.routeHelpers import handleFilterValuesInMemory, handleIdsInMemory
|
from modules.routes.routeHelpers import handleFilterValuesInMemory, handleIdsInMemory, enrichRowsWithFkLabels
|
||||||
items = interface.getProjekte(recordFilter=recordFilter)
|
items = interface.getProjekte(recordFilter=recordFilter)
|
||||||
itemDicts = [i.model_dump() if hasattr(i, 'model_dump') else i for i in items]
|
itemDicts = [i.model_dump() if hasattr(i, 'model_dump') else i for i in items]
|
||||||
if mode == "filterValues":
|
if mode == "filterValues":
|
||||||
if not column:
|
if not column:
|
||||||
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
|
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
|
||||||
|
enrichRowsWithFkLabels(itemDicts, Projekt)
|
||||||
return handleFilterValuesInMemory(itemDicts, column, pagination)
|
return handleFilterValuesInMemory(itemDicts, column, pagination)
|
||||||
return handleIdsInMemory(itemDicts, pagination)
|
return handleIdsInMemory(itemDicts, pagination)
|
||||||
|
|
||||||
items = interface.getProjekte(recordFilter=recordFilter)
|
items = interface.getProjekte(recordFilter=recordFilter)
|
||||||
paginationParams = _parsePagination(pagination)
|
paginationParams = _parsePagination(pagination)
|
||||||
if paginationParams:
|
if paginationParams:
|
||||||
if paginationParams.sort:
|
from modules.routes.routeHelpers import applyFiltersAndSort
|
||||||
for sort_field in reversed(paginationParams.sort):
|
itemDicts = [i.model_dump() if hasattr(i, 'model_dump') else i for i in items]
|
||||||
field_name = sort_field.field
|
filtered = applyFiltersAndSort(itemDicts, paginationParams)
|
||||||
direction = sort_field.direction.lower()
|
total_items = len(filtered)
|
||||||
items.sort(
|
|
||||||
key=lambda x: getattr(x, field_name, None),
|
|
||||||
reverse=(direction == "desc")
|
|
||||||
)
|
|
||||||
total_items = len(items)
|
|
||||||
total_pages = (total_items + paginationParams.pageSize - 1) // paginationParams.pageSize
|
total_pages = (total_items + paginationParams.pageSize - 1) // paginationParams.pageSize
|
||||||
start_idx = (paginationParams.page - 1) * paginationParams.pageSize
|
start_idx = (paginationParams.page - 1) * paginationParams.pageSize
|
||||||
end_idx = start_idx + paginationParams.pageSize
|
end_idx = start_idx + paginationParams.pageSize
|
||||||
paginated_items = items[start_idx:end_idx]
|
paginated_items = filtered[start_idx:end_idx]
|
||||||
return PaginatedResponse(
|
return PaginatedResponse(
|
||||||
items=paginated_items,
|
items=paginated_items,
|
||||||
pagination=PaginationMetadata(
|
pagination=PaginationMetadata(
|
||||||
|
|
@ -373,31 +369,27 @@ def get_parcels(
|
||||||
recordFilter = {"featureInstanceId": instanceId}
|
recordFilter = {"featureInstanceId": instanceId}
|
||||||
|
|
||||||
if mode in ("filterValues", "ids"):
|
if mode in ("filterValues", "ids"):
|
||||||
from modules.routes.routeHelpers import handleFilterValuesInMemory, handleIdsInMemory
|
from modules.routes.routeHelpers import handleFilterValuesInMemory, handleIdsInMemory, enrichRowsWithFkLabels
|
||||||
items = interface.getParzellen(recordFilter=recordFilter)
|
items = interface.getParzellen(recordFilter=recordFilter)
|
||||||
itemDicts = [i.model_dump() if hasattr(i, 'model_dump') else i for i in items]
|
itemDicts = [i.model_dump() if hasattr(i, 'model_dump') else i for i in items]
|
||||||
if mode == "filterValues":
|
if mode == "filterValues":
|
||||||
if not column:
|
if not column:
|
||||||
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
|
raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
|
||||||
|
enrichRowsWithFkLabels(itemDicts, Parzelle)
|
||||||
return handleFilterValuesInMemory(itemDicts, column, pagination)
|
return handleFilterValuesInMemory(itemDicts, column, pagination)
|
||||||
return handleIdsInMemory(itemDicts, pagination)
|
return handleIdsInMemory(itemDicts, pagination)
|
||||||
|
|
||||||
items = interface.getParzellen(recordFilter=recordFilter)
|
items = interface.getParzellen(recordFilter=recordFilter)
|
||||||
paginationParams = _parsePagination(pagination)
|
paginationParams = _parsePagination(pagination)
|
||||||
if paginationParams:
|
if paginationParams:
|
||||||
if paginationParams.sort:
|
from modules.routes.routeHelpers import applyFiltersAndSort
|
||||||
for sort_field in reversed(paginationParams.sort):
|
itemDicts = [i.model_dump() if hasattr(i, 'model_dump') else i for i in items]
|
||||||
field_name = sort_field.field
|
filtered = applyFiltersAndSort(itemDicts, paginationParams)
|
||||||
direction = sort_field.direction.lower()
|
total_items = len(filtered)
|
||||||
items.sort(
|
|
||||||
key=lambda x: getattr(x, field_name, None),
|
|
||||||
reverse=(direction == "desc")
|
|
||||||
)
|
|
||||||
total_items = len(items)
|
|
||||||
total_pages = (total_items + paginationParams.pageSize - 1) // paginationParams.pageSize
|
total_pages = (total_items + paginationParams.pageSize - 1) // paginationParams.pageSize
|
||||||
start_idx = (paginationParams.page - 1) * paginationParams.pageSize
|
start_idx = (paginationParams.page - 1) * paginationParams.pageSize
|
||||||
end_idx = start_idx + paginationParams.pageSize
|
end_idx = start_idx + paginationParams.pageSize
|
||||||
paginated_items = items[start_idx:end_idx]
|
paginated_items = filtered[start_idx:end_idx]
|
||||||
return PaginatedResponse(
|
return PaginatedResponse(
|
||||||
items=paginated_items,
|
items=paginated_items,
|
||||||
pagination=PaginationMetadata(
|
pagination=PaginationMetadata(
|
||||||
|
|
|
||||||
|
|
@ -75,7 +75,7 @@ class RedmineInstanceConfig(PowerOnModel):
|
||||||
"frontend_type": "text",
|
"frontend_type": "text",
|
||||||
"frontend_readonly": True,
|
"frontend_readonly": True,
|
||||||
"frontend_required": True,
|
"frontend_required": True,
|
||||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
|
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
mandateId: Optional[str] = Field(
|
mandateId: Optional[str] = Field(
|
||||||
|
|
@ -86,7 +86,7 @@ class RedmineInstanceConfig(PowerOnModel):
|
||||||
"frontend_type": "text",
|
"frontend_type": "text",
|
||||||
"frontend_readonly": True,
|
"frontend_readonly": True,
|
||||||
"frontend_required": False,
|
"frontend_required": False,
|
||||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
baseUrl: str = Field(
|
baseUrl: str = Field(
|
||||||
|
|
@ -195,7 +195,7 @@ class RedmineTicketMirror(PowerOnModel):
|
||||||
featureInstanceId: str = Field(
|
featureInstanceId: str = Field(
|
||||||
description="FK -> FeatureInstance.id",
|
description="FK -> FeatureInstance.id",
|
||||||
json_schema_extra={"label": "Feature-Instanz", "frontend_type": "text", "frontend_readonly": True, "frontend_required": True,
|
json_schema_extra={"label": "Feature-Instanz", "frontend_type": "text", "frontend_readonly": True, "frontend_required": True,
|
||||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
|
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}},
|
||||||
)
|
)
|
||||||
mandateId: Optional[str] = Field(
|
mandateId: Optional[str] = Field(
|
||||||
default=None,
|
default=None,
|
||||||
|
|
@ -226,14 +226,14 @@ class RedmineTicketMirror(PowerOnModel):
|
||||||
closedOnTs: Optional[float] = Field(
|
closedOnTs: Optional[float] = Field(
|
||||||
default=None,
|
default=None,
|
||||||
description="Best-effort UTC epoch when the ticket transitioned to a closed status. Approximated as updatedOnTs for closed tickets at sync time; used by Stats to render the open-vs-total snapshot chart.",
|
description="Best-effort UTC epoch when the ticket transitioned to a closed status. Approximated as updatedOnTs for closed tickets at sync time; used by Stats to render the open-vs-total snapshot chart.",
|
||||||
json_schema_extra={"label": "closedOn (epoch)", "frontend_type": "number", "frontend_readonly": True, "frontend_required": False, "frontend_hidden": True},
|
json_schema_extra={"label": "closedOn (epoch)", "frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False, "frontend_hidden": True},
|
||||||
)
|
)
|
||||||
createdOn: Optional[str] = Field(default=None, json_schema_extra={"label": "Erstellt am (Redmine)", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
|
createdOn: Optional[str] = Field(default=None, json_schema_extra={"label": "Erstellt am (Redmine)", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
|
||||||
updatedOn: Optional[str] = Field(default=None, json_schema_extra={"label": "Geaendert am (Redmine)", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
|
updatedOn: Optional[str] = Field(default=None, json_schema_extra={"label": "Geaendert am (Redmine)", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False})
|
||||||
createdOnTs: Optional[float] = Field(default=None, description="UTC epoch parsed from createdOn (for SQL filtering)",
|
createdOnTs: Optional[float] = Field(default=None, description="UTC epoch parsed from createdOn (for SQL filtering)",
|
||||||
json_schema_extra={"label": "createdOn (epoch)", "frontend_type": "number", "frontend_readonly": True, "frontend_required": False, "frontend_hidden": True})
|
json_schema_extra={"label": "createdOn (epoch)", "frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False, "frontend_hidden": True})
|
||||||
updatedOnTs: Optional[float] = Field(default=None, description="UTC epoch parsed from updatedOn (for SQL filtering)",
|
updatedOnTs: Optional[float] = Field(default=None, description="UTC epoch parsed from updatedOn (for SQL filtering)",
|
||||||
json_schema_extra={"label": "updatedOn (epoch)", "frontend_type": "number", "frontend_readonly": True, "frontend_required": False, "frontend_hidden": True})
|
json_schema_extra={"label": "updatedOn (epoch)", "frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False, "frontend_hidden": True})
|
||||||
customFields: Optional[List[Dict[str, Any]]] = Field(
|
customFields: Optional[List[Dict[str, Any]]] = Field(
|
||||||
default=None,
|
default=None,
|
||||||
description="List of {id,name,value} as returned by Redmine; stored as JSON",
|
description="List of {id,name,value} as returned by Redmine; stored as JSON",
|
||||||
|
|
@ -270,7 +270,7 @@ class RedmineRelationMirror(PowerOnModel):
|
||||||
featureInstanceId: str = Field(
|
featureInstanceId: str = Field(
|
||||||
description="FK -> FeatureInstance.id",
|
description="FK -> FeatureInstance.id",
|
||||||
json_schema_extra={"label": "Feature-Instanz", "frontend_type": "text", "frontend_readonly": True, "frontend_required": True,
|
json_schema_extra={"label": "Feature-Instanz", "frontend_type": "text", "frontend_readonly": True, "frontend_required": True,
|
||||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
|
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}},
|
||||||
)
|
)
|
||||||
redmineRelationId: int = Field(
|
redmineRelationId: int = Field(
|
||||||
description="Redmine relation id (unique per feature instance)",
|
description="Redmine relation id (unique per feature instance)",
|
||||||
|
|
@ -468,17 +468,17 @@ class RedmineSyncResultDto(BaseModel):
|
||||||
ticketsUpserted: int = 0
|
ticketsUpserted: int = 0
|
||||||
relationsUpserted: int = 0
|
relationsUpserted: int = 0
|
||||||
durationMs: int = 0
|
durationMs: int = 0
|
||||||
lastSyncAt: float
|
lastSyncAt: float = Field(json_schema_extra={"frontend_type": "timestamp"})
|
||||||
error: Optional[str] = None
|
error: Optional[str] = None
|
||||||
|
|
||||||
|
|
||||||
class RedmineSyncStatusDto(BaseModel):
|
class RedmineSyncStatusDto(BaseModel):
|
||||||
instanceId: str
|
instanceId: str
|
||||||
lastSyncAt: Optional[float] = None
|
lastSyncAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
|
||||||
lastFullSyncAt: Optional[float] = None
|
lastFullSyncAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
|
||||||
lastSyncDurationMs: Optional[int] = None
|
lastSyncDurationMs: Optional[int] = None
|
||||||
lastSyncTicketCount: Optional[int] = None
|
lastSyncTicketCount: Optional[int] = None
|
||||||
lastSyncErrorAt: Optional[float] = None
|
lastSyncErrorAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
|
||||||
lastSyncErrorMessage: Optional[str] = None
|
lastSyncErrorMessage: Optional[str] = None
|
||||||
mirroredTicketCount: int = 0
|
mirroredTicketCount: int = 0
|
||||||
mirroredRelationCount: int = 0
|
mirroredRelationCount: int = 0
|
||||||
|
|
@ -513,11 +513,11 @@ class RedmineConfigDto(BaseModel):
|
||||||
rootTrackerName: str = "Userstory"
|
rootTrackerName: str = "Userstory"
|
||||||
defaultPeriodValue: Optional[Dict[str, Any]] = None
|
defaultPeriodValue: Optional[Dict[str, Any]] = None
|
||||||
schemaCacheTtlSeconds: int = 24 * 60 * 60
|
schemaCacheTtlSeconds: int = 24 * 60 * 60
|
||||||
schemaCachedAt: Optional[float] = None
|
schemaCachedAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
|
||||||
isActive: bool = True
|
isActive: bool = True
|
||||||
lastConnectedAt: Optional[float] = None
|
lastConnectedAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
|
||||||
lastSyncAt: Optional[float] = None
|
lastSyncAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
|
||||||
lastFullSyncAt: Optional[float] = None
|
lastFullSyncAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
|
||||||
lastSyncTicketCount: Optional[int] = None
|
lastSyncTicketCount: Optional[int] = None
|
||||||
lastSyncErrorMessage: Optional[str] = None
|
lastSyncErrorMessage: Optional[str] = None
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -48,7 +48,7 @@ from modules.features.redmine.interfaceFeatureRedmine import (
|
||||||
RedmineObjects,
|
RedmineObjects,
|
||||||
getInterface,
|
getInterface,
|
||||||
)
|
)
|
||||||
from modules.features.redmine.serviceRedmineStatsCache import _getStatsCache
|
from modules.features.redmine.serviceRedmineStatsCache import getStatsCache
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
@ -334,7 +334,7 @@ def getTicket(
|
||||||
|
|
||||||
def _invalidateCache(featureInstanceId: str) -> None:
|
def _invalidateCache(featureInstanceId: str) -> None:
|
||||||
try:
|
try:
|
||||||
_getStatsCache().invalidateInstance(featureInstanceId)
|
getStatsCache().invalidateInstance(featureInstanceId)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.warning(f"Failed to invalidate stats cache for {featureInstanceId}: {e}")
|
logger.warning(f"Failed to invalidate stats cache for {featureInstanceId}: {e}")
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -38,7 +38,7 @@ from modules.features.redmine.datamodelRedmine import (
|
||||||
RedmineThroughputBucket,
|
RedmineThroughputBucket,
|
||||||
RedmineTicketDto,
|
RedmineTicketDto,
|
||||||
)
|
)
|
||||||
from modules.features.redmine.serviceRedmineStatsCache import _getStatsCache
|
from modules.features.redmine.serviceRedmineStatsCache import getStatsCache
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
@ -69,7 +69,7 @@ async def getStats(
|
||||||
if status_norm not in {"*", "open", "closed"}:
|
if status_norm not in {"*", "open", "closed"}:
|
||||||
status_norm = "*"
|
status_norm = "*"
|
||||||
|
|
||||||
cache = _getStatsCache()
|
cache = getStatsCache()
|
||||||
# Cache key now includes the new dimensions so different filter combos
|
# Cache key now includes the new dimensions so different filter combos
|
||||||
# don't collide. ``_freeze`` (in the cache module) hashes lists/sets
|
# don't collide. ``_freeze`` (in the cache module) hashes lists/sets
|
||||||
# for us, so we can pass them directly as extra dimensions.
|
# for us, so we can pass them directly as extra dimensions.
|
||||||
|
|
|
||||||
|
|
@ -123,7 +123,7 @@ class RedmineStatsCache:
|
||||||
_globalCache: Optional[RedmineStatsCache] = None
|
_globalCache: Optional[RedmineStatsCache] = None
|
||||||
|
|
||||||
|
|
||||||
def _getStatsCache() -> RedmineStatsCache:
|
def getStatsCache() -> RedmineStatsCache:
|
||||||
"""Process-wide singleton."""
|
"""Process-wide singleton."""
|
||||||
global _globalCache
|
global _globalCache
|
||||||
if _globalCache is None:
|
if _globalCache is None:
|
||||||
|
|
|
||||||
|
|
@ -38,7 +38,7 @@ from modules.features.redmine.datamodelRedmine import (
|
||||||
RedmineTicketMirror,
|
RedmineTicketMirror,
|
||||||
)
|
)
|
||||||
from modules.features.redmine.interfaceFeatureRedmine import getInterface
|
from modules.features.redmine.interfaceFeatureRedmine import getInterface
|
||||||
from modules.features.redmine.serviceRedmineStatsCache import _getStatsCache
|
from modules.features.redmine.serviceRedmineStatsCache import getStatsCache
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
@ -79,6 +79,16 @@ async def runSync(
|
||||||
|
|
||||||
async with _lockFor(featureInstanceId):
|
async with _lockFor(featureInstanceId):
|
||||||
started = time.monotonic()
|
started = time.monotonic()
|
||||||
|
|
||||||
|
# CRITICAL: ensure the schema cache (especially the per-status
|
||||||
|
# ``isClosed`` map) is populated BEFORE we iterate issues. Redmine's
|
||||||
|
# /issues.json endpoint only returns ``{id, name}`` for the status
|
||||||
|
# object -- the closed/open flag lives in /issue_statuses.json. If
|
||||||
|
# the cache is empty here, every freshly-synced ticket would land
|
||||||
|
# with ``isClosed=False`` and the Stats page would be useless.
|
||||||
|
await _ensureSchemaWarm(currentUser, mandateId, featureInstanceId)
|
||||||
|
cfg = iface.getConfig(featureInstanceId) # re-read to get warm cache
|
||||||
|
|
||||||
full = force or cfg.lastSyncAt is None
|
full = force or cfg.lastSyncAt is None
|
||||||
updated_from_iso: Optional[str] = None
|
updated_from_iso: Optional[str] = None
|
||||||
if not full and cfg.lastSyncAt is not None:
|
if not full and cfg.lastSyncAt is not None:
|
||||||
|
|
@ -107,6 +117,15 @@ async def runSync(
|
||||||
tickets_upserted += _upsertTicket(iface, featureInstanceId, mandateId, issue, now_epoch)
|
tickets_upserted += _upsertTicket(iface, featureInstanceId, mandateId, issue, now_epoch)
|
||||||
relations_upserted += _replaceRelations(iface, featureInstanceId, issue, now_epoch)
|
relations_upserted += _replaceRelations(iface, featureInstanceId, issue, now_epoch)
|
||||||
|
|
||||||
|
# Self-healing pass: re-apply ``isClosed`` to every mirrored ticket
|
||||||
|
# using the now-warm schema cache. Fixes pre-existing rows that were
|
||||||
|
# synced before the cache was populated (cheap; mirror-local only).
|
||||||
|
flags_fixed = _rebuildIsClosedFromSchema(iface, featureInstanceId, now_epoch)
|
||||||
|
if flags_fixed:
|
||||||
|
logger.info(
|
||||||
|
f"runSync({featureInstanceId}): corrected isClosed on {flags_fixed} mirror rows"
|
||||||
|
)
|
||||||
|
|
||||||
duration_ms = int((time.monotonic() - started) * 1000)
|
duration_ms = int((time.monotonic() - started) * 1000)
|
||||||
iface.recordSyncSuccess(
|
iface.recordSyncSuccess(
|
||||||
featureInstanceId,
|
featureInstanceId,
|
||||||
|
|
@ -115,7 +134,7 @@ async def runSync(
|
||||||
durationMs=duration_ms,
|
durationMs=duration_ms,
|
||||||
lastSyncAt=now_epoch,
|
lastSyncAt=now_epoch,
|
||||||
)
|
)
|
||||||
_getStatsCache().invalidateInstance(featureInstanceId)
|
getStatsCache().invalidateInstance(featureInstanceId)
|
||||||
|
|
||||||
return RedmineSyncResultDto(
|
return RedmineSyncResultDto(
|
||||||
instanceId=featureInstanceId,
|
instanceId=featureInstanceId,
|
||||||
|
|
@ -169,7 +188,7 @@ async def upsertSingleTicket(
|
||||||
now_epoch = time.time()
|
now_epoch = time.time()
|
||||||
_upsertTicket(iface, featureInstanceId, mandateId, issue, now_epoch)
|
_upsertTicket(iface, featureInstanceId, mandateId, issue, now_epoch)
|
||||||
relations_upserted = _replaceRelations(iface, featureInstanceId, issue, now_epoch)
|
relations_upserted = _replaceRelations(iface, featureInstanceId, issue, now_epoch)
|
||||||
_getStatsCache().invalidateInstance(featureInstanceId)
|
getStatsCache().invalidateInstance(featureInstanceId)
|
||||||
return relations_upserted
|
return relations_upserted
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -183,7 +202,7 @@ def deleteMirroredTicket(
|
||||||
iface = getInterface(currentUser, mandateId=mandateId, featureInstanceId=featureInstanceId)
|
iface = getInterface(currentUser, mandateId=mandateId, featureInstanceId=featureInstanceId)
|
||||||
deleted = iface.deleteMirroredTicket(featureInstanceId, int(issueId))
|
deleted = iface.deleteMirroredTicket(featureInstanceId, int(issueId))
|
||||||
iface.deleteMirroredRelationsForIssue(featureInstanceId, int(issueId))
|
iface.deleteMirroredRelationsForIssue(featureInstanceId, int(issueId))
|
||||||
_getStatsCache().invalidateInstance(featureInstanceId)
|
getStatsCache().invalidateInstance(featureInstanceId)
|
||||||
return deleted
|
return deleted
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -240,6 +259,80 @@ def _replaceRelations(
|
||||||
return inserted
|
return inserted
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Schema cache warm-up + post-sync isClosed correction
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
async def _ensureSchemaWarm(
|
||||||
|
currentUser: User,
|
||||||
|
mandateId: Optional[str],
|
||||||
|
featureInstanceId: str,
|
||||||
|
) -> None:
|
||||||
|
"""Make sure ``cfg.schemaCache['statuses']`` exists with the per-status
|
||||||
|
``isClosed`` flag. Called at the start of every sync because Redmine's
|
||||||
|
``/issues.json`` doesn't expose ``is_closed`` on the inline status
|
||||||
|
object, so we MUST resolve it via the schema.
|
||||||
|
"""
|
||||||
|
iface = getInterface(currentUser, mandateId=mandateId, featureInstanceId=featureInstanceId)
|
||||||
|
cfg = iface.getConfig(featureInstanceId)
|
||||||
|
if cfg is None:
|
||||||
|
return
|
||||||
|
statuses = (cfg.schemaCache or {}).get("statuses") or []
|
||||||
|
if statuses:
|
||||||
|
return
|
||||||
|
# Lazy import to avoid a circular dependency at module load.
|
||||||
|
from modules.features.redmine.serviceRedmine import getProjectMeta
|
||||||
|
try:
|
||||||
|
await getProjectMeta(currentUser, mandateId, featureInstanceId, forceRefresh=True)
|
||||||
|
except Exception as e:
|
||||||
|
logger.warning(
|
||||||
|
f"_ensureSchemaWarm({featureInstanceId}): could not warm schema cache: {e} "
|
||||||
|
"-- isClosed flags may be inaccurate until next successful schema fetch."
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _rebuildIsClosedFromSchema(iface, featureInstanceId: str, nowEpoch: float) -> int:
|
||||||
|
"""Walk the mirror once and fix ``isClosed`` (and ``closedOnTs``) for any
|
||||||
|
ticket whose stored value disagrees with the current schema cache.
|
||||||
|
|
||||||
|
Returns the number of rows that were actually corrected. A no-op when
|
||||||
|
the schema cache has no statuses (logged once, then the caller can
|
||||||
|
decide whether to retry).
|
||||||
|
"""
|
||||||
|
cfg = iface.getConfig(featureInstanceId)
|
||||||
|
if cfg is None:
|
||||||
|
return 0
|
||||||
|
statuses = (cfg.schemaCache or {}).get("statuses") or []
|
||||||
|
if not statuses:
|
||||||
|
return 0
|
||||||
|
closed_ids = {int(s.get("id")) for s in statuses if s.get("id") is not None and s.get("isClosed")}
|
||||||
|
rows = iface.listMirroredTickets(featureInstanceId)
|
||||||
|
corrections = 0
|
||||||
|
for row in rows:
|
||||||
|
sid = row.get("statusId")
|
||||||
|
if sid is None:
|
||||||
|
continue
|
||||||
|
should_be_closed = int(sid) in closed_ids
|
||||||
|
if bool(row.get("isClosed")) == should_be_closed:
|
||||||
|
continue
|
||||||
|
# Only the closed/open flag (and the derived closedOnTs) are
|
||||||
|
# touched here -- everything else came from Redmine and stays.
|
||||||
|
update = {
|
||||||
|
"isClosed": bool(should_be_closed),
|
||||||
|
"closedOnTs": float(row.get("updatedOnTs")) if (should_be_closed and row.get("updatedOnTs") is not None) else None,
|
||||||
|
"syncedAt": nowEpoch,
|
||||||
|
}
|
||||||
|
try:
|
||||||
|
iface.upsertMirroredTicket(featureInstanceId, int(row.get("redmineId")), {**row, **update})
|
||||||
|
corrections += 1
|
||||||
|
except Exception as e:
|
||||||
|
logger.warning(
|
||||||
|
f"_rebuildIsClosedFromSchema({featureInstanceId}): could not fix ticket "
|
||||||
|
f"#{row.get('redmineId')}: {e}"
|
||||||
|
)
|
||||||
|
return corrections
|
||||||
|
|
||||||
|
|
||||||
# ---------------------------------------------------------------------------
|
# ---------------------------------------------------------------------------
|
||||||
# Pure helpers
|
# Pure helpers
|
||||||
# ---------------------------------------------------------------------------
|
# ---------------------------------------------------------------------------
|
||||||
|
|
|
||||||
|
|
@ -4,7 +4,8 @@
|
||||||
Teamsbot Feature - Data Models.
|
Teamsbot Feature - Data Models.
|
||||||
Pydantic models for Teams Bot sessions, transcripts, bot responses, and configuration.
|
Pydantic models for Teams Bot sessions, transcripts, bot responses, and configuration.
|
||||||
"""
|
"""
|
||||||
from typing import Optional, List, Dict, Any
|
from typing import Optional, List, Dict, Any, Literal
|
||||||
|
from datetime import datetime, timezone
|
||||||
from pydantic import BaseModel, Field
|
from pydantic import BaseModel, Field
|
||||||
from enum import Enum
|
from enum import Enum
|
||||||
import uuid
|
import uuid
|
||||||
|
|
@ -12,6 +13,14 @@ import uuid
|
||||||
from modules.datamodels.datamodelBase import PowerOnModel
|
from modules.datamodels.datamodelBase import PowerOnModel
|
||||||
|
|
||||||
|
|
||||||
|
# ============================================================================
|
||||||
|
# Director Prompt Limits
|
||||||
|
# ============================================================================
|
||||||
|
|
||||||
|
DIRECTOR_PROMPT_TEXT_LIMIT = 8000
|
||||||
|
DIRECTOR_PROMPT_FILE_LIMIT = 10
|
||||||
|
|
||||||
|
|
||||||
# ============================================================================
|
# ============================================================================
|
||||||
# Enums
|
# Enums
|
||||||
# ============================================================================
|
# ============================================================================
|
||||||
|
|
@ -82,8 +91,8 @@ class TeamsbotSession(PowerOnModel):
|
||||||
meetingLink: str = Field(description="Teams meeting join link")
|
meetingLink: str = Field(description="Teams meeting join link")
|
||||||
botName: str = Field(default="AI Assistant", description="Display name of the bot in the meeting")
|
botName: str = Field(default="AI Assistant", description="Display name of the bot in the meeting")
|
||||||
status: TeamsbotSessionStatus = Field(default=TeamsbotSessionStatus.PENDING, description="Current session status")
|
status: TeamsbotSessionStatus = Field(default=TeamsbotSessionStatus.PENDING, description="Current session status")
|
||||||
startedAt: Optional[str] = Field(default=None, description="ISO timestamp when session started")
|
startedAt: Optional[float] = Field(default=None, description="UTC unix timestamp when session started", json_schema_extra={"frontend_type": "timestamp"})
|
||||||
endedAt: Optional[str] = Field(default=None, description="ISO timestamp when session ended")
|
endedAt: Optional[float] = Field(default=None, description="UTC unix timestamp when session ended", json_schema_extra={"frontend_type": "timestamp"})
|
||||||
startedByUserId: str = Field(description="User ID who started the session")
|
startedByUserId: str = Field(description="User ID who started the session")
|
||||||
bridgeSessionId: Optional[str] = Field(default=None, description="Session ID on the .NET Media Bridge")
|
bridgeSessionId: Optional[str] = Field(default=None, description="Session ID on the .NET Media Bridge")
|
||||||
meetingChatId: Optional[str] = Field(default=None, description="Teams meeting chat ID for Graph API messages")
|
meetingChatId: Optional[str] = Field(default=None, description="Teams meeting chat ID for Graph API messages")
|
||||||
|
|
@ -100,7 +109,7 @@ class TeamsbotTranscript(PowerOnModel):
|
||||||
sessionId: str = Field(description="Session ID (FK)")
|
sessionId: str = Field(description="Session ID (FK)")
|
||||||
speaker: Optional[str] = Field(default=None, description="Speaker name or identifier")
|
speaker: Optional[str] = Field(default=None, description="Speaker name or identifier")
|
||||||
text: str = Field(description="Transcribed text")
|
text: str = Field(description="Transcribed text")
|
||||||
timestamp: str = Field(description="ISO timestamp of the speech segment")
|
timestamp: float = Field(description="UTC unix timestamp of the speech segment", json_schema_extra={"frontend_type": "timestamp"})
|
||||||
confidence: float = Field(default=0.0, ge=0.0, le=1.0, description="STT confidence score")
|
confidence: float = Field(default=0.0, ge=0.0, le=1.0, description="STT confidence score")
|
||||||
language: Optional[str] = Field(default=None, description="Detected language code (e.g., de-DE)")
|
language: Optional[str] = Field(default=None, description="Detected language code (e.g., de-DE)")
|
||||||
isFinal: bool = Field(default=True, description="Whether this is a final or interim result")
|
isFinal: bool = Field(default=True, description="Whether this is a final or interim result")
|
||||||
|
|
@ -119,7 +128,7 @@ class TeamsbotBotResponse(PowerOnModel):
|
||||||
modelName: Optional[str] = Field(default=None, description="AI model used for this response")
|
modelName: Optional[str] = Field(default=None, description="AI model used for this response")
|
||||||
processingTime: float = Field(default=0.0, description="Processing time in seconds")
|
processingTime: float = Field(default=0.0, description="Processing time in seconds")
|
||||||
priceCHF: float = Field(default=0.0, description="Cost of this AI call in CHF")
|
priceCHF: float = Field(default=0.0, description="Cost of this AI call in CHF")
|
||||||
timestamp: Optional[str] = Field(default=None, description="ISO timestamp of the response")
|
timestamp: Optional[float] = Field(default=None, description="UTC unix timestamp of the response", json_schema_extra={"frontend_type": "timestamp"})
|
||||||
|
|
||||||
|
|
||||||
# ============================================================================
|
# ============================================================================
|
||||||
|
|
@ -267,6 +276,56 @@ class SpeechTeamsResponse(BaseModel):
|
||||||
reasoning: str = Field(default="", description="Reasoning for the decision (for logging/debug)")
|
reasoning: str = Field(default="", description="Reasoning for the decision (for logging/debug)")
|
||||||
detectedIntent: str = Field(default="none", description="Detected intent: addressed, question, proactive, stop, none")
|
detectedIntent: str = Field(default="none", description="Detected intent: addressed, question, proactive, stop, none")
|
||||||
commands: Optional[List[TeamsbotCommand]] = Field(default=None, description="Optional list of commands to execute (e.g. toggle transcript, send chat, change language)")
|
commands: Optional[List[TeamsbotCommand]] = Field(default=None, description="Optional list of commands to execute (e.g. toggle transcript, send chat, change language)")
|
||||||
|
needsAgent: bool = Field(default=False, description="If True, escalate to agentService.runAgent for complex multi-step processing (web research, mail, etc.)")
|
||||||
|
agentReason: Optional[str] = Field(default=None, description="Why escalation to the full agent is required (used as task brief for the agent)")
|
||||||
|
|
||||||
|
|
||||||
|
# ============================================================================
|
||||||
|
# Director Prompts (private operator instructions sent during a live meeting)
|
||||||
|
# ============================================================================
|
||||||
|
|
||||||
|
class TeamsbotDirectorPromptStatus(str, Enum):
|
||||||
|
"""Lifecycle status of a Director Prompt."""
|
||||||
|
QUEUED = "queued"
|
||||||
|
RUNNING = "running"
|
||||||
|
SUCCEEDED = "succeeded"
|
||||||
|
FAILED = "failed"
|
||||||
|
CONSUMED = "consumed" # one-shot consumed; persistent prompts stay active
|
||||||
|
|
||||||
|
|
||||||
|
class TeamsbotDirectorPromptMode(str, Enum):
|
||||||
|
"""How long a Director Prompt remains effective."""
|
||||||
|
ONE_SHOT = "oneShot"
|
||||||
|
PERSISTENT = "persistent"
|
||||||
|
|
||||||
|
|
||||||
|
class TeamsbotDirectorPrompt(PowerOnModel):
|
||||||
|
"""A private operator instruction injected into the bot during a live meeting.
|
||||||
|
|
||||||
|
Stored in PostgreSQL so it survives reconnects (persistent prompts) and is
|
||||||
|
auditable. Visible only to the session owner via SSE; invisible to other
|
||||||
|
meeting participants.
|
||||||
|
"""
|
||||||
|
id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Director prompt ID")
|
||||||
|
sessionId: str = Field(description="Teams Bot session ID (FK)")
|
||||||
|
instanceId: str = Field(description="Feature instance ID (FK)")
|
||||||
|
operatorUserId: str = Field(description="User ID of the operator who issued the prompt")
|
||||||
|
text: str = Field(description="The director instruction text", max_length=DIRECTOR_PROMPT_TEXT_LIMIT)
|
||||||
|
mode: TeamsbotDirectorPromptMode = Field(default=TeamsbotDirectorPromptMode.ONE_SHOT, description="oneShot or persistent")
|
||||||
|
fileIds: List[str] = Field(default_factory=list, description="UDB-selected file/object IDs to attach as RAG context")
|
||||||
|
status: TeamsbotDirectorPromptStatus = Field(default=TeamsbotDirectorPromptStatus.QUEUED, description="Lifecycle status")
|
||||||
|
statusMessage: Optional[str] = Field(default=None, description="Optional error or status detail")
|
||||||
|
createdAt: float = Field(default_factory=lambda: datetime.now(timezone.utc).timestamp(), description="UTC unix timestamp when created", json_schema_extra={"frontend_type": "timestamp"})
|
||||||
|
consumedAt: Optional[float] = Field(default=None, description="UTC unix timestamp when consumed (one-shot) or marked done", json_schema_extra={"frontend_type": "timestamp"})
|
||||||
|
agentRunId: Optional[str] = Field(default=None, description="Reference to the agent run that processed this prompt")
|
||||||
|
responseText: Optional[str] = Field(default=None, description="Final agent text delivered to the meeting")
|
||||||
|
|
||||||
|
|
||||||
|
class TeamsbotDirectorPromptCreateRequest(BaseModel):
|
||||||
|
"""Request body for submitting a new Director Prompt."""
|
||||||
|
text: str = Field(description="Director instruction text", min_length=1, max_length=DIRECTOR_PROMPT_TEXT_LIMIT)
|
||||||
|
mode: TeamsbotDirectorPromptMode = Field(default=TeamsbotDirectorPromptMode.ONE_SHOT, description="oneShot or persistent")
|
||||||
|
fileIds: List[str] = Field(default_factory=list, description="UDB file IDs to attach (max 10)")
|
||||||
|
|
||||||
|
|
||||||
# ============================================================================
|
# ============================================================================
|
||||||
|
|
|
||||||
|
|
@ -21,6 +21,9 @@ from .datamodelTeamsbot import (
|
||||||
TeamsbotSystemBot,
|
TeamsbotSystemBot,
|
||||||
TeamsbotUserSettings,
|
TeamsbotUserSettings,
|
||||||
TeamsbotUserAccount,
|
TeamsbotUserAccount,
|
||||||
|
TeamsbotDirectorPrompt,
|
||||||
|
TeamsbotDirectorPromptStatus,
|
||||||
|
TeamsbotDirectorPromptMode,
|
||||||
)
|
)
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
@ -84,7 +87,7 @@ class TeamsbotObjects:
|
||||||
if not includeEnded:
|
if not includeEnded:
|
||||||
records = [r for r in records if r.get("status") != TeamsbotSessionStatus.ENDED.value]
|
records = [r for r in records if r.get("status") != TeamsbotSessionStatus.ENDED.value]
|
||||||
# Sort by startedAt descending
|
# Sort by startedAt descending
|
||||||
records.sort(key=lambda r: r.get("startedAt") or "", reverse=True)
|
records.sort(key=lambda r: r.get("startedAt") or 0, reverse=True)
|
||||||
return records
|
return records
|
||||||
|
|
||||||
def getActiveSessions(self, instanceId: str) -> List[Dict[str, Any]]:
|
def getActiveSessions(self, instanceId: str) -> List[Dict[str, Any]]:
|
||||||
|
|
@ -114,11 +117,10 @@ class TeamsbotObjects:
|
||||||
return self.db.recordModify(TeamsbotSession, sessionId, updates)
|
return self.db.recordModify(TeamsbotSession, sessionId, updates)
|
||||||
|
|
||||||
def deleteSession(self, sessionId: str) -> bool:
|
def deleteSession(self, sessionId: str) -> bool:
|
||||||
"""Delete a session and all related transcripts and responses."""
|
"""Delete a session and all related transcripts, responses and director prompts."""
|
||||||
# Delete related records first
|
|
||||||
self._deleteTranscriptsBySession(sessionId)
|
self._deleteTranscriptsBySession(sessionId)
|
||||||
self._deleteResponsesBySession(sessionId)
|
self._deleteResponsesBySession(sessionId)
|
||||||
# Delete session
|
self._deletePromptsBySession(sessionId)
|
||||||
return self.db.recordDelete(TeamsbotSession, sessionId)
|
return self.db.recordDelete(TeamsbotSession, sessionId)
|
||||||
|
|
||||||
# =========================================================================
|
# =========================================================================
|
||||||
|
|
@ -131,7 +133,7 @@ class TeamsbotObjects:
|
||||||
TeamsbotTranscript,
|
TeamsbotTranscript,
|
||||||
recordFilter={"sessionId": sessionId},
|
recordFilter={"sessionId": sessionId},
|
||||||
)
|
)
|
||||||
records.sort(key=lambda r: r.get("timestamp") or "")
|
records.sort(key=lambda r: r.get("timestamp") or 0)
|
||||||
if offset:
|
if offset:
|
||||||
records = records[offset:]
|
records = records[offset:]
|
||||||
if limit:
|
if limit:
|
||||||
|
|
@ -144,7 +146,7 @@ class TeamsbotObjects:
|
||||||
TeamsbotTranscript,
|
TeamsbotTranscript,
|
||||||
recordFilter={"sessionId": sessionId},
|
recordFilter={"sessionId": sessionId},
|
||||||
)
|
)
|
||||||
records.sort(key=lambda r: r.get("timestamp") or "")
|
records.sort(key=lambda r: r.get("timestamp") or 0)
|
||||||
return records[-count:]
|
return records[-count:]
|
||||||
|
|
||||||
def createTranscript(self, transcriptData: Dict[str, Any]) -> Dict[str, Any]:
|
def createTranscript(self, transcriptData: Dict[str, Any]) -> Dict[str, Any]:
|
||||||
|
|
@ -174,7 +176,7 @@ class TeamsbotObjects:
|
||||||
TeamsbotBotResponse,
|
TeamsbotBotResponse,
|
||||||
recordFilter={"sessionId": sessionId},
|
recordFilter={"sessionId": sessionId},
|
||||||
)
|
)
|
||||||
records.sort(key=lambda r: r.get("timestamp") or "")
|
records.sort(key=lambda r: r.get("timestamp") or 0)
|
||||||
return records
|
return records
|
||||||
|
|
||||||
def createBotResponse(self, responseData: Dict[str, Any]) -> Dict[str, Any]:
|
def createBotResponse(self, responseData: Dict[str, Any]) -> Dict[str, Any]:
|
||||||
|
|
@ -272,6 +274,62 @@ class TeamsbotObjects:
|
||||||
"""Delete saved MS credentials."""
|
"""Delete saved MS credentials."""
|
||||||
return self.db.recordDelete(TeamsbotUserAccount, accountId)
|
return self.db.recordDelete(TeamsbotUserAccount, accountId)
|
||||||
|
|
||||||
|
# =========================================================================
|
||||||
|
# Director Prompts (private operator instructions during a live meeting)
|
||||||
|
# =========================================================================
|
||||||
|
|
||||||
|
def createDirectorPrompt(self, promptData: Dict[str, Any]) -> Dict[str, Any]:
|
||||||
|
"""Create a new director prompt record."""
|
||||||
|
return self.db.recordCreate(TeamsbotDirectorPrompt, promptData)
|
||||||
|
|
||||||
|
def getDirectorPrompt(self, promptId: str) -> Optional[Dict[str, Any]]:
|
||||||
|
"""Get a single director prompt by ID."""
|
||||||
|
records = self.db.getRecordset(TeamsbotDirectorPrompt, recordFilter={"id": promptId})
|
||||||
|
return records[0] if records else None
|
||||||
|
|
||||||
|
def getDirectorPrompts(self, sessionId: str, operatorUserId: str | None = None) -> List[Dict[str, Any]]:
|
||||||
|
"""Get all director prompts for a session, optionally filtered by operator."""
|
||||||
|
recordFilter: Dict[str, Any] = {"sessionId": sessionId}
|
||||||
|
if operatorUserId:
|
||||||
|
recordFilter["operatorUserId"] = operatorUserId
|
||||||
|
records = self.db.getRecordset(TeamsbotDirectorPrompt, recordFilter=recordFilter)
|
||||||
|
records.sort(key=lambda r: r.get("createdAt") or 0)
|
||||||
|
return records
|
||||||
|
|
||||||
|
def getActivePersistentPrompts(self, sessionId: str) -> List[Dict[str, Any]]:
|
||||||
|
"""Get persistent prompts that are still active (not consumed/failed) for a session."""
|
||||||
|
records = self.db.getRecordset(
|
||||||
|
TeamsbotDirectorPrompt,
|
||||||
|
recordFilter={
|
||||||
|
"sessionId": sessionId,
|
||||||
|
"mode": TeamsbotDirectorPromptMode.PERSISTENT.value,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
terminal = {
|
||||||
|
TeamsbotDirectorPromptStatus.CONSUMED.value,
|
||||||
|
TeamsbotDirectorPromptStatus.FAILED.value,
|
||||||
|
}
|
||||||
|
active = [r for r in records if r.get("status") not in terminal]
|
||||||
|
active.sort(key=lambda r: r.get("createdAt") or 0)
|
||||||
|
return active
|
||||||
|
|
||||||
|
def updateDirectorPrompt(self, promptId: str, updates: Dict[str, Any]) -> Optional[Dict[str, Any]]:
|
||||||
|
"""Update a director prompt (status, response text, etc.)."""
|
||||||
|
return self.db.recordModify(TeamsbotDirectorPrompt, promptId, updates)
|
||||||
|
|
||||||
|
def deleteDirectorPrompt(self, promptId: str) -> bool:
|
||||||
|
"""Delete a director prompt (e.g. when operator removes a persistent prompt)."""
|
||||||
|
return self.db.recordDelete(TeamsbotDirectorPrompt, promptId)
|
||||||
|
|
||||||
|
def _deletePromptsBySession(self, sessionId: str) -> int:
|
||||||
|
"""Delete all director prompts for a session (called from deleteSession)."""
|
||||||
|
records = self.db.getRecordset(TeamsbotDirectorPrompt, recordFilter={"sessionId": sessionId})
|
||||||
|
count = 0
|
||||||
|
for record in records:
|
||||||
|
self.db.recordDelete(TeamsbotDirectorPrompt, record.get("id"))
|
||||||
|
count += 1
|
||||||
|
return count
|
||||||
|
|
||||||
# =========================================================================
|
# =========================================================================
|
||||||
# Stats / Aggregation
|
# Stats / Aggregation
|
||||||
# =========================================================================
|
# =========================================================================
|
||||||
|
|
|
||||||
|
|
@ -36,6 +36,11 @@ from .datamodelTeamsbot import (
|
||||||
TeamsbotUserAccount,
|
TeamsbotUserAccount,
|
||||||
TeamsbotResponseChannel,
|
TeamsbotResponseChannel,
|
||||||
TeamsbotResponseMode,
|
TeamsbotResponseMode,
|
||||||
|
TeamsbotDirectorPromptCreateRequest,
|
||||||
|
TeamsbotDirectorPromptMode,
|
||||||
|
TeamsbotDirectorPromptStatus,
|
||||||
|
DIRECTOR_PROMPT_FILE_LIMIT,
|
||||||
|
DIRECTOR_PROMPT_TEXT_LIMIT,
|
||||||
)
|
)
|
||||||
|
|
||||||
# Import service
|
# Import service
|
||||||
|
|
@ -378,16 +383,21 @@ async def streamSession(
|
||||||
|
|
||||||
async def _eventGenerator():
|
async def _eventGenerator():
|
||||||
"""Generate SSE events from the session event queue."""
|
"""Generate SSE events from the session event queue."""
|
||||||
from .service import _sessionEvents
|
from .service import sessionEvents
|
||||||
|
|
||||||
# Send initial session state
|
# Send initial session state
|
||||||
yield f"data: {json.dumps({'type': 'sessionState', 'data': session})}\n\n"
|
yield f"data: {json.dumps({'type': 'sessionState', 'data': session})}\n\n"
|
||||||
|
|
||||||
|
# Send current bot WebSocket connection state so the operator UI can
|
||||||
|
# render the live indicator without waiting for the next connect/disconnect.
|
||||||
|
from .service import getActiveService as _getActiveService
|
||||||
|
yield f"data: {json.dumps({'type': 'botConnectionState', 'data': {'connected': _getActiveService(sessionId) is not None}})}\n\n"
|
||||||
|
|
||||||
# Stream events
|
# Stream events
|
||||||
eventQueue = _sessionEvents.get(sessionId)
|
eventQueue = sessionEvents.get(sessionId)
|
||||||
if not eventQueue:
|
if not eventQueue:
|
||||||
_sessionEvents[sessionId] = asyncio.Queue()
|
sessionEvents[sessionId] = asyncio.Queue()
|
||||||
eventQueue = _sessionEvents[sessionId]
|
eventQueue = sessionEvents[sessionId]
|
||||||
|
|
||||||
try:
|
try:
|
||||||
while True:
|
while True:
|
||||||
|
|
@ -800,8 +810,8 @@ async def deleteUserAccount(
|
||||||
# MFA Code Submission (relayed to active bot session)
|
# MFA Code Submission (relayed to active bot session)
|
||||||
# =========================================================================
|
# =========================================================================
|
||||||
|
|
||||||
_mfaCodeQueues: dict = {}
|
mfaCodeQueues: dict = {}
|
||||||
_mfaWaitTasks: dict = {}
|
mfaWaitTasks: dict = {}
|
||||||
|
|
||||||
@router.post("/{instanceId}/sessions/{sessionId}/mfa")
|
@router.post("/{instanceId}/sessions/{sessionId}/mfa")
|
||||||
@limiter.limit("10/minute")
|
@limiter.limit("10/minute")
|
||||||
|
|
@ -824,7 +834,7 @@ async def submitMfaCode(
|
||||||
|
|
||||||
logger.info(f"MFA submission for session {sessionId}: action={mfaAction}, codeLen={len(mfaCode)}")
|
logger.info(f"MFA submission for session {sessionId}: action={mfaAction}, codeLen={len(mfaCode)}")
|
||||||
|
|
||||||
queue = _mfaCodeQueues.get(sessionId)
|
queue = mfaCodeQueues.get(sessionId)
|
||||||
if queue:
|
if queue:
|
||||||
await queue.put({"action": mfaAction, "code": mfaCode})
|
await queue.put({"action": mfaAction, "code": mfaCode})
|
||||||
return {"submitted": True}
|
return {"submitted": True}
|
||||||
|
|
@ -832,6 +842,132 @@ async def submitMfaCode(
|
||||||
raise HTTPException(status_code=404, detail=routeApiMsg("No active MFA challenge for this session"))
|
raise HTTPException(status_code=404, detail=routeApiMsg("No active MFA challenge for this session"))
|
||||||
|
|
||||||
|
|
||||||
|
# =========================================================================
|
||||||
|
# Director Prompts (private operator instructions during a live meeting)
|
||||||
|
# =========================================================================
|
||||||
|
|
||||||
|
@router.post("/{instanceId}/sessions/{sessionId}/directorPrompts")
|
||||||
|
@limiter.limit("30/minute")
|
||||||
|
async def submitDirectorPrompt(
|
||||||
|
request: Request,
|
||||||
|
instanceId: str,
|
||||||
|
sessionId: str,
|
||||||
|
body: TeamsbotDirectorPromptCreateRequest,
|
||||||
|
context: RequestContext = Depends(getRequestContext),
|
||||||
|
):
|
||||||
|
"""Submit a private director prompt to the running bot. Triggers the
|
||||||
|
full agent path (web, mail, RAG, etc.) and delivers the answer into the
|
||||||
|
meeting via TTS + chat. Only the session owner can submit prompts."""
|
||||||
|
_validateInstanceAccess(instanceId, context)
|
||||||
|
interface = _getInterface(context, instanceId)
|
||||||
|
|
||||||
|
session = interface.getSession(sessionId)
|
||||||
|
if not session:
|
||||||
|
raise HTTPException(status_code=404, detail=f"Session '{sessionId}' not found")
|
||||||
|
_validateSessionOwnership(session, context)
|
||||||
|
|
||||||
|
if session.get("status") not in (
|
||||||
|
TeamsbotSessionStatus.ACTIVE.value,
|
||||||
|
TeamsbotSessionStatus.JOINING.value,
|
||||||
|
):
|
||||||
|
raise HTTPException(status_code=400, detail=routeApiMsg("Session is not active"))
|
||||||
|
|
||||||
|
text = (body.text or "").strip()
|
||||||
|
if not text:
|
||||||
|
raise HTTPException(status_code=400, detail=routeApiMsg("Prompt text is required"))
|
||||||
|
if len(text) > DIRECTOR_PROMPT_TEXT_LIMIT:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=400,
|
||||||
|
detail=routeApiMsg(f"Prompt text exceeds limit of {DIRECTOR_PROMPT_TEXT_LIMIT} characters"),
|
||||||
|
)
|
||||||
|
fileIds = list(body.fileIds or [])
|
||||||
|
if len(fileIds) > DIRECTOR_PROMPT_FILE_LIMIT:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=400,
|
||||||
|
detail=routeApiMsg(f"Too many files ({len(fileIds)}); max {DIRECTOR_PROMPT_FILE_LIMIT}"),
|
||||||
|
)
|
||||||
|
|
||||||
|
from .service import getActiveService
|
||||||
|
service = getActiveService(sessionId)
|
||||||
|
if not service:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=409,
|
||||||
|
detail=routeApiMsg(
|
||||||
|
"Bot is not yet live in the meeting (no WebSocket connection). "
|
||||||
|
"Wait until the bot status indicator turns green and try again."
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
created = await service.submitDirectorPrompt(
|
||||||
|
sessionId=sessionId,
|
||||||
|
operatorUserId=str(context.user.id),
|
||||||
|
text=text,
|
||||||
|
mode=body.mode,
|
||||||
|
fileIds=fileIds,
|
||||||
|
)
|
||||||
|
return {"prompt": created}
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/{instanceId}/sessions/{sessionId}/directorPrompts")
|
||||||
|
@limiter.limit("30/minute")
|
||||||
|
async def listDirectorPrompts(
|
||||||
|
request: Request,
|
||||||
|
instanceId: str,
|
||||||
|
sessionId: str,
|
||||||
|
context: RequestContext = Depends(getRequestContext),
|
||||||
|
):
|
||||||
|
"""List director prompts for a session (only operator's own prompts)."""
|
||||||
|
_validateInstanceAccess(instanceId, context)
|
||||||
|
interface = _getInterface(context, instanceId)
|
||||||
|
|
||||||
|
session = interface.getSession(sessionId)
|
||||||
|
if not session:
|
||||||
|
raise HTTPException(status_code=404, detail=f"Session '{sessionId}' not found")
|
||||||
|
_validateSessionOwnership(session, context)
|
||||||
|
|
||||||
|
operatorUserId = None if context.isPlatformAdmin else str(context.user.id)
|
||||||
|
prompts = interface.getDirectorPrompts(sessionId, operatorUserId=operatorUserId)
|
||||||
|
return {"prompts": prompts}
|
||||||
|
|
||||||
|
|
||||||
|
@router.delete("/{instanceId}/sessions/{sessionId}/directorPrompts/{promptId}")
|
||||||
|
@limiter.limit("30/minute")
|
||||||
|
async def deleteDirectorPrompt(
|
||||||
|
request: Request,
|
||||||
|
instanceId: str,
|
||||||
|
sessionId: str,
|
||||||
|
promptId: str,
|
||||||
|
context: RequestContext = Depends(getRequestContext),
|
||||||
|
):
|
||||||
|
"""Remove a (typically persistent) director prompt. Marks it consumed so
|
||||||
|
it no longer influences the bot. The DB record is kept for audit."""
|
||||||
|
_validateInstanceAccess(instanceId, context)
|
||||||
|
interface = _getInterface(context, instanceId)
|
||||||
|
|
||||||
|
session = interface.getSession(sessionId)
|
||||||
|
if not session:
|
||||||
|
raise HTTPException(status_code=404, detail=f"Session '{sessionId}' not found")
|
||||||
|
_validateSessionOwnership(session, context)
|
||||||
|
|
||||||
|
prompt = interface.getDirectorPrompt(promptId)
|
||||||
|
if not prompt or prompt.get("sessionId") != sessionId:
|
||||||
|
raise HTTPException(status_code=404, detail=f"Prompt '{promptId}' not found")
|
||||||
|
if not context.isPlatformAdmin and prompt.get("operatorUserId") != str(context.user.id):
|
||||||
|
raise HTTPException(status_code=404, detail=f"Prompt '{promptId}' not found")
|
||||||
|
|
||||||
|
from .service import getActiveService
|
||||||
|
service = getActiveService(sessionId)
|
||||||
|
if service:
|
||||||
|
await service.removePersistentPrompt(promptId)
|
||||||
|
else:
|
||||||
|
# Bot not connected: mark consumed directly
|
||||||
|
interface.updateDirectorPrompt(promptId, {
|
||||||
|
"status": TeamsbotDirectorPromptStatus.CONSUMED.value,
|
||||||
|
"statusMessage": "Removed by operator (bot offline)",
|
||||||
|
})
|
||||||
|
return {"deleted": True, "promptId": promptId}
|
||||||
|
|
||||||
|
|
||||||
# =========================================================================
|
# =========================================================================
|
||||||
# Voice Test Endpoint
|
# Voice Test Endpoint
|
||||||
# =========================================================================
|
# =========================================================================
|
||||||
|
|
@ -845,7 +981,7 @@ async def testVoice(
|
||||||
):
|
):
|
||||||
"""Test TTS voice with AI-generated sample text in the correct language."""
|
"""Test TTS voice with AI-generated sample text in the correct language."""
|
||||||
from modules.interfaces.interfaceVoiceObjects import getVoiceInterface
|
from modules.interfaces.interfaceVoiceObjects import getVoiceInterface
|
||||||
from modules.serviceCenter.services.serviceAi.mainServiceAi import AiService
|
from .service import createAiService
|
||||||
from modules.datamodels.datamodelAi import AiCallRequest, AiCallOptions, OperationTypeEnum, PriorityEnum
|
from modules.datamodels.datamodelAi import AiCallRequest, AiCallOptions, OperationTypeEnum, PriorityEnum
|
||||||
|
|
||||||
mandateId = _validateInstanceAccess(instanceId, context)
|
mandateId = _validateInstanceAccess(instanceId, context)
|
||||||
|
|
@ -856,12 +992,7 @@ async def testVoice(
|
||||||
botName = body.get("botName", "AI Assistant")
|
botName = body.get("botName", "AI Assistant")
|
||||||
|
|
||||||
try:
|
try:
|
||||||
# Generate test text dynamically via AI in the correct language
|
aiService = createAiService(context.user, mandateId, instanceId)
|
||||||
serviceContext = type('Ctx', (), {
|
|
||||||
'user': context.user, 'mandateId': mandateId,
|
|
||||||
'featureInstanceId': instanceId, 'featureCode': 'teamsbot'
|
|
||||||
})()
|
|
||||||
aiService = AiService(serviceCenter=serviceContext)
|
|
||||||
await aiService.ensureAiObjectsInitialized()
|
await aiService.ensureAiObjectsInitialized()
|
||||||
|
|
||||||
aiRequest = AiCallRequest(
|
aiRequest = AiCallRequest(
|
||||||
|
|
|
||||||
Some files were not shown because too many files have changed in this diff Show more
Loading…
Reference in a new issue