Compare commits

69 commits
main...feat/grafi
| Author | SHA1 | Date |
|---|---|---|
|  | f6229b517e |  |
|  | 1e8a7ca854 |  |
|  | b54033722d |  |
|  | 9ec5d576f8 |  |
|  | fac191fc77 |  |
|  | df9a43c190 |  |
|  | 92dc6172f2 |  |
|  | 1aec73f110 |  |
|  | ca6d8b9635 |  |
|  | f2c3090070 |  |
|  | 35693a61e3 |  |
|  | 0403a19c22 |  |
|  | 130bdfb7cc |  |
|  | 73fe11230d |  |
|  | 513d879ae8 |  |
|  | 436547d47e |  |
|  | e9c39f8e31 |  |
|  | cfd303792f |  |
|  | dac9911f8b |  |
|  | 5455e09367 |  |
|  | 9ae2ffc415 |  |
|  | f184da9898 |  |
|  | e6ca6a9d8e |  |
|  | f96325f804 |  |
|  | 60b2fcf56b |  |
|  | e07ac24fd8 |  |
|  | 64ee5200af |  |
|  | e93ce71174 |  |
|  | 3da6e24bec |  |
|  | d3d682fe4d |  |
|  | 7942766931 |  |
|  | c140bd14d4 |  |
|  | 06d9910ecd |  |
|  | b500bfa6c1 |  |
|  | afd7e9d941 |  |
|  | b12671bbb5 |  |
|  | 880fa4d787 |  |
|  | 72d3175f49 |  |
|  | ce671f61b6 |  |
|  | 4a840e9e6e |  |
|  | 93cb6939dc |  |
|  | 3add5c9a80 |  |
|  | 6a5ff1ff7c |  |
|  | dff3d41845 |  |
|  | a7f4055130 |  |
|  | 078b4eaaaf |  |
|  | 9d82d3d353 |  |
|  | ba21005401 |  |
|  | 052647a52b |  |
|  | 49f3660d89 |  |
|  | 9816f13ae9 |  |
|  | b405cebdec |  |
|  | fb3a1f0a51 |  |
|  | 4d7ccb0418 |  |
|  | d9fcea54ff |  |
|  | e8abd553d0 |  |
|  | 30ea8bbefe |  |
|  | 96e2356ddd |  |
|  | 3507c16055 |  |
|  | f8853d23ca |  |
|  | d505ffd9cd |  |
|  | 60d5062204 |  |
|  | 564a1200c6 |  |
|  | 8221a0da3e |  |
|  | 24f0c3e2eb |  |
|  | 794ba36f27 |  |
|  | b6be8f391e |  |
|  | dd2c771cb8 |  |
|  | e8adf18b0f |  |
368 changed files with 38701 additions and 7698 deletions
```diff
@@ -29,10 +29,10 @@ ENV
 *.swo
 *~
 
-# Environment files (env_gcp.env will be copied as .env by workflow)
-env_*.env
+# Environment files (env-gateway-*.env will be copied as .env by workflow)
+env-*.env
 .env.local
-# Note: .env is NOT ignored - it will be created from env_gcp.env by the workflow
+# Note: .env is NOT ignored - it will be created from env-gateway-*.env by the workflow
 
 # Logs
 *.log
```
```diff
@@ -22,8 +22,8 @@ jobs:
 cd /srv/gateway/current &&
 git remote set-url origin ssh://git@git.poweron.swiss:2222/PowerOn/gateway.git &&
 git pull &&
-cp env_prod_forgejo.env .env &&
-rm -f env_*.env &&
+cp env-gateway-prod-forgejo.env .env &&
+rm -f env-*.env &&
 source .venv/bin/activate &&
 pip install -r requirements.txt --no-cache-dir &&
 sudo systemctl restart gateway
```
```diff
@@ -30,7 +30,7 @@ ENV
 *~
 
 # Environment files (will be handled separately)
-env_*.env
+env-*.env
 .env.local
 
 # Logs
```
.github/workflows/deploy-gcp.yml (16 changes, vendored)
```diff
@@ -11,11 +11,11 @@
 # 2. Create secret "CONFIG_KEY" in Secret Manager with your master key
 # 3. Grant the service account access to Secret Manager secrets
 # 4. Create Cloud SQL instance (if not exists)
-# 5. Create env_prod.env and env_int.env files with your configuration
+# 5. Create env-gateway-prod.env and env-gateway-int.env files with your configuration
 #
 # Environment Selection:
-# - Push to 'main' branch → uses env_prod.env (production)
-# - Push to 'int' branch → uses env_int.env (integration)
+# - Push to 'main' branch → uses env-gateway-prod.env (production)
+# - Push to 'int' branch → uses env-gateway-int.env (integration)
 # - Manual dispatch → select environment (prod/int) to use corresponding env file
 
 name: Deploy Gateway to Google Cloud Run
```
```diff
@@ -70,10 +70,10 @@ jobs:
           fi
           echo "env_type=$ENV_TYPE" >> $GITHUB_OUTPUT
           echo "service_name=gateway-$ENV_TYPE" >> $GITHUB_OUTPUT
-          echo "env_file=env_${ENV_TYPE}.env" >> $GITHUB_OUTPUT
+          echo "env_file=env-gateway-${ENV_TYPE}.env" >> $GITHUB_OUTPUT
           echo "Determined environment: $ENV_TYPE"
           echo "Service name: gateway-$ENV_TYPE"
-          echo "Env file: env_${ENV_TYPE}.env"
+          echo "Env file: env-gateway-${ENV_TYPE}.env"
 
       - name: Authenticate to Google Cloud
         uses: google-github-actions/auth@v2
```
```diff
@@ -98,11 +98,11 @@ jobs:
             echo "Using $ENV_FILE"
             cp "$ENV_FILE" .env
           else
-            echo "Warning: $ENV_FILE not found, using env_prod.env as fallback"
-            cp env_prod.env .env
+            echo "Warning: $ENV_FILE not found, using env-gateway-prod.env as fallback"
+            cp env-gateway-prod.env .env
           fi
           # Clean up other env files (optional, for security)
-          rm -f env_*.env
+          rm -f env-*.env
 
       - name: Build and push container image
         working-directory: ./gateway
```
.github/workflows/int_gateway-int.yml (4 changes, vendored)
```diff
@@ -74,10 +74,10 @@ jobs:
         run: unzip release.zip
 
       - name: Set productive environment
-        run: cp env_int.env .env
+        run: cp env-gateway-int.env .env
 
       - name: Clean up environment files
-        run: rm -f env_*.env
+        run: rm -f env-*.env
 
       - name: 'Deploy to Azure Web App'
         uses: azure/webapps-deploy@v3
```
.github/workflows/main_gateway-prod.yml (4 changes, vendored)
```diff
@@ -74,10 +74,10 @@ jobs:
         run: unzip release.zip
 
       - name: Set productive environment
-        run: cp env_prod.env .env
+        run: cp env-gateway-prod.env .env
 
       - name: Clean up environment files
-        run: rm -f env_*.env
+        run: rm -f env-*.env
 
       - name: 'Deploy to Azure Web App'
         uses: azure/webapps-deploy@v3
```
.gitignore (2 changes, vendored)
```diff
@@ -131,7 +131,7 @@ env.bak/
 venv.bak/
 
 # Don't ignore environment templates
-!env*.env
+!env-*.env
 
 # Spyder project settings
 .spyderproject
```
```diff
@@ -28,13 +28,13 @@ COPY requirements.lock .
 RUN pip install --no-cache-dir --upgrade pip && \
     pip install --no-cache-dir -r requirements.lock
 
-# Copy application code (includes .env file created by workflow from env_gcp.env)
+# Copy application code (includes .env file created by workflow from env-gateway-*.env)
 COPY . .
 
 # Create directories for logs (Cloud Run uses /tmp for writable storage)
 RUN mkdir -p /tmp/logs /tmp/debug
 
-# Note: .env file (created from env_gcp.env by workflow) contains encrypted secrets
+# Note: .env file (created from env-gateway-*.env by workflow) contains encrypted secrets
 # These are decrypted at runtime using the master key from Secret Manager
 # (mounted as CONFIG_KEY environment variable in Cloud Run)
 
```
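Reviewer note: the Dockerfile comment says the baked-in `.env` holds encrypted secrets that are decrypted at runtime with the master key from Secret Manager (`CONFIG_KEY`). The `DEV_ENC:` / `INT_ENC:` / `PROD_ENC:` values in the env files further down base64-decode to tokens beginning `gAAAAA`, which is the Fernet token signature, so a plausible decryption path looks like the sketch below. The prefix list and the double-base64 wrapping are guesses from the env files, not the gateway's actual implementation.

```python
# Hypothetical sketch of runtime secret decryption, assuming Fernet tokens.
# The real gateway code may differ; only the *_ENC: prefixes are from the diff.
import base64

from cryptography.fernet import Fernet  # pip install cryptography

ENC_PREFIXES = ("DEV_ENC:", "INT_ENC:", "PROD_ENC:")

def decryptConfigValue(raw: str, masterKey: bytes) -> str:
    """Return the plaintext for an *_ENC: value, or the value unchanged."""
    for prefix in ENC_PREFIXES:
        if raw.startswith(prefix):
            token = base64.b64decode(raw[len(prefix):])  # outer base64 -> Fernet token
            return Fernet(masterKey).decrypt(token).decode("utf-8")
    return raw

if __name__ == "__main__":
    key = Fernet.generate_key()  # in Cloud Run this would come from the CONFIG_KEY env var
    token = Fernet(key).encrypt(b"s3cret")
    wrapped = "PROD_ENC:" + base64.b64encode(token).decode("ascii")
    assert decryptConfigValue(wrapped, key) == "s3cret"
    print("decrypted OK")
```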
app.py (45 changes)
```diff
@@ -294,6 +294,14 @@ except Exception as e:
 async def lifespan(app: FastAPI):
     logger.info("Application is starting up")
 
+    # Validate FK metadata on all Pydantic models (fail-fast, no silent fallbacks)
+    from modules.shared.fkRegistry import validateFkTargets
+    fkErrors = validateFkTargets()
+    if fkErrors:
+        for err in fkErrors:
+            logger.error("FK metadata validation: %s", err)
+        raise SystemExit(f"FK metadata validation failed ({len(fkErrors)} error(s)) — fix datamodels before starting")
+
     # AI connectors already pre-warmed at module-load via _eager_prewarm() in aicoreModelRegistry.
 
     # Bootstrap database if needed (creates initial users, mandates, roles, etc.)
```
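The fail-fast shape here is worth calling out: collect every error first, log all of them, then refuse to start, so one run surfaces the full list instead of one error per restart. A self-contained sketch of the same pattern; the `(model, field, target)` registry layout is hypothetical, only the pattern mirrors the app.py change:

```python
# Minimal fail-fast startup validation sketch (registry layout is invented).
import logging

logging.basicConfig(level=logging.ERROR)
logger = logging.getLogger(__name__)

KNOWN_TABLES = {"users", "mandates", "roles"}
FK_REGISTRY = [
    ("Invoice", "mandateId", "mandates"),
    ("Invoice", "ownerId", "usrs"),  # typo: will be reported
]

def validateFkTargets() -> list[str]:
    return [
        f"{model}.{field} references unknown table '{target}'"
        for model, field, target in FK_REGISTRY
        if target not in KNOWN_TABLES
    ]

if __name__ == "__main__":
    fkErrors = validateFkTargets()
    if fkErrors:
        for err in fkErrors:
            logger.error("FK metadata validation: %s", err)
        raise SystemExit(f"FK metadata validation failed ({len(fkErrors)} error(s))")
```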
```diff
@@ -327,9 +335,9 @@
 
     # Sync gateway i18n registry to DB and load translation cache
     try:
-        from modules.shared.i18nRegistry import _syncRegistryToDb, _loadCache
-        await _syncRegistryToDb()
-        await _loadCache()
+        from modules.shared.i18nRegistry import syncRegistryToDb, loadCache
+        await syncRegistryToDb()
+        await loadCache()
         logger.info("i18n registry sync + cache load completed")
     except Exception as e:
         logger.warning(f"i18n registry sync failed (non-critical): {e}")
```
```diff
@@ -388,6 +396,10 @@
     from modules.shared.auditLogger import registerAuditLogCleanupScheduler
     registerAuditLogCleanupScheduler()
 
+    # Register enterprise subscription auto-renewal scheduler
+    from modules.serviceCenter.services.serviceSubscription.enterpriseRenewalScheduler import registerEnterpriseRenewalScheduler
+    registerEnterpriseRenewalScheduler()
+
     # Recover background jobs that were RUNNING when the previous worker died
     try:
         from modules.serviceCenter.services.serviceBackgroundJobs.mainBackgroundJobService import (
```
```diff
@@ -397,6 +409,16 @@
     except Exception as e:
         logger.warning(f"BackgroundJob recovery failed (non-critical): {e}")
 
+    # Subscribe knowledge ingestion to connection lifecycle events so OAuth
+    # connect/disconnect reliably trigger bootstrap/purge.
+    try:
+        from modules.serviceCenter.services.serviceKnowledge.subConnectorIngestConsumer import (
+            registerKnowledgeIngestionConsumer,
+        )
+        registerKnowledgeIngestionConsumer()
+    except Exception as e:
+        logger.warning(f"KnowledgeIngestionConsumer registration failed (non-critical): {e}")
+
     yield
 
     # --- Stop Managers ---
```
```diff
@@ -522,15 +544,15 @@ from modules.auth import (
 # Per-request context middleware: language (Accept-Language) + user timezone (X-User-Timezone).
 # Both are written into ContextVars and consumed by t() / resolveText() and getRequestNow()
 # without having to thread them through every call site.
-from modules.shared.i18nRegistry import _setLanguage, normalizePrimaryLanguageTag
-from modules.shared.timeUtils import _setRequestTimezone
+from modules.shared.i18nRegistry import setLanguage, normalizePrimaryLanguageTag
+from modules.shared.timeUtils import setRequestTimezone
 
 @app.middleware("http")
 async def _requestContextMiddleware(request: Request, call_next):
     acceptLang = request.headers.get("Accept-Language", "")
     lang = normalizePrimaryLanguageTag(acceptLang, "de")
-    _setLanguage(lang)
-    _setRequestTimezone(request.headers.get("X-User-Timezone", ""))
+    setLanguage(lang)
+    setRequestTimezone(request.headers.get("X-User-Timezone", ""))
     return await call_next(request)
 
 app.add_middleware(CSRFMiddleware)
```
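The comment above is the key design point: ContextVars give each request its own language/timezone without passing them through every call, and the values survive `await` points without leaking between concurrent requests. A self-contained sketch of that mechanism; `setLanguage`/`t` here are illustrative stand-ins for the gateway's i18nRegistry helpers:

```python
# Per-request context via contextvars, as used by the middleware above.
import asyncio
from contextvars import ContextVar

_language: ContextVar[str] = ContextVar("language", default="de")

def setLanguage(lang: str) -> None:
    _language.set(lang)

def t(key: str) -> str:
    # Deep in the call stack, no language parameter needed.
    return f"[{_language.get()}] {key}"

async def handleRequest(acceptLanguage: str) -> str:
    setLanguage(acceptLanguage.split(",")[0].split("-")[0] or "de")
    await asyncio.sleep(0)  # the context survives awaits
    return t("greeting")

async def main():
    # Concurrent requests do not leak language into each other:
    results = await asyncio.gather(handleRequest("de-CH,de"), handleRequest("en-US,en"))
    print(results)  # ['[de] greeting', '[en] greeting']

asyncio.run(main())
```

Each task created by `asyncio.gather` copies the current context, which is why the two simulated requests stay isolated.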
```diff
@@ -582,6 +604,9 @@ app.include_router(promptRouter)
 from modules.routes.routeDataConnections import router as connectionsRouter
 app.include_router(connectionsRouter)
 
+from modules.routes.routeTableViews import router as tableViewsRouter
+app.include_router(tableViewsRouter)
+
 from modules.routes.routeSecurityLocal import router as localRouter
 app.include_router(localRouter)
 
```
```diff
@@ -594,6 +619,9 @@ app.include_router(googleRouter)
 from modules.routes.routeSecurityClickup import router as clickupRouter
 app.include_router(clickupRouter)
 
+from modules.routes.routeSecurityInfomaniak import router as infomaniakRouter
+app.include_router(infomaniakRouter)
+
 from modules.routes.routeClickup import router as clickupApiRouter
 app.include_router(clickupApiRouter)
 
```
```diff
@@ -661,6 +689,9 @@ app.include_router(navigationRouter)
 from modules.routes.routeWorkflowDashboard import router as workflowDashboardRouter
 app.include_router(workflowDashboardRouter)
 
+from modules.routes.routeAutomationWorkspace import router as automationWorkspaceRouter
+app.include_router(automationWorkspaceRouter)
+
 # ============================================================================
 # PLUG&PLAY FEATURE ROUTERS
 # Dynamically load routers from feature containers in modules/features/
```
BIN  assets/fonts/NotoEmoji-Regular.ttf (new file; binary file not shown)
```diff
@@ -45,11 +45,6 @@ Connector_StacSwisstopo_MAX_RETRIES = 3
 Connector_StacSwisstopo_RETRY_DELAY = 1.0
 Connector_StacSwisstopo_ENABLE_CACHE = True
 
-# Demo RMA credentials (same for all demo trustee instances)
-Demo_RMA_ApiBaseUrl = https://service.int.runmyaccounts.com/api/latest/clients/
-Demo_RMA_ClientName = poweronag
-Demo_RMA_ApiKey = pat_tipTbnHU26CrMzAnLSjCR_uzHJv4CDNa7obaQGHIA-4
-
 # Operator company information (shown on invoice emails)
 Operator_CompanyName = PowerOn AG
 Operator_Address = Birmensdorferstrasse 94, 8003 Zürich
```
```diff
@@ -37,7 +37,8 @@
 "y": 200,
 "title": "Pro Scan-Dokument",
 "parameters": {
-    "level": 1,
+    "items": {"type": "ref", "nodeId": "n2", "path": ["files"]},
+    "level": "auto",
     "concurrency": 1
 }
 },
```
```diff
@@ -19,7 +19,7 @@ APP_JWT_KEY_SECRET = DEV_ENC:Z0FBQUFBQm8xSUpERjlrSktmZHVuQnJ1VVJDdndLaUcxZGJsT2Z
 APP_TOKEN_EXPIRY=300
 
 # CORS Configuration
-APP_ALLOWED_ORIGINS=http://localhost:8080,http://localhost:5176,https://playground.poweron-center.net
+APP_ALLOWED_ORIGINS=http://localhost:8080,http://localhost:5176,https://playground.poweron.swiss
 
 # Logging configuration
 APP_LOGGING_LOG_LEVEL = DEBUG
```
```diff
@@ -51,6 +51,8 @@ Service_CLICKUP_CLIENT_ID = O3FX3H602A30MQN4I4SBNGJLIDBD5SL4
 Service_CLICKUP_CLIENT_SECRET = DEV_ENC:Z0FBQUFBQnB5dkd4ZWVBeHVtRnpIT0VBN0tSZDhLRmFmN05DOVBOelJtLWhkVnJDRVBqUkh3bDFTZFRWaWQ1cWowdGNLUk5IQzlGN1J6RFVCaW8zRnBwLVBnclJfdWgxV3pVRzFEV2lwcW5Rc19Xa1ROWXNJcUF0ajZaYUxOUXk0WHRsRmJLM25FaHV5T2IxdV92ZW1nRjhzaGpwU0l2Wm9FTkRnY2lJVjhuNHUwT29salAxYV8wPQ==
 Service_CLICKUP_OAUTH_REDIRECT_URI = http://localhost:8000/api/clickup/auth/connect/callback
 
+# Infomaniak: no OAuth client. Users paste a Personal Access Token (kdrive + mail) per UI.
+
 # Stripe Billing (both end with _SECRET for encryption script)
 STRIPE_SECRET_KEY_SECRET = DEV_ENC:Z0FBQUFBQnB5dkd5aHNGejgzQmpTdmprdzQxR19KZkh3MlhYUTNseFN3WnlaWjh2SDZyalN6aU9xSktkbUQwUnZrVnlvbGVRQm4yZFdiRU5aSEk5WVJuUnR4VUwtTm9OVk1WWmJQeU5QaDdib0hfVWV5U1BfYTFXRmdoOWdnOWxkb3JFQmF3bm45UjFUVUxmWGtGRkFKUGd6bmhpQlFnaVI3Q2lLdDlsY1VESk1vOEM0ZFBJNW1qcVZ0N2tPYmRLNmVKajZ2M3o3S05lWnRRVG5LdkRseW4wQ3VjNHNQZTZUdz09
 STRIPE_WEBHOOK_SECRET = DEV_ENC:Z0FBQUFBQnB5dkd5dDJMSHBrVk8wTzJhU2xzTTZCZWdvWmU2NGI2WklfRXRJZVUzaVYyOU9GLUZsalUwa2lPdEgtUHo0dVVvRDU1cy1saHJyU0Rxa2xQZjBuakExQzk3bmxBcU9WbEIxUEtpR1JoUFMxZG9ISGRZUXFhdFpSMGxvQUV3a0VLQllfUUtCOHZwTGdteV9rYTFOazBfSlN3ekNWblFpakJlZVlCTmNkWWQ4Sm01a1RCWTlnTlFHWVA0MkZYMlprUExrWFN2V0NVU1BTd1NKczFJbVo3VHpLdlc4UT09
```
```diff
@@ -77,7 +79,7 @@ Feature_SyncDelta_JIRA_DELTA_TOKEN_SECRET = DEV_ENC:Z0FBQUFBQm8xSUpEbm0yRUJ6VUJK
 # Teamsbot Browser Bot Service
 # For local testing: run the bot locally with `npm run dev` in service-teams-browser-bot
 # The bot will connect back to localhost:8000 via WebSocket
-TEAMSBOT_BROWSER_BOT_URL = https://cae-poweron-shared.redwater-53d21339.switzerlandnorth.azurecontainerapps.io
+TEAMSBOT_BROWSER_BOT_URL = http://localhost:4100
 
 # Debug Configuration
 APP_DEBUG_CHAT_WORKFLOW_ENABLED = True
```
```diff
@@ -3,7 +3,9 @@
 # System Configuration
 APP_ENV_TYPE = int
 APP_ENV_LABEL = Integration Instance
-APP_API_URL = https://gateway-int.poweron-center.net
+APP_API_URL = https://gateway-int.poweron.swiss
+# Force SameSite=None+Secure for auth cookies (cross-site UI on poweron-center.net). Optional if APP_API_URL is https://
+APP_COOKIE_SECURE = true
 APP_KEY_SYSVAR = CONFIG_KEY
 APP_INIT_PASS_ADMIN_SECRET = INT_ENC:Z0FBQUFBQm8xSVRjWm41MWZ4TUZGaVlrX3pWZWNwakJsY3Facm0wLVZDd1VKeTFoZEVZQnItcEdUUnVJS1NXeDBpM2xKbGRsYmxOSmRhc29PZjJSU2txQjdLbUVrTTE1NEJjUXBHbV9NOVJWZUR3QlJkQnJvTEU9
 APP_INIT_PASS_EVENT_SECRET = INT_ENC:Z0FBQUFBQm8xSVRjdmtrakgxa0djekZVNGtTZV8wM2I5UUpCZllveVBMWXROYk5yS3BiV3JEelJSM09VYTRONHpnY3VtMGxDRk5JTEZSRFhtcDZ0RVRmZ1RicTFhb3c5dVZRQ1o4SmlkLVpPTW5MMTU2eTQ0Vkk9
```
```diff
@@ -19,7 +21,7 @@ APP_JWT_KEY_SECRET = INT_ENC:Z0FBQUFBQm8xSVRjNUctb2RwU25iR3ZnanBOdHZhWUtIajZ1RnZ
 APP_TOKEN_EXPIRY=300
 
 # CORS Configuration
-APP_ALLOWED_ORIGINS=http://localhost:8080,https://playground.poweron-center.net,https://playground-int.poweron-center.net,http://localhost:5176,https://nyla.poweron-center.net, https://nyla-int.poweron-center.net
+APP_ALLOWED_ORIGINS=http://localhost:8080,http://localhost:5176,https://playground.poweron.swiss,https://playground-int.poweron.swiss,https://nyla.poweron.swiss,https://nyla-int.poweron.swiss,https://nyla.poweron-center.net,https://nyla-int.poweron-center.net
 
 # Logging configuration
 APP_LOGGING_LOG_LEVEL = DEBUG
```
```diff
@@ -34,26 +36,28 @@ APP_LOGGING_BACKUP_COUNT = 5
 # OAuth: Auth app (login/JWT) vs Data app (Graph / Google APIs)
 Service_MSFT_AUTH_CLIENT_ID = c7e7112d-61dc-4f3a-8cd3-08cc4cd7504c
 Service_MSFT_AUTH_CLIENT_SECRET = INT_ENC:Z0FBQUFBQm83T29rMDZvcV9qTG5xb1FzUkdqS1llbzRxSEJXbmpONFFtcUtfZXdtZjQybmJSMjBjMEpnRVhiOGRuczZvVFBFdVVTQV80SG9PSnRQTEpLdVViNm5wc2E5aGRLWjZ4TGF1QjVkNmdRSzBpNWNkYXVublFYclVEdEM5TVBBZWVVMW5RVWk=
-Service_MSFT_AUTH_REDIRECT_URI = https://gateway-int.poweron-center.net/api/msft/auth/login/callback
+Service_MSFT_AUTH_REDIRECT_URI = https://gateway-int.poweron.swiss/api/msft/auth/login/callback
 Service_MSFT_DATA_CLIENT_ID = c7e7112d-61dc-4f3a-8cd3-08cc4cd7504c
 Service_MSFT_DATA_CLIENT_SECRET = INT_ENC:Z0FBQUFBQm83T29rMDZvcV9qTG5xb1FzUkdqS1llbzRxSEJXbmpONFFtcUtfZXdtZjQybmJSMjBjMEpnRVhiOGRuczZvVFBFdVVTQV80SG9PSnRQTEpLdVViNm5wc2E5aGRLWjZ4TGF1QjVkNmdRSzBpNWNkYXVublFYclVEdEM5TVBBZWVVMW5RVWk=
-Service_MSFT_DATA_REDIRECT_URI = https://gateway-int.poweron-center.net/api/msft/auth/connect/callback
+Service_MSFT_DATA_REDIRECT_URI = https://gateway-int.poweron.swiss/api/msft/auth/connect/callback
 
 Service_GOOGLE_AUTH_CLIENT_ID = 354925410565-aqs2b2qaiqmm73qpjnel6al8eid78uvg.apps.googleusercontent.com
 Service_GOOGLE_AUTH_CLIENT_SECRET = INT_ENC:Z0FBQUFBQm8xSVRjNThGeVRNd3hacThtRnE0bzlDa0JPUWQyaEd6QjlFckdsMGZjRlRfUks2bXV3aDdVRTF3LVRlZVY5WjVzSXV4ZGNnX002RDl3dkNYdGFzZkxVUW01My1wTHRCanVCLUozZEx4TlduQlB5MnpvNTR2SGlvbFl1YkhzTEtsSi1SOEo=
-Service_GOOGLE_AUTH_REDIRECT_URI = https://gateway-int.poweron-center.net/api/google/auth/login/callback
+Service_GOOGLE_AUTH_REDIRECT_URI = https://gateway-int.poweron.swiss/api/google/auth/login/callback
 Service_GOOGLE_DATA_CLIENT_ID = 354925410565-aqs2b2qaiqmm73qpjnel6al8eid78uvg.apps.googleusercontent.com
 Service_GOOGLE_DATA_CLIENT_SECRET = INT_ENC:Z0FBQUFBQm8xSVRjNThGeVRNd3hacThtRnE0bzlDa0JPUWQyaEd6QjlFckdsMGZjRlRfUks2bXV3aDdVRTF3LVRlZVY5WjVzSXV4ZGNnX002RDl3dkNYdGFzZkxVUW01My1wTHRCanVCLUozZEx4TlduQlB5MnpvNTR2SGlvbFl1YkhzTEtsSi1SOEo=
-Service_GOOGLE_DATA_REDIRECT_URI = https://gateway-int.poweron-center.net/api/google/auth/connect/callback
+Service_GOOGLE_DATA_REDIRECT_URI = https://gateway-int.poweron.swiss/api/google/auth/connect/callback
 
 # ClickUp OAuth (Verbindungen / automation). Create an app in ClickUp: Settings → Apps → API; set redirect URL to Service_CLICKUP_OAUTH_REDIRECT_URI exactly.
 Service_CLICKUP_CLIENT_ID = O3FX3H602A30MQN4I4SBNGJLIDBD5SL4
 Service_CLICKUP_CLIENT_SECRET = INT_ENC:Z0FBQUFBQnB5dkd5SE1uVURMNVE3NkM4cHBKa2R2TjBnLWdpSXI5dHpKWGExZVFiUF95TFNnZ1NwLWFLdmh6eWFZTHVHYTBzU2FGRUpLYkVyM1NvZjZkWDZHN21qUER5ZVNOaGpCc3NrUGd3VnFTclF3OW1nUlVuWXQ1UVhDLVpyb1BwRExOeFpDeVhtbEhDVnd4TVdpbzNBNk5QQWFPdjdza0xBWGxFY1E3WFpCSUlNa1l4RDlBPQ==
-Service_CLICKUP_OAUTH_REDIRECT_URI = http://localhost:8000/api/clickup/auth/connect/callback
+Service_CLICKUP_OAUTH_REDIRECT_URI = https://gateway-int.poweron.swiss/api/clickup/auth/connect/callback
 
+# Infomaniak: no OAuth client. Users paste a Personal Access Token (kdrive + mail) per UI.
+
 # Stripe Billing (both end with _SECRET for encryption script)
 STRIPE_SECRET_KEY_SECRET = INT_ENC:Z0FBQUFBQnB5dkd5ekdBaGNGVUlOQUpncTlzLWlTV0V5OWZzQkpDczhCUGw4U1JpTHZ0d3pfYlFNWElLRlNiNlNsaDRYTGZUTkg2OUFrTW1GZXpOUjBVbmRQWjN6ekhHd2ZSQ195OHlaeWh1TmxrUm10V2R3YmdncmFLbFMzVjdqcWJMSUJPR2xuSEozclNoZG1rZVBTaWg3OFQ1Qzdxb0wyQ2RKazc2dG1aZXBUTXlvbDZqLS1KOVI5M3BGc3NQZkZRbnFpRjIwWmh2ZHlVNlpxZVo2dWNmMjQ5eW02QmtzUT09
-STRIPE_WEBHOOK_SECRET = whsec_2agCQEbDPSOn2C40EJcwoPCqlvaPLF7M
+STRIPE_WEBHOOK_SECRET = INT_ENC:Z0FBQUFBQnA4UXZiUUVqTl9lREVRWTh1aHFDcFpwcXRkOUx4MS1ham9Ddkl6T0xzMnJuM1hhUHdGNG5CenY1MUg4RlJBOGFQTWl5cVd5MjJ2REItcHYyRmdLX3ZlT2p5Z3BRVkMtQnRoTVkteXlfaU92MVBtOEI0Ni1kbGlfa0NiRmFRRXNHLVE2NHI=
 STRIPE_API_VERSION = 2026-01-28.clover
 STRIPE_AUTOMATIC_TAX_ENABLED = false
 STRIPE_TAX_RATE_ID_CH_VAT = txr_1TOQd14OUoIL0Osj7A0ZQlr0
```
```diff
@@ -11,7 +11,7 @@ APP_API_URL = https://api.poweron.swiss
 # PostgreSQL DB Host
 DB_HOST=10.20.0.21
 DB_USER=poweron_dev
-DB_PASSWORD_SECRET = mypassword
+DB_PASSWORD_SECRET = PROD_ENC:Z0FBQUFBQnA4UXZiMnRoUzVlbVRLX3JTRl94cVpMaURtMndZVmFBYXdvdnIxLV81dWwxWmhmcUlCMUFZbDhRT2NsQmNqSl9ZMmRWRVN1Y2JqNlVwOXRJY1VBTm1oSjNiaFE9PQ==
 DB_PORT=5432
 
 # Security Configuration
```
```diff
@@ -51,6 +51,8 @@ Service_CLICKUP_CLIENT_ID = O3FX3H602A30MQN4I4SBNGJLIDBD5SL4
 Service_CLICKUP_CLIENT_SECRET = PROD_ENC:Z0FBQUFBQnB5dkd6VGw5WDdhdDRsVENSalhSSUV0OFFxbEx0V1l6aktNV0E5Y18xU3JHLUlqMWVJdmxyajAydVZRaDJkZzJOVXhxRV9ROFRZbWxlRjh4c3NtQnRFMmRtZWpzTWVsdngtWldlNXRKTURHQjJCOEt6alMwQlkwOFYyVVJWNURJUGJIZDIxYVlfNnBrMU54M0Q3TVdVbFZqRkJKTUtqa05wUkV4eGZvbXNsVi1nNVdBPQ==
 Service_CLICKUP_OAUTH_REDIRECT_URI = https://api.poweron.swiss/api/clickup/auth/connect/callback
 
+# Infomaniak: no OAuth client. Users paste a Personal Access Token (kdrive + mail) per UI.
+
 # Stripe Billing (both end with _SECRET for encryption script)
 STRIPE_SECRET_KEY_SECRET = PROD_ENC:Z0FBQUFBQnB5dkd6aVA3R3VRS3VHMUgzUEVjYkR4eUZKWFhPUzFTTVlHNnBvT3FienNQaUlBWVpPLXJyVGpGMWk4LXktMXphX0J6ZTVESkJxdjNNa3ZJbF9wX2ppYzdjYlF0cmdVamlEWWJDSmJYYkJseHctTlh4dnNoQWs4SG5haVl2TTNDdXpuaFpqeDBtNkFCbUxMa0RaWG14dmxyOEdILTNrZ2licmNpbXVkN2lFSWoxZW1BODNpV0ZTQ0VaeXRmR1d4RjExMlVFS3MtQU9zZXZlZE1mTmY3OWctUXJHdz09
 STRIPE_WEBHOOK_SECRET = PROD_ENC:Z0FBQUFBQnBudkpGNUpTWldsakYydFhFelBrR1lSaWxYT3kyMENOMUljZTJUZHBWcEhhdWVCMzYxZXQ5b3VlTFVRalFiTVdsbGxrdUx0RDFwSEpsOC1sTDJRTEJNQlA3S3ZaQzBtV1h6bWp5VnlMZUgwUlF3cXYxcnljZVE5SWdzLVg3V0syOWRYS08=
```
```diff
@@ -6,7 +6,8 @@ APP_ENV_LABEL = Production Instance
 APP_KEY_SYSVAR = CONFIG_KEY
 APP_INIT_PASS_ADMIN_SECRET = PROD_ENC:Z0FBQUFBQnBDM1Z3UnJRV0sySFlDblpXUlREclREaW1WbUt6bGtQYkdrNkZDOXNOLXFua1hqeFF2RHJnRXJ5VlVGV3hOZm41QjZOMlNTb0duYXNxZi05dXVTc2xDVkx0SVBFLUhncVo5T0VUZHE0UTZLWWw3ck09
 APP_INIT_PASS_EVENT_SECRET = PROD_ENC:Z0FBQUFBQnBDM1Z3QVpIY19DQVZSSzJmc2F0VEZvQlU1cHBhTEgxdHdnR3g4eW01aTEzYTUxc1gxTDR1RVVpSHRXYjV6N1BLZUdCUGlfOW1qdy0xSHFVRkNBcGZvaGlSSkZycXRuUllaWnpyVGRoeFg1dGEyNUk9
-APP_API_URL = https://gateway-prod.poweron-center.net
+APP_API_URL = https://gateway-prod.poweron.swiss
+APP_COOKIE_SECURE = true
 
 # PostgreSQL DB Host
 DB_HOST=gateway-prod-server.postgres.database.azure.com
```
```diff
@@ -19,7 +20,7 @@ APP_JWT_KEY_SECRET = PROD_ENC:Z0FBQUFBQnBDM1Z3elhfV0Rnd2pQRjlMdkVwX1FnSmRhSzNZUl
 APP_TOKEN_EXPIRY=300
 
 # CORS Configuration
-APP_ALLOWED_ORIGINS=http://localhost:8080,https://playground.poweron-center.net,https://playground-int.poweron-center.net,http://localhost:5176,https://nyla.poweron-center.net,https://nyla-int.poweron-center.net
+APP_ALLOWED_ORIGINS=http://localhost:8080,http://localhost:5176,https://playground.poweron.swiss,https://playground-int.poweron.swiss,https://nyla.poweron.swiss,https://nyla-int.poweron.swiss,https://nyla.poweron-center.net,https://nyla-int.poweron-center.net
 
 # Logging configuration
 APP_LOGGING_LOG_LEVEL = DEBUG
```
```diff
@@ -34,22 +35,24 @@ APP_LOGGING_BACKUP_COUNT = 5
 # OAuth: Auth app (login/JWT) vs Data app (Graph / Google APIs)
 Service_MSFT_AUTH_CLIENT_ID = c7e7112d-61dc-4f3a-8cd3-08cc4cd7504c
 Service_MSFT_AUTH_CLIENT_SECRET = PROD_ENC:Z0FBQUFBQnBESkk2T25scFU1T1pNd2FENTFRM3kzcEpSXy1HT0trQkR2Wnl3U3RYbExzRy1YUTkxd3lPZE84U2lhX3FZanp5TjhYRGluLXVjU3hjaWRBUnZLbVhtRDItZ3FxNXJ3MUxicUZTXzJWZVNrR0VKN3ZlNEtET1ppOFk0MzNmbkwyRmROUk4=
-Service_MSFT_AUTH_REDIRECT_URI = https://gateway-prod.poweron-center.net/api/msft/auth/login/callback
+Service_MSFT_AUTH_REDIRECT_URI = https://gateway-prod.poweron.swiss/api/msft/auth/login/callback
 Service_MSFT_DATA_CLIENT_ID = c7e7112d-61dc-4f3a-8cd3-08cc4cd7504c
 Service_MSFT_DATA_CLIENT_SECRET = PROD_ENC:Z0FBQUFBQnBESkk2T25scFU1T1pNd2FENTFRM3kzcEpSXy1HT0trQkR2Wnl3U3RYbExzRy1YUTkxd3lPZE84U2lhX3FZanp5TjhYRGluLXVjU3hjaWRBUnZLbVhtRDItZ3FxNXJ3MUxicUZTXzJWZVNrR0VKN3ZlNEtET1ppOFk0MzNmbkwyRmROUk4=
-Service_MSFT_DATA_REDIRECT_URI = https://gateway-prod.poweron-center.net/api/msft/auth/connect/callback
+Service_MSFT_DATA_REDIRECT_URI = https://gateway-prod.poweron.swiss/api/msft/auth/connect/callback
 
 Service_GOOGLE_AUTH_CLIENT_ID = 354925410565-aqs2b2qaiqmm73qpjnel6al8eid78uvg.apps.googleusercontent.com
 Service_GOOGLE_AUTH_CLIENT_SECRET = PROD_ENC:Z0FBQUFBQnBDM1Z3eWFwSEZ4YnRJcjU1OW5kcXZKdkt1Z3gzWDFhVW5Eelh3VnpnNlppcWxweHY5UUQzeDIyVk83cW1XNVE4bllVWnR2MjlSQzFrV1UyUVV6OUt5b3Vqa3QzMUIwNFBqc2FVSXRxTlQ1OHVJZVFibnhBQ2puXzBwSXp5NUZhZjM1d1o=
-Service_GOOGLE_AUTH_REDIRECT_URI = https://gateway-prod.poweron-center.net/api/google/auth/login/callback
+Service_GOOGLE_AUTH_REDIRECT_URI = https://gateway-prod.poweron.swiss/api/google/auth/login/callback
 Service_GOOGLE_DATA_CLIENT_ID = 354925410565-aqs2b2qaiqmm73qpjnel6al8eid78uvg.apps.googleusercontent.com
 Service_GOOGLE_DATA_CLIENT_SECRET = PROD_ENC:Z0FBQUFBQnBDM1Z3eWFwSEZ4YnRJcjU1OW5kcXZKdkt1Z3gzWDFhVW5Eelh3VnpnNlppcWxweHY5UUQzeDIyVk83cW1XNVE4bllVWnR2MjlSQzFrV1UyUVV6OUt5b3Vqa3QzMUIwNFBqc2FVSXRxTlQ1OHVJZVFibnhBQ2puXzBwSXp5NUZhZjM1d1o=
-Service_GOOGLE_DATA_REDIRECT_URI = https://gateway-prod.poweron-center.net/api/google/auth/connect/callback
+Service_GOOGLE_DATA_REDIRECT_URI = https://gateway-prod.poweron.swiss/api/google/auth/connect/callback
 
 # ClickUp OAuth (Verbindungen / automation). Create an app in ClickUp: Settings → Apps → API; set redirect URL to Service_CLICKUP_OAUTH_REDIRECT_URI exactly.
 Service_CLICKUP_CLIENT_ID = O3FX3H602A30MQN4I4SBNGJLIDBD5SL4
 Service_CLICKUP_CLIENT_SECRET = PROD_ENC:Z0FBQUFBQnB5dkd6VGw5WDdhdDRsVENSalhSSUV0OFFxbEx0V1l6aktNV0E5Y18xU3JHLUlqMWVJdmxyajAydVZRaDJkZzJOVXhxRV9ROFRZbWxlRjh4c3NtQnRFMmRtZWpzTWVsdngtWldlNXRKTURHQjJCOEt6alMwQlkwOFYyVVJWNURJUGJIZDIxYVlfNnBrMU54M0Q3TVdVbFZqRkJKTUtqa05wUkV4eGZvbXNsVi1nNVdBPQ==
-Service_CLICKUP_OAUTH_REDIRECT_URI = https://gateway-prod.poweron-center.net/api/clickup/auth/connect/callback
+Service_CLICKUP_OAUTH_REDIRECT_URI = https://gateway-prod.poweron.swiss/api/clickup/auth/connect/callback
 
+# Infomaniak: no OAuth client. Users paste a Personal Access Token (kdrive + mail) per UI.
+
 # Stripe Billing (both end with _SECRET for encryption script)
 STRIPE_SECRET_KEY_SECRET = PROD_ENC:Z0FBQUFBQnB5dkd6aVA3R3VRS3VHMUgzUEVjYkR4eUZKWFhPUzFTTVlHNnBvT3FienNQaUlBWVpPLXJyVGpGMWk4LXktMXphX0J6ZTVESkJxdjNNa3ZJbF9wX2ppYzdjYlF0cmdVamlEWWJDSmJYYkJseHctTlh4dnNoQWs4SG5haVl2TTNDdXpuaFpqeDBtNkFCbUxMa0RaWG14dmxyOEdILTNrZ2licmNpbXVkN2lFSWoxZW1BODNpV0ZTQ0VaeXRmR1d4RjExMlVFS3MtQU9zZXZlZE1mTmY3OWctUXJHdz09
```
```diff
@@ -9,6 +9,7 @@ import logging
 import importlib
 import os
 import time
+import threading
 from typing import Dict, List, Optional, Any, Tuple
 from modules.datamodels.datamodelAi import AiModel
 from .aicoreBase import BaseConnectorAi
```
```diff
@@ -31,11 +32,37 @@ class ModelRegistry:
         self._connectors: Dict[str, BaseConnectorAi] = {}
         self._lastRefresh: Optional[float] = None
         self._refreshInterval: float = 300.0  # 5 minutes
+        self._refreshLock = threading.Lock()
         self._connectorsInitialized: bool = False
         self._discoveredConnectorsCache: Optional[List[BaseConnectorAi]] = None  # Avoid re-instantiating on every discoverConnectors() call
         self._getAvailableModelsCache: Dict[Tuple[str, int], Tuple[List[AiModel], float]] = {}  # (user_id, rbac_id) -> (models, ts)
         self._getAvailableModelsCacheTtl: float = 30.0  # seconds
 
+    def _addModelToDict(self, model: AiModel, connectorType: str, target: Dict[str, AiModel]):
+        """Add model to a dict, tolerating benign re-adds from the same connector."""
+        if model.displayName in target:
+            existing = target[model.displayName]
+            if existing.name == model.name and existing.connectorType == model.connectorType:
+                logger.debug(f"Skipping duplicate model '{model.displayName}' from same connector {connectorType}")
+                return
+            raise ValueError(
+                f"displayName conflict '{model.displayName}': "
+                f"existing name='{existing.name}' (connector: {existing.connectorType}), "
+                f"new name='{model.name}' (connector: {connectorType})"
+            )
+
+        if TESTING_MAX_TOKENS_OVERRIDE is not None and model.maxTokens > TESTING_MAX_TOKENS_OVERRIDE:
+            originalMaxTokens = model.maxTokens
+            model.maxTokens = TESTING_MAX_TOKENS_OVERRIDE
+            logger.debug(f"TESTING: Overrode maxTokens for {model.displayName}: {originalMaxTokens} -> {TESTING_MAX_TOKENS_OVERRIDE}")
+
+        target[model.displayName] = model
+        logger.debug(f"Registered model: {model.displayName} (name: {model.name}) from {connectorType}")
+
+    def _addModel(self, model: AiModel, connectorType: str):
+        """Convenience wrapper for adding to self._models."""
+        self._addModelToDict(model, connectorType, self._models)
+
     def registerConnector(self, connector: BaseConnectorAi):
         """Register a connector and collect its models."""
         connectorType = connector.getConnectorType()
```
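The dedup rule in `_addModelToDict` is subtle: an identical re-add from the same connector is silently skipped, while a genuine `displayName` collision still raises. A standalone sketch of just that rule (`Model` is a stand-in for `AiModel`):

```python
# Standalone sketch of the _addModelToDict dedup rule.
from dataclasses import dataclass

@dataclass
class Model:
    name: str
    displayName: str
    connectorType: str

def addModel(model: Model, target: dict[str, Model]) -> None:
    existing = target.get(model.displayName)
    if existing is not None:
        if existing.name == model.name and existing.connectorType == model.connectorType:
            return  # benign re-add from the same connector
        raise ValueError(f"displayName conflict '{model.displayName}'")
    target[model.displayName] = model

registry: dict[str, Model] = {}
m = Model("claude-sonnet-4-6", "Anthropic Claude Sonnet 4.6", "anthropic")
addModel(m, registry)
addModel(m, registry)  # no-op, no error
try:
    addModel(Model("other-model", "Anthropic Claude Sonnet 4.6", "openai"), registry)
except ValueError as e:
    print(e)  # displayName conflict 'Anthropic Claude Sonnet 4.6'
```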
```diff
@@ -47,26 +74,10 @@ class ModelRegistry:
 
         self._connectors[connectorType] = connector
 
         # Collect models from this connector
         try:
             models = connector.getCachedModels()
             for model in models:
-                # Validate displayName uniqueness
-                if model.displayName in self._models:
-                    existingModel = self._models[model.displayName]
-                    errorMsg = f"Duplicate displayName '{model.displayName}' detected! Existing model: displayName='{existingModel.displayName}', name='{existingModel.name}' (connector: {existingModel.connectorType}), New model: displayName='{model.displayName}', name='{model.name}' (connector: {connectorType}). displayName must be unique."
-                    logger.error(errorMsg)
-                    raise ValueError(errorMsg)
-
-                # TODO TESTING: Override maxTokens if testing override is enabled
-                if TESTING_MAX_TOKENS_OVERRIDE is not None and model.maxTokens > TESTING_MAX_TOKENS_OVERRIDE:
-                    originalMaxTokens = model.maxTokens
-                    model.maxTokens = TESTING_MAX_TOKENS_OVERRIDE
-                    logger.debug(f"TESTING: Overrode maxTokens for {model.displayName}: {originalMaxTokens} -> {TESTING_MAX_TOKENS_OVERRIDE}")
-
-                # Use displayName as the key (must be unique)
-                self._models[model.displayName] = model
-                logger.debug(f"Registered model: {model.displayName} (name: {model.name}) from {connectorType}")
+                self._addModel(model, connectorType)
         except Exception as e:
             logger.error(f"Failed to register models from {connectorType}: {e}")
             raise
```
```diff
@@ -116,51 +127,40 @@ class ModelRegistry:
         self._connectorsInitialized = True
 
     def refreshModels(self, force: bool = False):
-        """Refresh models from all registered connectors."""
-        import time
-
+        """Refresh models from all registered connectors. Thread-safe via _refreshLock."""
         self.ensureConnectorsRegistered()
 
         currentTime = time.time()
 
         # Check if refresh is needed
         if (not force and
             self._lastRefresh is not None and
             currentTime - self._lastRefresh < self._refreshInterval):
             return
 
-        logger.info("Refreshing model registry...")
+        if not self._refreshLock.acquire(blocking=False):
+            logger.debug("refreshModels already running in another thread, skipping")
+            return
 
-        # Clear existing models
-        self._models.clear()
+        try:
+            logger.info("Refreshing model registry...")
+            newModels: Dict[str, AiModel] = {}
 
-        # Re-register all connectors
-        for connector in self._connectors.values():
-            try:
-                connector.clearCache()  # Clear connector cache
-                models = connector.getCachedModels()
-                for model in models:
-                    # Validate displayName uniqueness
-                    if model.displayName in self._models:
-                        existingModel = self._models[model.displayName]
-                        errorMsg = f"Duplicate displayName '{model.displayName}' detected! Existing model: displayName='{existingModel.displayName}', name='{existingModel.name}' (connector: {existingModel.connectorType}), New model: displayName='{model.displayName}', name='{model.name}' (connector: {connector.getConnectorType()}). displayName must be unique."
-                        logger.error(errorMsg)
-                        raise ValueError(errorMsg)
-
-                    # TODO TESTING: Override maxTokens if testing override is enabled
-                    if TESTING_MAX_TOKENS_OVERRIDE is not None and model.maxTokens > TESTING_MAX_TOKENS_OVERRIDE:
-                        originalMaxTokens = model.maxTokens
-                        model.maxTokens = TESTING_MAX_TOKENS_OVERRIDE
-                        logger.debug(f"TESTING: Overrode maxTokens for {model.displayName}: {originalMaxTokens} -> {TESTING_MAX_TOKENS_OVERRIDE}")
-
-                    # Use displayName as the key (must be unique)
-                    self._models[model.displayName] = model
-            except Exception as e:
-                logger.error(f"Failed to refresh models from {connector.getConnectorType()}: {e}")
-                raise
-
-        self._lastRefresh = currentTime
-        logger.info(f"Model registry refreshed: {len(self._models)} models available")
+            # Re-register all connectors
+            for connector in self._connectors.values():
+                connectorType = connector.getConnectorType()
+                try:
+                    connector.clearCache()
+                    models = connector.getCachedModels()
+                    for model in models:
+                        self._addModelToDict(model, connectorType, newModels)
+                except Exception as e:
+                    logger.error(f"Failed to refresh models from {connectorType}: {e}")
+                    raise
+
+            self._models = newModels
+            self._lastRefresh = time.time()
+            logger.info(f"Model registry refreshed: {len(self._models)} models available")
+        finally:
+            self._refreshLock.release()
 
     def getModel(self, displayName: str) -> Optional[AiModel]:
         """Get a specific model by displayName (displayName must be unique)."""
```
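Two things changed here beyond the dedup helper: the refresh now builds into a staging dict and swaps it in at the end, so concurrent readers never observe the half-empty registry the old `self._models.clear()` exposed, and the non-blocking `acquire` skips a refresh that is already running instead of queueing behind it. A minimal sketch of that pattern in isolation:

```python
# Stale-free refresh: build into a staging dict, swap atomically,
# skip (rather than block) if another refresh is in flight.
import threading

class Registry:
    def __init__(self):
        self._items: dict[str, int] = {"a": 1}
        self._lock = threading.Lock()

    def refresh(self, fetch) -> bool:
        if not self._lock.acquire(blocking=False):
            return False            # another thread is already refreshing
        try:
            staging = dict(fetch()) # readers still see the old dict here
            self._items = staging   # single reference swap, effectively atomic
            return True
        finally:
            self._lock.release()

reg = Registry()
print(reg.refresh(lambda: {"a": 2, "b": 3}))  # True
print(reg._items)                             # {'a': 2, 'b': 3}
```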
```diff
@@ -272,7 +272,9 @@ class ModelSelector:
             return 1.0
 
         elif requestedPriority == PriorityEnum.SPEED:
-            return model.speedRating / 10.0
+            # Scale to same magnitude as operation type (x1000) so speed
+            # can meaningfully influence model ranking across tiers.
+            return model.speedRating * 100.0
 
         elif requestedPriority == PriorityEnum.QUALITY:
             return model.qualityRating / 10.0
```
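The comment explains the intent; the arithmetic makes it concrete. With the old `/ 10.0` scaling, a speed rating contributed at most 1.0 against operation-type scores on the order of thousands, so SPEED priority was effectively a no-op:

```python
# Worked example for the SPEED branch, for a model with speedRating 7.
speedRating = 7
oldContribution = speedRating / 10.0    # 0.7
newContribution = speedRating * 100.0   # 700.0
operationTypeScore = 9 * 1000           # assumed x1000 scale per the comment above
print(oldContribution / operationTypeScore)  # ~0.00008: negligible, never breaks ties
print(newContribution / operationTypeScore)  # ~0.078: can shift ranking across tiers
```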
```diff
@@ -13,6 +13,35 @@ from modules.datamodels.datamodelAi import AiModel, PriorityEnum, ProcessingMode
 # Configure logger
 logger = logging.getLogger(__name__)
 
 
+def _supportsCustomTemperature(modelName: str) -> bool:
+    """Check whether an Anthropic model accepts a custom ``temperature``.
+
+    Anthropic's Extended-Thinking models (Claude 4.7 Opus and the
+    upcoming 4.7 Sonnet/Haiku, plus all 5.x and beyond) reject every
+    ``temperature`` value with HTTP 400
+    ``{"error": "`temperature` is deprecated for this model."}`` --
+    only the model's internal default is accepted. Older Claude 4.5 /
+    4.6 models still accept any value in [0, 1].
+
+    Returns:
+        True if ``temperature`` may be sent; False if it must be omitted.
+    """
+    if not modelName:
+        return True
+    name = modelName.lower()
+    if name.startswith("claude-opus-4-7"):
+        return False
+    if name.startswith("claude-sonnet-4-7"):
+        return False
+    if name.startswith("claude-haiku-4-7"):
+        return False
+    # 5.x and beyond: same Extended-Thinking family, no custom temperature.
+    if name.startswith("claude-opus-5") or name.startswith("claude-sonnet-5") or name.startswith("claude-haiku-5"):
+        return False
+    return True
+
+
 def loadConfigData():
     """Load configuration data for Anthropic connector"""
     return {
```
```diff
@@ -49,6 +78,102 @@
     def getModels(self) -> List[AiModel]:
         # Get all available Anthropic models.
         return [
+            AiModel(
+                name="claude-opus-4-7",
+                displayName="Anthropic Claude Opus 4.7",
+                connectorType="anthropic",
+                apiUrl="https://api.anthropic.com/v1/messages",
+                temperature=0.2,
+                maxTokens=128000,
+                contextLength=1000000,
+                costPer1kTokensInput=0.005,  # $5/M tokens (Anthropic API, 2026-04)
+                costPer1kTokensOutput=0.025,  # $25/M tokens
+                speedRating=5,
+                qualityRating=10,
+                functionCall=self.callAiBasic,
+                functionCallStream=self.callAiBasicStream,
+                priority=PriorityEnum.QUALITY,
+                processingMode=ProcessingModeEnum.DETAILED,
+                operationTypes=createOperationTypeRatings(
+                    (OperationTypeEnum.PLAN, 10),
+                    (OperationTypeEnum.DATA_ANALYSE, 9),
+                    (OperationTypeEnum.DATA_GENERATE, 10),
+                    (OperationTypeEnum.DATA_EXTRACT, 9),
+                    (OperationTypeEnum.AGENT, 10),
+                    (OperationTypeEnum.DATA_QUERY, 3),
+                ),
+                version="claude-opus-4-7",
+                calculatepriceCHF=lambda processingTime, bytesSent, bytesReceived: (bytesSent / 4 / 1000) * 0.005 + (bytesReceived / 4 / 1000) * 0.025
+            ),
+            AiModel(
+                name="claude-sonnet-4-6",
+                displayName="Anthropic Claude Sonnet 4.6",
+                connectorType="anthropic",
+                apiUrl="https://api.anthropic.com/v1/messages",
+                temperature=0.2,
+                maxTokens=64000,
+                contextLength=1000000,
+                costPer1kTokensInput=0.003,  # $3/M tokens
+                costPer1kTokensOutput=0.015,  # $15/M tokens
+                speedRating=7,
+                qualityRating=10,
+                functionCall=self.callAiBasic,
+                functionCallStream=self.callAiBasicStream,
+                priority=PriorityEnum.BALANCED,
+                processingMode=ProcessingModeEnum.ADVANCED,
+                operationTypes=createOperationTypeRatings(
+                    (OperationTypeEnum.PLAN, 9),
+                    (OperationTypeEnum.DATA_ANALYSE, 9),
+                    (OperationTypeEnum.DATA_GENERATE, 9),
+                    (OperationTypeEnum.DATA_EXTRACT, 8),
+                    (OperationTypeEnum.AGENT, 9),
+                    (OperationTypeEnum.DATA_QUERY, 9),
+                ),
+                version="claude-sonnet-4-6",
+                calculatepriceCHF=lambda processingTime, bytesSent, bytesReceived: (bytesSent / 4 / 1000) * 0.003 + (bytesReceived / 4 / 1000) * 0.015
+            ),
+            AiModel(
+                name="claude-opus-4-7",
+                displayName="Anthropic Claude Opus 4.7 Vision",
+                connectorType="anthropic",
+                apiUrl="https://api.anthropic.com/v1/messages",
+                temperature=0.2,
+                maxTokens=128000,
+                contextLength=1000000,
+                costPer1kTokensInput=0.005,
+                costPer1kTokensOutput=0.025,
+                speedRating=5,
+                qualityRating=10,
+                functionCall=self.callAiImage,
+                priority=PriorityEnum.QUALITY,
+                processingMode=ProcessingModeEnum.DETAILED,
+                operationTypes=createOperationTypeRatings(
+                    (OperationTypeEnum.IMAGE_ANALYSE, 10)
+                ),
+                version="claude-opus-4-7",
+                calculatepriceCHF=lambda processingTime, bytesSent, bytesReceived: (bytesSent / 4 / 1000) * 0.005 + (bytesReceived / 4 / 1000) * 0.025
+            ),
+            AiModel(
+                name="claude-sonnet-4-6",
+                displayName="Anthropic Claude Sonnet 4.6 Vision",
+                connectorType="anthropic",
+                apiUrl="https://api.anthropic.com/v1/messages",
+                temperature=0.2,
+                maxTokens=64000,
+                contextLength=1000000,
+                costPer1kTokensInput=0.003,
+                costPer1kTokensOutput=0.015,
+                speedRating=6,
+                qualityRating=10,
+                functionCall=self.callAiImage,
+                priority=PriorityEnum.QUALITY,
+                processingMode=ProcessingModeEnum.DETAILED,
+                operationTypes=createOperationTypeRatings(
+                    (OperationTypeEnum.IMAGE_ANALYSE, 10)
+                ),
+                version="claude-sonnet-4-6",
+                calculatepriceCHF=lambda processingTime, bytesSent, bytesReceived: (bytesSent / 4 / 1000) * 0.003 + (bytesReceived / 4 / 1000) * 0.015
+            ),
             AiModel(
                 name="claude-sonnet-4-5-20250929",
                 displayName="Anthropic Claude Sonnet 4.5",
```
```diff
@@ -180,8 +305,11 @@
         payload: Dict[str, Any] = {
             "model": model.name,
             "messages": converted_messages,
-            "temperature": temperature,
         }
+        # Extended-Thinking models (claude-opus-4-7 etc.) reject any
+        # `temperature` value -- only the model default is accepted.
+        if _supportsCustomTemperature(model.name):
+            payload["temperature"] = temperature
 
         # Anthropic requires max_tokens - use provided value or throw error
         if maxTokens is None:
```
```diff
@@ -223,6 +351,7 @@
 
         # Parse response
         anthropicResponse = response.json()
+        stop_reason = anthropicResponse.get("stop_reason")
 
         # Extract content and tool_use blocks from response
         content = ""
```
```diff
@@ -246,9 +375,25 @@
 
         if not content and not toolCalls:
             logger.warning(f"Anthropic API returned empty content. Full response: {anthropicResponse}")
-            content = "[Anthropic API returned empty response]"
+            err = (
+                "Anthropic refused the request (content policy) — try another model or adjust the prompt."
+                if stop_reason == "refusal"
+                else f"Anthropic returned no assistant text (stop_reason={stop_reason or 'unknown'})."
+            )
+            return AiModelResponse(
+                content="",
+                success=False,
+                error=err,
+                modelId=model.name,
+                metadata={
+                    "response_id": anthropicResponse.get("id", ""),
+                    "stop_reason": stop_reason,
+                },
+            )
 
         metadata = {"response_id": anthropicResponse.get("id", "")}
+        if stop_reason:
+            metadata["stop_reason"] = stop_reason
         if toolCalls:
             metadata["toolCalls"] = toolCalls
 
```
```diff
@@ -285,10 +430,11 @@
         payload: Dict[str, Any] = {
             "model": model.name,
             "messages": converted,
-            "temperature": temperature,
             "max_tokens": model.maxTokens,
             "stream": True,
         }
+        if _supportsCustomTemperature(model.name):
+            payload["temperature"] = temperature
         if system_prompt:
             payload["system"] = system_prompt
         if modelCall.tools:
```
```diff
@@ -363,6 +509,19 @@
                 f"Anthropic stream returned empty response: model={model.name}, "
                 f"stopReason={stopReason}"
             )
+            err = (
+                "Anthropic refused the request (content policy) — try another model or adjust the prompt."
+                if stopReason == "refusal"
+                else f"Anthropic returned no assistant text (stop_reason={stopReason or 'unknown'})."
+            )
+            yield AiModelResponse(
+                content="",
+                success=False,
+                error=err,
+                modelId=model.name,
+                metadata={"stopReason": stopReason} if stopReason else {},
+            )
+            return
 
         metadata: Dict[str, Any] = {}
         if stopReason:
```
```diff
@@ -513,8 +672,8 @@
         if systemPrompt:
             payload["system"] = systemPrompt
 
-        # Set temperature from model
-        payload["temperature"] = temperature
+        if _supportsCustomTemperature(model.name):
+            payload["temperature"] = temperature
 
         # Make API call with headers from httpClient (which includes anthropic-version)
         response = await self.httpClient.post(
```
```diff
@@ -11,6 +11,30 @@ from modules.datamodels.datamodelAi import AiModel, PriorityEnum, ProcessingMode
 
 logger = logging.getLogger(__name__)
 
 
+def _supportsCustomTemperature(modelName: str) -> bool:
+    """Check whether an OpenAI model accepts a custom `temperature` value.
+
+    GPT-5.x and the o-series (o1/o3/o4) reasoning models reject every
+    `temperature` value other than the default (1) with HTTP 400
+    `unsupported_value`. For these models we must omit `temperature`
+    from the payload entirely. Older chat-completions models
+    (gpt-4o, gpt-4o-mini, gpt-4.1, gpt-3.5-*) still accept any value
+    in [0, 2].
+
+    Returns:
+        True if `temperature` may be sent; False if it must be omitted.
+    """
+    if not modelName:
+        return True
+    name = modelName.lower()
+    if name.startswith("gpt-5"):
+        return False
+    if name.startswith("o1") or name.startswith("o3") or name.startswith("o4"):
+        return False
+    return True
+
+
 def loadConfigData():
     """Load configuration data for OpenAI connector"""
     return {
```
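Both connectors now follow the same gating pattern: build the payload without `temperature`, then add it only when the model family accepts it. A condensed, self-contained form of that pattern, with the prefix lists copied from the two `_supportsCustomTemperature` helpers in this diff:

```python
# Condensed temperature gating shared by the Anthropic and OpenAI connectors.
def supportsCustomTemperature(modelName: str) -> bool:
    name = (modelName or "").lower()
    blocked = ("gpt-5", "o1", "o3", "o4",
               "claude-opus-4-7", "claude-sonnet-4-7", "claude-haiku-4-7",
               "claude-opus-5", "claude-sonnet-5", "claude-haiku-5")
    return not name.startswith(blocked)

def buildPayload(modelName: str, messages: list, temperature: float) -> dict:
    payload = {"model": modelName, "messages": messages}
    if supportsCustomTemperature(modelName):
        payload["temperature"] = temperature  # omitted entirely for reasoning models
    return payload

assert "temperature" in buildPayload("gpt-4o", [], 0.2)
assert "temperature" not in buildPayload("gpt-5.4-mini", [], 0.2)
assert "temperature" not in buildPayload("claude-opus-4-7", [], 0.2)
```

Omitting the field (rather than sending the default value) is the safe choice: the providers reject the parameter's presence, not just non-default values.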
```diff
@@ -123,6 +147,135 @@
                 version="gpt-4o",
                 calculatepriceCHF=lambda processingTime, bytesSent, bytesReceived: (bytesSent / 4 / 1000) * 0.0025 + (bytesReceived / 4 / 1000) * 0.01
             ),
+            AiModel(
+                name="gpt-5.5",
+                displayName="OpenAI GPT-5.5",
+                connectorType="openai",
+                apiUrl="https://api.openai.com/v1/chat/completions",
+                temperature=0.2,
+                maxTokens=128000,
+                contextLength=1050000,
+                costPer1kTokensInput=0.005,  # $5/M tokens (OpenAI API, 2026-04)
+                costPer1kTokensOutput=0.03,  # $30/M tokens
+                speedRating=8,
+                qualityRating=10,
+                functionCall=self.callAiBasic,
+                functionCallStream=self.callAiBasicStream,
+                priority=PriorityEnum.QUALITY,
+                processingMode=ProcessingModeEnum.DETAILED,
+                operationTypes=createOperationTypeRatings(
+                    (OperationTypeEnum.PLAN, 10),
+                    (OperationTypeEnum.DATA_ANALYSE, 10),
+                    (OperationTypeEnum.DATA_GENERATE, 10),
+                    (OperationTypeEnum.DATA_EXTRACT, 8),
+                    (OperationTypeEnum.AGENT, 10),
+                    (OperationTypeEnum.DATA_QUERY, 8),
+                ),
+                version="gpt-5.5",
+                calculatepriceCHF=lambda processingTime, bytesSent, bytesReceived: (bytesSent / 4 / 1000) * 0.005 + (bytesReceived / 4 / 1000) * 0.03
+            ),
+            AiModel(
+                name="gpt-5.4",
+                displayName="OpenAI GPT-5.4",
+                connectorType="openai",
+                apiUrl="https://api.openai.com/v1/chat/completions",
+                temperature=0.2,
+                maxTokens=128000,
+                contextLength=1050000,
+                costPer1kTokensInput=0.0025,  # $2.50/M tokens
+                costPer1kTokensOutput=0.015,  # $15/M tokens
+                speedRating=8,
+                qualityRating=10,
+                functionCall=self.callAiBasic,
+                functionCallStream=self.callAiBasicStream,
+                priority=PriorityEnum.BALANCED,
+                processingMode=ProcessingModeEnum.ADVANCED,
+                operationTypes=createOperationTypeRatings(
+                    (OperationTypeEnum.PLAN, 9),
+                    (OperationTypeEnum.DATA_ANALYSE, 10),
+                    (OperationTypeEnum.DATA_GENERATE, 10),
+                    (OperationTypeEnum.DATA_EXTRACT, 8),
+                    (OperationTypeEnum.AGENT, 9),
+                    (OperationTypeEnum.DATA_QUERY, 8),
+                ),
+                version="gpt-5.4",
+                calculatepriceCHF=lambda processingTime, bytesSent, bytesReceived: (bytesSent / 4 / 1000) * 0.0025 + (bytesReceived / 4 / 1000) * 0.015
+            ),
+            AiModel(
+                name="gpt-5.4-mini",
+                displayName="OpenAI GPT-5.4 Mini",
+                connectorType="openai",
+                apiUrl="https://api.openai.com/v1/chat/completions",
+                temperature=0.2,
+                maxTokens=128000,
+                contextLength=400000,
+                costPer1kTokensInput=0.00075,  # $0.75/M tokens
+                costPer1kTokensOutput=0.0045,  # $4.50/M tokens
+                speedRating=9,
+                qualityRating=9,
+                functionCall=self.callAiBasic,
+                functionCallStream=self.callAiBasicStream,
+                priority=PriorityEnum.SPEED,
+                processingMode=ProcessingModeEnum.BASIC,
+                operationTypes=createOperationTypeRatings(
+                    (OperationTypeEnum.PLAN, 8),
+                    (OperationTypeEnum.DATA_ANALYSE, 9),
+                    (OperationTypeEnum.DATA_GENERATE, 9),
+                    (OperationTypeEnum.DATA_EXTRACT, 8),
+                    (OperationTypeEnum.AGENT, 8),
+                    (OperationTypeEnum.DATA_QUERY, 10),
+                ),
+                version="gpt-5.4-mini",
+                calculatepriceCHF=lambda processingTime, bytesSent, bytesReceived: (bytesSent / 4 / 1000) * 0.00075 + (bytesReceived / 4 / 1000) * 0.0045
+            ),
+            AiModel(
+                name="gpt-5.4-nano",
+                displayName="OpenAI GPT-5.4 Nano",
+                connectorType="openai",
+                apiUrl="https://api.openai.com/v1/chat/completions",
+                temperature=0.2,
+                maxTokens=128000,
+                contextLength=400000,
+                costPer1kTokensInput=0.0002,  # $0.20/M tokens
+                costPer1kTokensOutput=0.00125,  # $1.25/M tokens
+                speedRating=10,
+                qualityRating=7,
+                functionCall=self.callAiBasic,
+                functionCallStream=self.callAiBasicStream,
+                priority=PriorityEnum.COST,
+                processingMode=ProcessingModeEnum.BASIC,
+                operationTypes=createOperationTypeRatings(
+                    (OperationTypeEnum.PLAN, 7),
+                    (OperationTypeEnum.DATA_ANALYSE, 7),
+                    (OperationTypeEnum.DATA_GENERATE, 8),
+                    (OperationTypeEnum.DATA_EXTRACT, 9),
+                    (OperationTypeEnum.AGENT, 7),
+                    (OperationTypeEnum.DATA_QUERY, 10),
+                ),
+                version="gpt-5.4-nano",
+                calculatepriceCHF=lambda processingTime, bytesSent, bytesReceived: (bytesSent / 4 / 1000) * 0.0002 + (bytesReceived / 4 / 1000) * 0.00125
+            ),
+            AiModel(
+                name="gpt-5.5",
+                displayName="OpenAI GPT-5.5 Vision",
+                connectorType="openai",
+                apiUrl="https://api.openai.com/v1/chat/completions",
+                temperature=0.2,
+                maxTokens=128000,
+                contextLength=1050000,
+                costPer1kTokensInput=0.005,
+                costPer1kTokensOutput=0.03,
+                speedRating=6,
+                qualityRating=10,
+                functionCall=self.callAiImage,
+                priority=PriorityEnum.QUALITY,
+                processingMode=ProcessingModeEnum.DETAILED,
+                operationTypes=createOperationTypeRatings(
+                    (OperationTypeEnum.IMAGE_ANALYSE, 10)
+                ),
+                version="gpt-5.5",
+                calculatepriceCHF=lambda processingTime, bytesSent, bytesReceived: (bytesSent / 4 / 1000) * 0.005 + (bytesReceived / 4 / 1000) * 0.03
+            ),
             AiModel(
                 name="text-embedding-3-small",
                 displayName="OpenAI Embedding Small",
```
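Every `calculatepriceCHF` lambda in these model definitions uses the same heuristic: roughly 4 bytes per token, and rates quoted per 1k tokens (so `bytes / 4 / 1000` converts bytes to thousands of tokens). A worked example with the gpt-5.4 rates from the entry above:

```python
# The pricing heuristic behind the calculatepriceCHF lambdas:
# ~4 bytes per token, rates per 1k tokens.
bytesSent, bytesReceived = 40_000, 8_000   # example request/response sizes
inputRate, outputRate = 0.0025, 0.015      # gpt-5.4: $2.50/M in, $15/M out
price = (bytesSent / 4 / 1000) * inputRate + (bytesReceived / 4 / 1000) * outputRate
print(f"{price:.4f}")  # 0.0550: ~10k input tokens + ~2k output tokens
```

Note the byte-based approximation overestimates for non-ASCII text and ignores tokenizer specifics; it trades accuracy for not needing a tokenizer at billing time.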
```diff
@@ -215,9 +368,17 @@
         payload = {
             "model": model.name,
             "messages": messages,
-            "temperature": temperature,
-            "max_tokens": maxTokens
+            # Universal output-length cap. `max_tokens` is deprecated and
+            # rejected outright by gpt-5.x / o-series; `max_completion_tokens`
+            # is accepted by every current chat-completions model (legacy
+            # gpt-4o, gpt-4.1, gpt-5.x, o1/o3/o4) per OpenAI API reference.
+            "max_completion_tokens": maxTokens
         }
+        # gpt-5.x and o-series only accept the default temperature (1) and
+        # return HTTP 400 `unsupported_value` for anything else - omit the
+        # field entirely for those models.
+        if _supportsCustomTemperature(model.name):
+            payload["temperature"] = temperature
 
         if modelCall.tools:
             payload["tools"] = modelCall.tools
```
```diff
@@ -295,10 +456,15 @@
         payload: Dict[str, Any] = {
             "model": model.name,
             "messages": messages,
-            "temperature": temperature,
-            "max_tokens": model.maxTokens,
+            # See callAiBasic for the rationale: `max_completion_tokens`
+            # is the universal output-length parameter; `max_tokens` is
+            # deprecated and rejected by gpt-5.x / o-series.
+            "max_completion_tokens": model.maxTokens,
             "stream": True,
         }
+        if _supportsCustomTemperature(model.name):
+            payload["temperature"] = temperature
 
         if modelCall.tools:
             payload["tools"] = modelCall.tools
             payload["tool_choice"] = modelCall.toolChoice or "auto"
```
```diff
@@ -449,15 +615,15 @@
         # Use the messages directly - they should already contain the image data
         # in the format: {"type": "image_url", "image_url": {"url": "data:...base64,..."}}
 
         # Use parameters from model
         temperature = model.temperature
         # Don't set maxTokens - let the model use its full context length
 
         payload = {
             "model": model.name,
-            "messages": messages,
-            "temperature": temperature
+            "messages": messages
         }
+        if _supportsCustomTemperature(model.name):
+            payload["temperature"] = temperature
 
         response = await self.httpClient.post(
             model.apiUrl,
```
```diff
@@ -19,10 +19,28 @@ ALGORITHM = APP_CONFIG.get("Auth_ALGORITHM")
 ACCESS_TOKEN_EXPIRE_MINUTES = int(APP_CONFIG.get("APP_TOKEN_EXPIRY"))
 REFRESH_TOKEN_EXPIRE_DAYS = int(APP_CONFIG.get("APP_REFRESH_TOKEN_EXPIRY", "7"))
 
-# Cookie security settings - use secure cookies based on whether API uses HTTPS
-# Cookies must have secure=True on HTTPS sites, secure=False on HTTP sites
-APP_API_URL = APP_CONFIG.get("APP_API_URL", "http://localhost:8000")
-USE_SECURE_COOKIES = APP_API_URL.startswith("https://") if APP_API_URL else False
+def _cookiePolicy() -> Tuple[bool, str, str]:
+    """
+    Return (useSecure, samesiteStarlette, samesiteSetCookieHeader).
+
+    Evaluated on each Set-Cookie so policy is not frozen at module import (config refresh / load order).
+
+    Cross-origin SPA + API: SameSite=None and Secure=True so credentialed fetch sends cookies.
+    HTTP dev: Lax + Secure=False.
+
+    APP_COOKIE_SECURE: explicit true/false (1/0, yes/no) overrides the APP_API_URL heuristic.
+    """
+    explicit = (APP_CONFIG.get("APP_COOKIE_SECURE") or "").strip().lower()
+    if explicit in ("1", "true", "yes"):
+        useSecure = True
+    elif explicit in ("0", "false", "no"):
+        useSecure = False
+    else:
+        apiUrl = (APP_CONFIG.get("APP_API_URL") or "").strip()
+        useSecure = apiUrl.startswith("https://")
+    samesite = "none" if useSecure else "lax"
+    samesiteHeader = "None" if useSecure else "Lax"
+    return useSecure, samesite, samesiteHeader
 
 
 def createAccessToken(data: dict, expiresDelta: Optional[timedelta] = None) -> Tuple[str, "datetime"]:
```
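The coupling in `_cookiePolicy` is deliberate: browsers only accept `SameSite=None` cookies when `Secure` is also set, so both flags are derived from a single boolean. A standalone sketch of the decision table it implements:

```python
# Standalone sketch of the _cookiePolicy decision: SameSite=None is only
# valid together with Secure, so both flags derive from one bool.
from typing import Optional, Tuple

def cookiePolicy(appCookieSecure: Optional[str], appApiUrl: str) -> Tuple[bool, str]:
    explicit = (appCookieSecure or "").strip().lower()
    if explicit in ("1", "true", "yes"):
        useSecure = True
    elif explicit in ("0", "false", "no"):
        useSecure = False
    else:
        useSecure = appApiUrl.strip().startswith("https://")
    return useSecure, ("none" if useSecure else "lax")

assert cookiePolicy(None, "https://gateway-prod.poweron.swiss") == (True, "none")
assert cookiePolicy(None, "http://localhost:8000") == (False, "lax")
assert cookiePolicy("true", "http://localhost:8000") == (True, "none")  # explicit override
assert cookiePolicy("no", "https://api.poweron.swiss") == (False, "lax")
```

The switch from `samesite="strict"` to this policy is what makes the cross-site SPA on poweron-center.net able to send credentialed requests to the API on poweron.swiss.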
```diff
@@ -54,13 +72,14 @@ def createRefreshToken(data: dict) -> Tuple[str, "datetime"]:
 
 def setAccessTokenCookie(response: Response, token: str, expiresDelta: Optional[timedelta] = None) -> None:
     """Set access token as httpOnly cookie."""
+    useSecure, samesite, _ = _cookiePolicy()
     maxAge = int(expiresDelta.total_seconds()) if expiresDelta else ACCESS_TOKEN_EXPIRE_MINUTES * 60
     response.set_cookie(
         key="auth_token",
         value=token,
         httponly=True,
-        secure=USE_SECURE_COOKIES,  # Only secure in production (HTTPS)
-        samesite="strict",
+        secure=useSecure,
+        samesite=samesite,
         path="/",
         max_age=maxAge
     )
```
```diff
@@ -68,12 +87,13 @@
 
 def setRefreshTokenCookie(response: Response, token: str) -> None:
     """Set refresh token as httpOnly cookie."""
+    useSecure, samesite, _ = _cookiePolicy()
     response.set_cookie(
         key="refresh_token",
         value=token,
         httponly=True,
-        secure=USE_SECURE_COOKIES,  # Only secure in production (HTTPS)
-        samesite="strict",
+        secure=useSecure,
+        samesite=samesite,
         path="/",
         max_age=REFRESH_TOKEN_EXPIRE_DAYS * 24 * 60 * 60
     )
```
```diff
@@ -84,17 +104,23 @@ def clearAccessTokenCookie(response: Response) -> None:
     Clear access token cookie by setting it to expire immediately.
     Uses both raw header manipulation and FastAPI's delete_cookie for maximum browser compatibility.
     """
-    # Build secure flag based on environment
-    secure_flag = "; Secure" if USE_SECURE_COOKIES else ""
+    useSecure, samesite, samesiteHeader = _cookiePolicy()
+    secure_flag = "; Secure" if useSecure else ""
 
     # Primary method: Raw Set-Cookie header for guaranteed deletion
     response.headers.append(
         "Set-Cookie",
-        f"auth_token=deleted; Path=/; Max-Age=0; Expires=Thu, 01 Jan 1970 00:00:00 GMT; HttpOnly{secure_flag}; SameSite=Strict"
+        f"auth_token=deleted; Path=/; Max-Age=0; Expires=Thu, 01 Jan 1970 00:00:00 GMT; HttpOnly{secure_flag}; SameSite={samesiteHeader}"
     )
 
-    # Fallback: Also use FastAPI's built-in method
-    response.delete_cookie(key="auth_token", path="/")
+    # Fallback: Also use FastAPI's built-in method (match SameSite/Secure for invalidation)
+    response.delete_cookie(
+        key="auth_token",
+        path="/",
+        secure=useSecure,
+        httponly=True,
+        samesite=samesite,
+    )
 
 
 def clearRefreshTokenCookie(response: Response) -> None:
```
@@ -102,16 +128,22 @@ def clearRefreshTokenCookie(response: Response) -> None:
    Clear refresh token cookie by setting it to expire immediately.
    Uses both raw header manipulation and FastAPI's delete_cookie for maximum browser compatibility.
    """
-    # Build secure flag based on environment
-    secure_flag = "; Secure" if USE_SECURE_COOKIES else ""
+    useSecure, samesite, samesiteHeader = _cookiePolicy()
+    secure_flag = "; Secure" if useSecure else ""

    # Primary method: Raw Set-Cookie header for guaranteed deletion
    response.headers.append(
        "Set-Cookie",
-        f"refresh_token=deleted; Path=/; Max-Age=0; Expires=Thu, 01 Jan 1970 00:00:00 GMT; HttpOnly{secure_flag}; SameSite=Strict"
+        f"refresh_token=deleted; Path=/; Max-Age=0; Expires=Thu, 01 Jan 1970 00:00:00 GMT; HttpOnly{secure_flag}; SameSite={samesiteHeader}"
    )

-    # Fallback: Also use FastAPI's built-in method
-    response.delete_cookie(key="refresh_token", path="/")
+    # Fallback: Also use FastAPI's built-in method (match SameSite/Secure for invalidation)
+    response.delete_cookie(
+        key="refresh_token",
+        path="/",
+        secure=useSecure,
+        httponly=True,
+        samesite=samesite,
+    )
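A usage sketch for the two clear helpers above, assuming a FastAPI app and that both functions are importable from this module (route path and import are illustrative):

from fastapi import APIRouter, Response

router = APIRouter()

@router.post("/auth/logout")
async def logout(response: Response) -> dict:
    # Both cookies are cleared with the same SameSite/Secure profile that
    # set them, so browsers actually drop them on cross-origin frontends too.
    clearAccessTokenCookie(response)
    clearRefreshTokenCookie(response)
    return {"status": "logged_out"}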
@@ -9,13 +9,15 @@ googleAuthScopes = [
    "https://www.googleapis.com/auth/userinfo.profile",
]

-# Google — Data app (Gmail + Drive + identity for token responses)
+# Google — Data app (Gmail + Drive + Calendar + Contacts + identity for token responses)
googleDataScopes = [
    "openid",
    "https://www.googleapis.com/auth/userinfo.email",
    "https://www.googleapis.com/auth/userinfo.profile",
    "https://www.googleapis.com/auth/gmail.readonly",
    "https://www.googleapis.com/auth/drive.readonly",
+    "https://www.googleapis.com/auth/calendar.readonly",
+    "https://www.googleapis.com/auth/contacts.readonly",
]

# Microsoft — Auth app: Graph profile only (MSAL adds openid, profile, offline_access, …)

@@ -34,9 +36,18 @@ msftDataScopes = [
    "OnlineMeetings.Read",
    "Chat.ReadWrite",
    "ChatMessage.Send",
+    "Calendars.Read",
+    "Contacts.Read",
]


+def msftDataScopesForRefresh() -> str:
+    """Space-separated scope string identical to authorization request (Token v2 refresh)."""
+    return " ".join(msftDataScopes)


+# Infomaniak intentionally has no OAuth scope set: the kDrive + Mail data APIs
+# are only reachable with manually issued Personal Access Tokens (see
+# wiki/d-guides/infomaniak-token-setup.md). The OAuth /authorize endpoint at
+# login.infomaniak.com only accepts identity scopes (openid/profile/email/phone)
+# and does not return tokens that work against /1/* data routes.
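For context, the scope string feeds the standard OAuth 2.0 refresh_token grant against the Microsoft identity platform v2 token endpoint. A hedged sketch (endpoint and field names follow the public OAuth spec; the function and its parameters are illustrative):

import aiohttp

async def refreshSketch(clientId: str, clientSecret: str, refreshToken: str) -> dict:
    # The v2 endpoint requires the scope to repeat the authorization-time
    # scopes, which is exactly what msftDataScopesForRefresh() produces.
    body = {
        "client_id": clientId,
        "client_secret": clientSecret,
        "grant_type": "refresh_token",
        "refresh_token": refreshToken,
        "scope": msftDataScopesForRefresh(),
    }
    url = "https://login.microsoftonline.com/common/oauth2/v2.0/token"
    async with aiohttp.ClientSession() as session:
        async with session.post(url, data=body) as resp:
            return await resp.json()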
@@ -30,6 +30,7 @@ class TokenManager:
        self.google_client_id = APP_CONFIG.get("Service_GOOGLE_DATA_CLIENT_ID")
        self.google_client_secret = APP_CONFIG.get("Service_GOOGLE_DATA_CLIENT_SECRET")


    def refreshMicrosoftToken(self, refreshToken: str, userId: str, oldToken: Token) -> Optional[Token]:
        """Refresh Microsoft OAuth token using refresh token"""
        try:
@@ -21,6 +21,47 @@ logger = logging.getLogger(__name__)
# No mapping needed - table name = Pydantic model name exactly


+class DatabaseQueryError(RuntimeError):
+    """Raised by DB read methods when the underlying SQL query failed.
+
+    Empty result sets do NOT raise this — they return ``[]`` / ``None`` /
+    ``{"items": [], "totalItems": 0, "totalPages": 0}`` as before. This
+    exception is reserved for **real** failures: psycopg2 ProgrammingError,
+    DataError, OperationalError, IntegrityError, plus any unexpected
+    Python error raised inside a query path.
+
+    Read methods used to silently swallow such errors and return empty
+    collections, which made every caller incapable of distinguishing
+    "no rows" from "broken query / type adapter / dropped column / lost
+    connection". That hid concrete bugs (e.g. dict passed where Postgres
+    expected a UUID string) behind misleading downstream "no record found"
+    errors.
+    """
+
+    def __init__(self, table: str, message: str, original: BaseException = None):
+        super().__init__(f"{table}: {message}")
+        self.table = table
+        self.original = original
+
+
+def _rollbackQuietly(connection) -> None:
+    """Restore the connection state after a failed query.
+
+    Postgres puts the connection in an error state after any failed
+    statement; subsequent queries on the same connection raise
+    ``InFailedSqlTransaction`` until we rollback. We swallow rollback
+    errors because the original query error is what the caller should
+    see — a secondary rollback failure typically means the connection
+    is gone and will be reopened on the next ``_ensure_connection``.
+    """
+    if connection is None:
+        return
+    try:
+        connection.rollback()
+    except Exception:
+        pass
+
+
class SystemTable(PowerOnModel):
    """Data model for system table entries"""
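A sketch of how a caller can now tell the two cases apart (route, model, and status codes are illustrative, not from the diff):

from fastapi import HTTPException

def getUserOr404(db, UserModel, userId: str) -> dict:
    # db is assumed to be a DatabaseConnector, UserModel a Pydantic model class.
    try:
        record = db.getRecord(UserModel, userId)
    except DatabaseQueryError as e:
        # Real failure: broken query, lost connection, type adapter bug.
        raise HTTPException(status_code=500, detail=f"query failed on {e.table}") from e
    if record is None:
        # Genuinely no row with this id.
        raise HTTPException(status_code=404, detail="user not found")
    return record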
@@ -76,7 +117,7 @@ def _isJsonbType(fieldType) -> bool:
    return False


-def _get_model_fields(model_class) -> Dict[str, str]:
+def getModelFields(model_class) -> Dict[str, str]:
    """Get all fields from Pydantic model and map to SQL types.

    Supports explicit db_type override via json_schema_extra={"db_type": "vector(1536)"}.

@@ -121,22 +162,7 @@ def _get_model_fields(model_class) -> Dict[str, str]:
    return fields


-def _get_fk_sort_meta(model_class) -> Dict[str, Dict[str, str]]:
-    """Map FK field name -> {model, labelField} from json_schema_extra (fk_model + frontend_fk_display_field)."""
-    result: Dict[str, Dict[str, str]] = {}
-    for name, field_info in model_class.model_fields.items():
-        extra = field_info.json_schema_extra
-        if not extra or not isinstance(extra, dict):
-            continue
-        fk_model = extra.get("fk_model")
-        label_field = extra.get("frontend_fk_display_field")
-        if fk_model and label_field:
-            result[name] = {"model": str(fk_model), "labelField": str(label_field)}
-    return result
-
-
-def _parseRecordFields(record: Dict[str, Any], fields: Dict[str, str], context: str = "") -> None:
+def parseRecordFields(record: Dict[str, Any], fields: Dict[str, str], context: str = "") -> None:
    """Parse record fields in-place: numeric typing, vector parsing, JSONB deserialization."""
    import json as _json
@@ -189,7 +215,7 @@ _current_user_id: contextvars.ContextVar[Optional[str]] = contextvars.ContextVar
)


-def _get_cached_connector(
+def getCachedConnector(
    dbHost: str,
    dbDatabase: str,
    dbUser: str = None,

@@ -553,7 +579,7 @@ class DatabaseConnector:
            }

            # Desired columns based on model
-            model_fields = _get_model_fields(model_class)
+            model_fields = getModelFields(model_class)
            desired_columns = set(["id"]) | set(model_fields.keys())

            # Add missing columns
@@ -576,29 +602,48 @@ class DatabaseConnector:
                            f"Could not add column '{col}' to '{table}': {add_err}"
                        )

-            # Targeted type-downgrade: if a model field has been
-            # changed from a structured type (JSONB) to a plain
-            # TEXT field, alter the column so writes don't fail.
-            # JSONB -> TEXT is a safe, lossless cast (JSONB is
-            # rendered as its JSON-text representation; the
-            # corresponding Pydantic ``@field_validator`` is
-            # responsible for re-decoding legacy data on read).
+            # Column type migrations for existing tables.
+            # TEXT→DOUBLE PRECISION handles three value shapes:
+            #   1. NULL / empty string → NULL
+            #   2. ISO date(time) like "2025-01-22" or "2025-01-22T10:00:00+00" → epoch via EXTRACT
+            #   3. Plain numeric string like "3.14" → direct cast
+            _TEXT_TO_DOUBLE = (
+                'DOUBLE PRECISION USING CASE'
+                ' WHEN "{col}" IS NULL OR "{col}" = \'\' THEN NULL'
+                ' WHEN "{col}" ~ \'^\\d{4}-\\d{2}-\\d{2}\''
+                ' THEN EXTRACT(EPOCH FROM "{col}"::timestamptz)'
+                ' ELSE NULLIF("{col}", \'\')::double precision'
+                ' END'
+            )
+            _SAFE_TYPE_CHANGES = {
+                ("jsonb", "TEXT"): "TEXT USING \"{col}\"::text",
+                ("text", "DOUBLE PRECISION"): _TEXT_TO_DOUBLE,
+                ("text", "INTEGER"): "INTEGER USING NULLIF(\"{col}\", '')::integer",
+                ("timestamp without time zone", "DOUBLE PRECISION"): 'DOUBLE PRECISION USING EXTRACT(EPOCH FROM "{col}" AT TIME ZONE \'UTC\')',
+                ("timestamp with time zone", "DOUBLE PRECISION"): 'DOUBLE PRECISION USING EXTRACT(EPOCH FROM "{col}")',
+                ("date", "DOUBLE PRECISION"): 'DOUBLE PRECISION USING EXTRACT(EPOCH FROM "{col}"::timestamp AT TIME ZONE \'UTC\')',
+            }
            for col in sorted(desired_columns & existing_columns):
                if col == "id":
                    continue
                desired_sql = (model_fields.get(col) or "").upper()
                currentType = existing_column_types.get(col, "")
-                if desired_sql == "TEXT" and currentType == "jsonb":
+                migration = _SAFE_TYPE_CHANGES.get((currentType, desired_sql))
+                if migration:
+                    castExpr = migration.replace("{col}", col)
                    try:
                        cursor.execute('SAVEPOINT col_migrate')
                        cursor.execute(
-                            f'ALTER TABLE "{table}" ALTER COLUMN "{col}" TYPE TEXT USING "{col}"::text'
+                            f'ALTER TABLE "{table}" ALTER COLUMN "{col}" TYPE {castExpr}'
                        )
                        cursor.execute('RELEASE SAVEPOINT col_migrate')
                        logger.info(
-                            f"Downgraded column '{col}' from JSONB to TEXT on '{table}'"
+                            f"Migrated column '{col}' from {currentType} to {desired_sql} on '{table}'"
                        )
                    except Exception as alter_err:
                        cursor.execute('ROLLBACK TO SAVEPOINT col_migrate')
                        logger.warning(
-                            f"Could not downgrade column '{col}' on '{table}': {alter_err}"
+                            f"Could not migrate column '{col}' on '{table}': {alter_err}"
                        )
        except Exception as ensure_err:
            logger.warning(
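To make the template concrete: with the _TEXT_TO_DOUBLE template above in scope, the statement actually executed for a hypothetical field "score" that moved from TEXT to DOUBLE PRECISION would look like this (table name is illustrative):

col = "score"  # hypothetical model field
castExpr = _TEXT_TO_DOUBLE.replace("{col}", col)  # {4}/{2} in the regex are untouched
print(f'ALTER TABLE "Assessment" ALTER COLUMN "{col}" TYPE {castExpr}')
# -> ALTER TABLE "Assessment" ALTER COLUMN "score" TYPE DOUBLE PRECISION USING CASE
#    WHEN "score" IS NULL OR "score" = '' THEN NULL
#    WHEN "score" ~ '^\d{4}-\d{2}-\d{2}' THEN EXTRACT(EPOCH FROM "score"::timestamptz)
#    ELSE NULLIF("score", '')::double precision END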
@@ -633,7 +678,7 @@ class DatabaseConnector:

    def _create_table_from_model(self, cursor, table: str, model_class: type) -> None:
        """Create table with columns matching Pydantic model fields."""
-        fields = _get_model_fields(model_class)
+        fields = getModelFields(model_class)

        # Enable pgvector if any field uses vector type
        if any(_isVectorType(sqlType) for sqlType in fields.values()):

@@ -666,7 +711,7 @@ class DatabaseConnector:
    ) -> None:
        """Save record to normalized table with explicit columns."""
        # Get columns from Pydantic model instead of database schema
-        fields = _get_model_fields(model_class)
+        fields = getModelFields(model_class)
        columns = ["id"] + [field for field in fields.keys() if field != "id"]

        if not columns:
@@ -751,14 +796,15 @@ class DatabaseConnector:

            # Convert row to dict and handle JSONB fields
            record = dict(row)
-            fields = _get_model_fields(model_class)
+            fields = getModelFields(model_class)

-            _parseRecordFields(record, fields, f"record {recordId}")
+            parseRecordFields(record, fields, f"record {recordId}")

            return record
        except Exception as e:
-            logger.error(f"Error loading record {recordId} from table {table}: {e}")
-            return None
+            _rollbackQuietly(getattr(self, "connection", None))
+            raise DatabaseQueryError(table, str(e), original=e) from e

    def getRecord(self, model_class: type, recordId: str) -> Optional[Dict[str, Any]]:
        """Load one row by primary key (routes / services; wraps _loadRecord)."""
@@ -788,7 +834,10 @@ class DatabaseConnector:
            createdTs = record.get("sysCreatedAt")
            if createdTs is None or createdTs == 0 or createdTs == 0.0:
                record["sysCreatedAt"] = currentTime
-            if effective_user_id:
+            # Do not wipe caller-provided sysCreatedBy (e.g. FileItem from createFile with
+            # real user). ContextVar can be "system" for the DB pool while the business
+            # user is set on the record from model_dump().
+            if effective_user_id and not record.get("sysCreatedBy"):
                record["sysCreatedBy"] = effective_user_id
            elif not record.get("sysCreatedBy"):
                if effective_user_id:
@@ -822,10 +871,10 @@ class DatabaseConnector:
                cursor.execute(f'SELECT * FROM "{table}" ORDER BY "id"')
                records = [dict(row) for row in cursor.fetchall()]

-                fields = _get_model_fields(model_class)
+                fields = getModelFields(model_class)
                modelFields = model_class.model_fields
                for record in records:
-                    _parseRecordFields(record, fields, f"table {table}")
+                    parseRecordFields(record, fields, f"table {table}")
                    # Set type-aware defaults for NULL JSONB fields
                    for fieldName, fieldType in fields.items():
                        if fieldType == "JSONB" and fieldName in record and record[fieldName] is None:

@@ -844,7 +893,8 @@ class DatabaseConnector:
            return records
        except Exception as e:
-            logger.error(f"Error loading table {table}: {e}")
-            return []
+            _rollbackQuietly(getattr(self, "connection", None))
+            raise DatabaseQueryError(table, str(e), original=e) from e

    def _registerInitialId(self, table: str, initialId: str) -> bool:
        """Registers the initial ID for a table."""
@@ -1011,10 +1061,10 @@ class DatabaseConnector:
                cursor.execute(query, where_values)
                records = [dict(row) for row in cursor.fetchall()]

-                fields = _get_model_fields(model_class)
+                fields = getModelFields(model_class)
                modelFields = model_class.model_fields
                for record in records:
-                    _parseRecordFields(record, fields, f"table {table}")
+                    parseRecordFields(record, fields, f"table {table}")
                    for fieldName, fieldType in fields.items():
                        if fieldType == "JSONB" and fieldName in record and record[fieldName] is None:
                            fieldInfo = modelFields.get(fieldName)

@@ -1043,7 +1093,8 @@ class DatabaseConnector:
            return records
        except Exception as e:
-            logger.error(f"Error loading records from table {table}: {e}")
-            return []
+            _rollbackQuietly(getattr(self, "connection", None))
+            raise DatabaseQueryError(table, str(e), original=e) from e

    def _buildPaginationClauses(
        self,
@@ -1055,7 +1106,7 @@ class DatabaseConnector:
        Translate PaginationParams + recordFilter into SQL clauses.
        Returns (where_clause, order_clause, limit_clause, values, count_values).
        """
-        fields = _get_model_fields(model_class)
+        fields = getModelFields(model_class)
        validColumns = set(fields.keys())

        where_parts: List[str] = []
@@ -1111,8 +1162,15 @@ class DatabaseConnector:
                    values.append(f"%{v}")
                elif op in ("gt", "gte", "lt", "lte"):
                    sqlOp = {"gt": ">", "gte": ">=", "lt": "<", "lte": "<="}[op]
-                    where_parts.append(f'"{key}"::TEXT {sqlOp} %s')
-                    values.append(str(v))
+                    if colType in ("INTEGER", "DOUBLE PRECISION"):
+                        try:
+                            numericValue = float(v)
+                        except (ValueError, TypeError):
+                            continue  # convert before mutating where_parts so a bad value can't desync clauses/values
+                        where_parts.append(f'"{key}"::double precision {sqlOp} %s')
+                        values.append(numericValue)
+                    else:
+                        where_parts.append(f'"{key}"::TEXT {sqlOp} %s')
+                        values.append(str(v))
                elif op == "between":
                    fromVal = v.get("from", "") if isinstance(v, dict) else ""
                    toVal = v.get("to", "") if isinstance(v, dict) else ""
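A sketch of what this branch now emits for a numeric column (the filter dict shape is assumed from the surrounding code, not shown in the diff):

# Hypothetical incoming filter from the frontend grid:
#   recordFilter = {"amount": {"op": "gte", "value": "100.5"}}
# For a DOUBLE PRECISION column "amount" the branch contributes:
#   where_parts -> ['"amount"::double precision >= %s']
#   values      -> [100.5]
# A TEXT column falls back to the old lexicographic compare:
#   where_parts -> ['"amount"::TEXT >= %s'],  values -> ['100.5']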
@@ -1137,6 +1195,21 @@ class DatabaseConnector:
                            toTs = _dt.strptime(str(toVal), '%Y-%m-%d').replace(hour=23, minute=59, second=59, tzinfo=_tz.utc).timestamp()
                            where_parts.append(f'"{key}" <= %s')
                            values.append(toTs)
+                    elif isNumericCol:
+                        try:
+                            # Convert first so a bad value can't leave where_parts/values out of sync.
+                            fromNum = float(fromVal) if fromVal else None
+                            toNum = float(toVal) if toVal else None
+                        except (ValueError, TypeError):
+                            continue
+                        if fromNum is not None and toNum is not None:
+                            where_parts.append(
+                                f'"{key}"::double precision >= %s AND "{key}"::double precision <= %s'
+                            )
+                            values.extend([fromNum, toNum])
+                        elif fromNum is not None:
+                            where_parts.append(f'"{key}"::double precision >= %s')
+                            values.append(fromNum)
+                        elif toNum is not None:
+                            where_parts.append(f'"{key}"::double precision <= %s')
+                            values.append(toNum)
                    else:
                        if fromVal and toVal:
                            where_parts.append(f'"{key}"::TEXT >= %s AND "{key}"::TEXT <= %s')
@@ -1214,10 +1287,10 @@ class DatabaseConnector:
                cursor.execute(dataSql, values)
                records = [dict(row) for row in cursor.fetchall()]

-                fields = _get_model_fields(model_class)
+                fields = getModelFields(model_class)
                modelFields = model_class.model_fields
                for record in records:
-                    _parseRecordFields(record, fields, f"table {table}")
+                    parseRecordFields(record, fields, f"table {table}")
                    for fieldName, fieldType in fields.items():
                        if fieldType == "JSONB" and fieldName in record and record[fieldName] is None:
                            fieldInfo = modelFields.get(fieldName)
@@ -1235,13 +1308,17 @@ class DatabaseConnector:
                if fieldFilter and isinstance(fieldFilter, list):
                    records = [{f: r[f] for f in fieldFilter if f in r} for r in records]

-                pageSize = pagination.pageSize if pagination else max(totalItems, 1)
-                totalPages = math.ceil(totalItems / pageSize) if totalItems > 0 else 0
-
-                return {"items": records, "totalItems": totalItems, "totalPages": totalPages}
+                from modules.routes.routeHelpers import enrichRowsWithFkLabels
+                enrichRowsWithFkLabels(records, model_class)
+
+                pageSize = pagination.pageSize if pagination else max(totalItems, 1)
+                totalPages = math.ceil(totalItems / pageSize) if totalItems > 0 else 0
+
+                return {"items": records, "totalItems": totalItems, "totalPages": totalPages}
        except Exception as e:
-            logger.error(f"Error in getRecordsetPaginated for table {table}: {e}")
-            return {"items": [], "totalItems": 0, "totalPages": 0}
+            _rollbackQuietly(getattr(self, "connection", None))
+            raise DatabaseQueryError(table, str(e), original=e) from e

    def getDistinctColumnValues(
        self,
@@ -1249,13 +1326,18 @@ class DatabaseConnector:
        column: str,
        pagination=None,
        recordFilter: Dict[str, Any] = None,
-    ) -> List[str]:
-        """
-        Returns sorted distinct non-null values for a column using SQL DISTINCT.
+        includeEmpty: bool = True,
+    ) -> List[Optional[str]]:
+        """Return sorted distinct values for a column using SQL DISTINCT.
+
+        When ``includeEmpty`` is True (default), NULL and empty-string rows are
+        represented as a single ``None`` entry at the end of the list — this
+        allows the frontend to offer a "(Leer)" filter option.

        Applies cross-filtering (all filters except the requested column).
        """
        table = model_class.__name__
-        fields = _get_model_fields(model_class)
+        fields = getModelFields(model_class)

        if column not in fields:
            return []
@@ -1274,21 +1356,32 @@ class DatabaseConnector:
            where_clause, _, _, values, _ = \
                self._buildPaginationClauses(model_class, pagination, recordFilter)

-            sql = (
-                f'SELECT DISTINCT "{column}"::TEXT AS val FROM "{table}"{where_clause} '
-                f'WHERE "{column}" IS NOT NULL AND "{column}"::TEXT != \'\' '
-                if not where_clause else
-                f'SELECT DISTINCT "{column}"::TEXT AS val FROM "{table}"{where_clause} '
-                f'AND "{column}" IS NOT NULL AND "{column}"::TEXT != \'\' '
-            )
-            sql += 'ORDER BY val'
+            nonNullCond = f'"{column}" IS NOT NULL AND "{column}"::TEXT != \'\''
+            if where_clause:
+                sql = f'SELECT DISTINCT "{column}"::TEXT AS val FROM "{table}"{where_clause} AND {nonNullCond} ORDER BY val'
+            else:
+                sql = f'SELECT DISTINCT "{column}"::TEXT AS val FROM "{table}" WHERE {nonNullCond} ORDER BY val'

            with self.connection.cursor() as cursor:
                cursor.execute(sql, values)
-                return [row["val"] for row in cursor.fetchall()]
+                result: List[Optional[str]] = [row["val"] for row in cursor.fetchall()]
+
+            if includeEmpty:
+                emptyCond = f'"{column}" IS NULL OR "{column}"::TEXT = \'\''
+                if where_clause:
+                    emptySql = f'SELECT 1 FROM "{table}"{where_clause} AND ({emptyCond}) LIMIT 1'
+                else:
+                    emptySql = f'SELECT 1 FROM "{table}" WHERE ({emptyCond}) LIMIT 1'
+                with self.connection.cursor() as cursor:
+                    cursor.execute(emptySql, values)
+                    if cursor.fetchone():
+                        result.append(None)
+
+            return result
        except Exception as e:
-            logger.error(f"Error in getDistinctColumnValues for {table}.{column}: {e}")
-            return []
+            _rollbackQuietly(getattr(self, "connection", None))
+            raise DatabaseQueryError(table, str(e), original=e) from e

    def recordCreate(
        self, model_class: type, record: Union[Dict[str, Any], BaseModel]
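A usage sketch for the new includeEmpty behaviour (model name and column values are illustrative):

# Suppose Contact.city holds the values: "Bern", "Zürich", NULL, ""
values = db.getDistinctColumnValues(Contact, "city")
# -> ["Bern", "Zürich", None]   (one trailing None stands for NULL and "")
values = db.getDistinctColumnValues(Contact, "city", includeEmpty=False)
# -> ["Bern", "Zürich"]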
@@ -1419,7 +1512,7 @@ class DatabaseConnector:
        if not self._ensureTableExists(model_class):
            raise ValueError(f"Table {table} does not exist")

-        fields = _get_model_fields(model_class)
+        fields = getModelFields(model_class)
        columns = ["id"] + [f for f in fields.keys() if f != "id"]
        modelFields = model_class.model_fields

@@ -1441,7 +1534,7 @@ class DatabaseConnector:
            createdTs = rec.get("sysCreatedAt")
            if createdTs is None or createdTs == 0 or createdTs == 0.0:
                rec["sysCreatedAt"] = currentTime
-            if effectiveUserId:
+            if effectiveUserId and not rec.get("sysCreatedBy"):
                rec["sysCreatedBy"] = effectiveUserId
            elif not rec.get("sysCreatedBy") and effectiveUserId:
                rec["sysCreatedBy"] = effectiveUserId
@@ -1541,7 +1634,7 @@ class DatabaseConnector:
        if not self._ensureTableExists(model_class):
            return 0

-        fields = _get_model_fields(model_class)
+        fields = getModelFields(model_class)
        clauses: List[str] = []
        params: List[Any] = []
        for key, val in recordFilter.items():
@@ -1659,14 +1752,15 @@ class DatabaseConnector:
                cursor.execute(query, params)
                records = [dict(row) for row in cursor.fetchall()]

-                fields = _get_model_fields(modelClass)
+                fields = getModelFields(modelClass)
                for record in records:
-                    _parseRecordFields(record, fields, f"semanticSearch {table}")
+                    parseRecordFields(record, fields, f"semanticSearch {table}")

                return records
        except Exception as e:
-            logger.error(f"Error in semantic search on {table}: {e}")
-            return []
+            _rollbackQuietly(getattr(self, "connection", None))
+            raise DatabaseQueryError(table, str(e), original=e) from e

    def close(self, forceClose: bool = False):
        """Close the database connection.
@@ -58,6 +58,12 @@ class ConnectorResolver:
        except ImportError:
            logger.warning("ClickupConnector not available")

+        try:
+            from modules.connectors.providerInfomaniak.connectorInfomaniak import InfomaniakConnector
+            ConnectorResolver._providerRegistry["infomaniak"] = InfomaniakConnector
+        except ImportError:
+            logger.warning("InfomaniakConnector not available")

    async def resolve(self, connectionId: str) -> ProviderConnector:
        """Resolve connectionId to a ProviderConnector with a fresh access token."""
        connection = await self._loadConnection(connectionId)
@@ -210,6 +210,9 @@ class ClickupListsAdapter(ServiceAdapter):
        data = await self._svc.getTask(task_id)
        if isinstance(data, dict) and data.get("error"):
            return json.dumps(data).encode("utf-8")
+        returnedId = data.get("id", "") if isinstance(data, dict) else ""
+        if returnedId and returnedId != task_id:
+            logger.warning(f"ClickUp download: requested task_id={task_id} but API returned id={returnedId}")
        payload = json.dumps(data, indent=2).encode("utf-8")
        return DownloadResult(data=payload, fileName=f"task-{task_id}.json", mimeType="application/json")
@@ -14,6 +14,8 @@ logger = logging.getLogger(__name__)

_DRIVE_BASE = "https://www.googleapis.com/drive/v3"
_GMAIL_BASE = "https://gmail.googleapis.com/gmail/v1"
+_CALENDAR_BASE = "https://www.googleapis.com/calendar/v3"
+_PEOPLE_BASE = "https://people.googleapis.com/v1"


async def _googleGet(token: str, url: str) -> Dict[str, Any]:
@@ -274,12 +276,480 @@ class GmailAdapter(ServiceAdapter):
        ]


class CalendarAdapter(ServiceAdapter):
    """Google Calendar ServiceAdapter -- browse calendars, list events, .ics download.

    Path conventions:
        ``""`` / ``"/"``              -> list calendars from ``calendarList``
        ``"/<calendarId>"``           -> list upcoming events in that calendar
        ``"/<calendarId>/<eventId>"`` -> reserved for future event detail browse
    """

    _DEFAULT_EVENT_LIMIT = 100
    _MAX_EVENT_LIMIT = 2500

    def __init__(self, accessToken: str):
        self._token = accessToken

    async def browse(
        self,
        path: str,
        filter: Optional[str] = None,
        limit: Optional[int] = None,
    ) -> List[ExternalEntry]:
        cleanPath = (path or "").strip("/")
        if not cleanPath:
            url = f"{_CALENDAR_BASE}/users/me/calendarList?maxResults=250"
            result = await _googleGet(self._token, url)
            if "error" in result:
                logger.warning(f"Google Calendar list failed: {result['error']}")
                return []
            calendars = result.get("items", [])
            if filter:
                f = filter.lower()
                calendars = [c for c in calendars if f in (c.get("summary") or "").lower()]
            return [
                ExternalEntry(
                    name=c.get("summaryOverride") or c.get("summary", ""),
                    path=f"/{c.get('id', '')}",
                    isFolder=True,
                    metadata={
                        "id": c.get("id"),
                        "primary": c.get("primary", False),
                        "accessRole": c.get("accessRole"),
                        "backgroundColor": c.get("backgroundColor"),
                        "timeZone": c.get("timeZone"),
                    },
                )
                for c in calendars
            ]
        from urllib.parse import quote
        calendarId = cleanPath.split("/", 1)[0]
        effectiveLimit = self._DEFAULT_EVENT_LIMIT if limit is None else max(1, min(int(limit), self._MAX_EVENT_LIMIT))
        url = (
            f"{_CALENDAR_BASE}/calendars/{quote(calendarId, safe='')}/events"
            f"?maxResults={effectiveLimit}&orderBy=startTime&singleEvents=true"
        )
        result = await _googleGet(self._token, url)
        if "error" in result:
            logger.warning(f"Google Calendar events failed: {result['error']}")
            return []
        events = result.get("items", [])
        return [
            ExternalEntry(
                name=ev.get("summary", "(no title)"),
                path=f"/{calendarId}/{ev.get('id', '')}",
                isFolder=False,
                mimeType="text/calendar",
                metadata={
                    "id": ev.get("id"),
                    "start": (ev.get("start") or {}).get("dateTime") or (ev.get("start") or {}).get("date"),
                    "end": (ev.get("end") or {}).get("dateTime") or (ev.get("end") or {}).get("date"),
                    "location": ev.get("location"),
                    "organizer": (ev.get("organizer") or {}).get("email"),
                    "htmlLink": ev.get("htmlLink"),
                    "status": ev.get("status"),
                },
            )
            for ev in events
        ]

    async def download(self, path: str) -> DownloadResult:
        from urllib.parse import quote
        cleanPath = (path or "").strip("/")
        if "/" not in cleanPath:
            return DownloadResult()
        calendarId, eventId = cleanPath.split("/", 1)
        url = f"{_CALENDAR_BASE}/calendars/{quote(calendarId, safe='')}/events/{quote(eventId, safe='')}"
        ev = await _googleGet(self._token, url)
        if "error" in ev:
            logger.warning(f"Google Calendar event fetch failed: {ev['error']}")
            return DownloadResult()
        icsBytes = _googleEventToIcs(ev)
        summary = ev.get("summary") or eventId
        safeName = _googleSafeFileName(summary) or "event"
        return DownloadResult(
            data=icsBytes,
            fileName=f"{safeName}.ics",
            mimeType="text/calendar",
        )
    async def upload(self, path: str, data: bytes, fileName: str) -> dict:
        return {"error": "Google Calendar upload not supported"}

    async def search(
        self,
        query: str,
        path: Optional[str] = None,
        limit: Optional[int] = None,
    ) -> List[ExternalEntry]:
        from urllib.parse import quote
        calendarId = (path or "").strip("/").split("/", 1)[0] or "primary"
        effectiveLimit = self._DEFAULT_EVENT_LIMIT if limit is None else max(1, min(int(limit), self._MAX_EVENT_LIMIT))
        url = (
            f"{_CALENDAR_BASE}/calendars/{quote(calendarId, safe='')}/events"
            f"?q={quote(query, safe='')}&maxResults={effectiveLimit}&singleEvents=true"
        )
        result = await _googleGet(self._token, url)
        if "error" in result:
            return []
        return [
            ExternalEntry(
                name=ev.get("summary", "(no title)"),
                path=f"/{calendarId}/{ev.get('id', '')}",
                isFolder=False,
                mimeType="text/calendar",
                metadata={
                    "id": ev.get("id"),
                    "start": (ev.get("start") or {}).get("dateTime") or (ev.get("start") or {}).get("date"),
                    "end": (ev.get("end") or {}).get("dateTime") or (ev.get("end") or {}).get("date"),
                },
            )
            for ev in result.get("items", [])
        ]

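A quick usage sketch of the path conventions above (the access token is a placeholder; results depend on the account):

async def _demoCalendar(accessToken: str) -> None:
    adapter = CalendarAdapter(accessToken)
    calendars = await adapter.browse("/")                 # one folder entry per calendar
    if calendars:
        events = await adapter.browse(calendars[0].path)  # events of the first calendar
        if events:
            ics = await adapter.download(events[0].path)  # DownloadResult with .ics bytes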
class ContactsAdapter(ServiceAdapter):
    """Google Contacts ServiceAdapter -- People API (read-only).

    Path conventions:
        ``""`` / ``"/"``            -> list contact groups (incl. virtual ``all`` for the user's connections)
        ``"/all"``                  -> list all ``people/me/connections``
        ``"/<groupResourceName>"``  -> list members of that contact group (e.g. ``contactGroups/myFriends``)
        ``"/<group>/<personId>"``   -> reserved for future detail browse;
                                       ``personId`` is the suffix after ``people/``
    """

    _DEFAULT_CONTACT_LIMIT = 200
    _MAX_CONTACT_LIMIT = 1000
    _PERSON_FIELDS = (
        "names,emailAddresses,phoneNumbers,organizations,addresses,biographies,memberships"
    )

    def __init__(self, accessToken: str):
        self._token = accessToken

    async def browse(
        self,
        path: str,
        filter: Optional[str] = None,
        limit: Optional[int] = None,
    ) -> List[ExternalEntry]:
        cleanPath = (path or "").strip("/")
        if not cleanPath:
            entries: List[ExternalEntry] = [
                ExternalEntry(
                    name="Alle Kontakte",
                    path="/all",
                    isFolder=True,
                    metadata={"id": "all", "isVirtual": True},
                ),
            ]
            url = f"{_PEOPLE_BASE}/contactGroups?pageSize=200"
            result = await _googleGet(self._token, url)
            if "error" not in result:
                for grp in result.get("contactGroups", []):
                    name = grp.get("formattedName") or grp.get("name") or ""
                    if not name:
                        continue
                    entries.append(
                        ExternalEntry(
                            name=name,
                            path=f"/{grp.get('resourceName', '')}",
                            isFolder=True,
                            metadata={
                                "id": grp.get("resourceName"),
                                "memberCount": grp.get("memberCount", 0),
                                "groupType": grp.get("groupType"),
                            },
                        )
                    )
            else:
                logger.warning(f"Google contactGroups list failed: {result['error']}")
            return entries
        from urllib.parse import quote
        effectiveLimit = self._DEFAULT_CONTACT_LIMIT if limit is None else max(1, min(int(limit), self._MAX_CONTACT_LIMIT))
        groupRef = cleanPath.split("/", 1)[0]
        if groupRef == "all":
            url = (
                f"{_PEOPLE_BASE}/people/me/connections"
                f"?pageSize={min(effectiveLimit, 1000)}&personFields={self._PERSON_FIELDS}"
            )
            result = await _googleGet(self._token, url)
            if "error" in result:
                logger.warning(f"Google People connections failed: {result['error']}")
                return []
            people = result.get("connections", [])
        else:
            groupResource = groupRef
            grpUrl = (
                f"{_PEOPLE_BASE}/{quote(groupResource, safe='/')}"
                f"?maxMembers={min(effectiveLimit, 1000)}"
            )
            grpResult = await _googleGet(self._token, grpUrl)
            if "error" in grpResult:
                logger.warning(f"Google contactGroup detail failed: {grpResult['error']}")
                return []
            memberResourceNames = grpResult.get("memberResourceNames") or []
            if not memberResourceNames:
                return []
            chunkSize = 200
            people: List[Dict[str, Any]] = []
            for i in range(0, min(len(memberResourceNames), effectiveLimit), chunkSize):
                chunk = memberResourceNames[i : i + chunkSize]
                params = "&".join(f"resourceNames={quote(rn, safe='/')}" for rn in chunk)
                batchUrl = f"{_PEOPLE_BASE}/people:batchGet?{params}&personFields={self._PERSON_FIELDS}"
                batchResult = await _googleGet(self._token, batchUrl)
                if "error" in batchResult:
                    logger.warning(f"Google People batchGet failed: {batchResult['error']}")
                    continue
                for resp in batchResult.get("responses", []):
                    person = resp.get("person")
                    if person:
                        people.append(person)
                if len(people) >= effectiveLimit:
                    break

        return [
            ExternalEntry(
                name=_googlePersonLabel(p) or "(no name)",
                path=f"/{groupRef}/{(p.get('resourceName', '') or '').split('/')[-1]}",
                isFolder=False,
                mimeType="text/vcard",
                metadata={
                    "id": p.get("resourceName"),
                    "emails": [e.get("value") for e in (p.get("emailAddresses") or []) if e.get("value")],
                    "phones": [pn.get("value") for pn in (p.get("phoneNumbers") or []) if pn.get("value")],
                    "organization": (p.get("organizations") or [{}])[0].get("name") if p.get("organizations") else None,
                },
            )
            for p in people[:effectiveLimit]
        ]
    async def download(self, path: str) -> DownloadResult:
        from urllib.parse import quote
        cleanPath = (path or "").strip("/")
        if "/" not in cleanPath:
            return DownloadResult()
        personSuffix = cleanPath.split("/")[-1]
        if not personSuffix:
            return DownloadResult()
        url = f"{_PEOPLE_BASE}/people/{quote(personSuffix, safe='')}?personFields={self._PERSON_FIELDS}"
        person = await _googleGet(self._token, url)
        if "error" in person:
            logger.warning(f"Google People fetch failed: {person['error']}")
            return DownloadResult()
        vcfBytes = _googlePersonToVcard(person)
        label = _googlePersonLabel(person) or personSuffix
        safeName = _googleSafeFileName(label) or "contact"
        return DownloadResult(
            data=vcfBytes,
            fileName=f"{safeName}.vcf",
            mimeType="text/vcard",
        )

    async def upload(self, path: str, data: bytes, fileName: str) -> dict:
        return {"error": "Google Contacts upload not supported"}

    async def search(
        self,
        query: str,
        path: Optional[str] = None,
        limit: Optional[int] = None,
    ) -> List[ExternalEntry]:
        from urllib.parse import quote
        effectiveLimit = self._DEFAULT_CONTACT_LIMIT if limit is None else max(1, min(int(limit), self._MAX_CONTACT_LIMIT))
        url = (
            f"{_PEOPLE_BASE}/people:searchContacts"
            f"?query={quote(query, safe='')}&pageSize={min(effectiveLimit, 30)}"
            f"&readMask={self._PERSON_FIELDS}"
        )
        result = await _googleGet(self._token, url)
        if "error" in result:
            return []
        entries: List[ExternalEntry] = []
        for r in result.get("results", []):
            p = r.get("person") or {}
            entries.append(
                ExternalEntry(
                    name=_googlePersonLabel(p) or "(no name)",
                    path=f"/search/{(p.get('resourceName', '') or '').split('/')[-1]}",
                    isFolder=False,
                    mimeType="text/vcard",
                    metadata={
                        "id": p.get("resourceName"),
                        "emails": [e.get("value") for e in (p.get("emailAddresses") or []) if e.get("value")],
                    },
                )
            )
        return entries
def _googleSafeFileName(name: str) -> str:
    import re
    return re.sub(r'[<>:"/\\|?*\x00-\x1f]', "_", name or "")[:80].strip(". ")


def _googleIcsEscape(value: str) -> str:
    if value is None:
        return ""
    return (
        value.replace("\\", "\\\\")
        .replace(";", "\\;")
        .replace(",", "\\,")
        .replace("\r\n", "\\n")
        .replace("\n", "\\n")
    )


def _googleIcsDateTime(value: Optional[str]) -> Optional[str]:
    """Convert a Google Calendar dateTime/date string to RFC 5545 format (UTC)."""
    if not value:
        return None
    from datetime import datetime, timezone
    try:
        if "T" not in value:
            dt = datetime.strptime(value, "%Y-%m-%d")
            return dt.strftime("%Y%m%d")
        normalized = value.replace("Z", "+00:00") if value.endswith("Z") else value
        dt = datetime.fromisoformat(normalized)
        if dt.tzinfo is None:
            dt = dt.replace(tzinfo=timezone.utc)
        return dt.astimezone(timezone.utc).strftime("%Y%m%dT%H%M%SZ")
    except (TypeError, ValueError):
        return None


def _googleEventToIcs(event: Dict[str, Any]) -> bytes:
    """Build a minimal RFC 5545 VCALENDAR/VEVENT for a Google Calendar event."""
    from datetime import datetime, timezone
    uid = event.get("iCalUID") or event.get("id") or "unknown@poweron"
    summary = _googleIcsEscape(event.get("summary") or "")
    location = _googleIcsEscape(event.get("location") or "")
    description = _googleIcsEscape(event.get("description") or "")
    rawStart = (event.get("start") or {}).get("dateTime") or (event.get("start") or {}).get("date")
    rawEnd = (event.get("end") or {}).get("dateTime") or (event.get("end") or {}).get("date")
    isAllDay = bool((event.get("start") or {}).get("date") and not (event.get("start") or {}).get("dateTime"))
    dtstart = _googleIcsDateTime(rawStart)
    dtend = _googleIcsDateTime(rawEnd)
    dtstamp = _googleIcsDateTime(event.get("updated")) or datetime.now(timezone.utc).strftime("%Y%m%dT%H%M%SZ")

    lines = [
        "BEGIN:VCALENDAR",
        "VERSION:2.0",
        "PRODID:-//PowerOn//Google-Calendar-Adapter//EN",
        "CALSCALE:GREGORIAN",
        "BEGIN:VEVENT",
        f"UID:{uid}",
        f"DTSTAMP:{dtstamp}",
    ]
    if dtstart:
        lines.append(f"DTSTART;VALUE=DATE:{dtstart}" if isAllDay else f"DTSTART:{dtstart}")
    if dtend:
        lines.append(f"DTEND;VALUE=DATE:{dtend}" if isAllDay else f"DTEND:{dtend}")
    if summary:
        lines.append(f"SUMMARY:{summary}")
    if location:
        lines.append(f"LOCATION:{location}")
    if description:
        lines.append(f"DESCRIPTION:{description}")
    organizer = (event.get("organizer") or {}).get("email")
    if organizer:
        lines.append(f"ORGANIZER:mailto:{organizer}")
    for att in (event.get("attendees") or []):
        addr = att.get("email")
        if addr:
            lines.append(f"ATTENDEE:mailto:{addr}")
    lines.append("END:VEVENT")
    lines.append("END:VCALENDAR")
    return ("\r\n".join(lines) + "\r\n").encode("utf-8")
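For illustration, a minimal event dict in the shape the adapter consumes and the serialization it produces (output shown as comments):

sample = {
    "id": "abc123",
    "summary": "Standup",
    "start": {"dateTime": "2025-01-22T09:00:00Z"},
    "end": {"dateTime": "2025-01-22T09:15:00Z"},
    "updated": "2025-01-21T18:00:00Z",
}
print(_googleEventToIcs(sample).decode("utf-8"))
# BEGIN:VCALENDAR
# VERSION:2.0
# PRODID:-//PowerOn//Google-Calendar-Adapter//EN
# CALSCALE:GREGORIAN
# BEGIN:VEVENT
# UID:abc123
# DTSTAMP:20250121T180000Z
# DTSTART:20250122T090000Z
# DTEND:20250122T091500Z
# SUMMARY:Standup
# END:VEVENT
# END:VCALENDAR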
def _googlePersonLabel(person: Dict[str, Any]) -> str:
    names = person.get("names") or []
    if names:
        primary = names[0]
        display = primary.get("displayName") or ""
        if display:
            return display
        given = primary.get("givenName") or ""
        family = primary.get("familyName") or ""
        full = f"{given} {family}".strip()
        if full:
            return full
    orgs = person.get("organizations") or []
    if orgs and orgs[0].get("name"):
        return orgs[0]["name"]
    emails = person.get("emailAddresses") or []
    if emails and emails[0].get("value"):
        return emails[0]["value"]
    return ""


def _googlePersonToVcard(person: Dict[str, Any]) -> bytes:
    """Build a vCard 3.0 from a Google People API person payload."""
    names = person.get("names") or []
    primaryName = names[0] if names else {}
    given = primaryName.get("givenName") or ""
    family = primaryName.get("familyName") or ""
    middle = primaryName.get("middleName") or ""
    fn = primaryName.get("displayName") or _googlePersonLabel(person) or ""

    lines = [
        "BEGIN:VCARD",
        "VERSION:3.0",
        f"N:{family};{given};{middle};;",
        f"FN:{fn}",
    ]
    orgs = person.get("organizations") or []
    if orgs:
        org = orgs[0]
        orgVal = org.get("name") or ""
        if org.get("department"):
            orgVal = f"{orgVal};{org['department']}"
        if orgVal:
            lines.append(f"ORG:{orgVal}")
        if org.get("title"):
            lines.append(f"TITLE:{org['title']}")
    for em in (person.get("emailAddresses") or []):
        addr = em.get("value")
        if not addr:
            continue
        emailType = (em.get("type") or "INTERNET").upper()
        lines.append(f"EMAIL;TYPE={emailType}:{addr}")
    for ph in (person.get("phoneNumbers") or []):
        val = ph.get("value")
        if not val:
            continue
        phType = (ph.get("type") or "VOICE").upper()
        lines.append(f"TEL;TYPE={phType}:{val}")
    for addr in (person.get("addresses") or []):
        street = addr.get("streetAddress") or ""
        city = addr.get("city") or ""
        region = addr.get("region") or ""
        postal = addr.get("postalCode") or ""
        country = addr.get("country") or ""
        if any([street, city, region, postal, country]):
            adrType = (addr.get("type") or "OTHER").upper()
            lines.append(f"ADR;TYPE={adrType}:;;{street};{city};{region};{postal};{country}")
    bios = person.get("biographies") or []
    if bios and bios[0].get("value"):
        lines.append(f"NOTE:{_googleIcsEscape(bios[0]['value'])}")
    lines.append(f"UID:{person.get('resourceName', '')}")
    lines.append("END:VCARD")
    return ("\r\n".join(lines) + "\r\n").encode("utf-8")
class GoogleConnector(ProviderConnector):
-    """Google ProviderConnector -- 1 connection -> Drive + Gmail."""
+    """Google ProviderConnector -- 1 connection -> Drive + Gmail + Calendar + Contacts."""

    _SERVICE_MAP = {
        "drive": DriveAdapter,
        "gmail": GmailAdapter,
+        "calendar": CalendarAdapter,
+        "contact": ContactsAdapter,
    }

    def getAvailableServices(self) -> List[str]:
modules/connectors/providerInfomaniak/__init__.py (new file, 3 lines)
@@ -0,0 +1,3 @@
# Copyright (c) 2025 Patrick Motsch
# All rights reserved.
"""Infomaniak Provider Connector -- 1 Connection : n Services (kDrive, Mail)."""

modules/connectors/providerInfomaniak/connectorInfomaniak.py (new file, 961 lines)
@@ -0,0 +1,961 @@
# Copyright (c) 2025 Patrick Motsch
# All rights reserved.
"""Infomaniak ProviderConnector -- kDrive + Calendar + Contacts via PAT.

The PAT carries one or more of these scopes:

- ``drive``              -> kDrive (active here)
- ``workspace:calendar`` -> Calendar (active here)
- ``workspace:contact``  -> Contacts (active here)
- ``workspace:mail``     -> Mail (no public PAT-friendly endpoint yet)

Mail is intentionally NOT in ``_SERVICE_MAP`` until we find a
PAT-authenticated endpoint -- the public ``/1/mail`` and
``mail.infomaniak.com/api/pim/mail*`` routes either don't exist (404
nginx) or 302 to OAuth, so wiring a stub adapter would only confuse
users.

Path conventions (leading slash, ``ServiceAdapter`` paths always start with
``/``):
    kDrive (api.infomaniak.com, requires ``account_id`` query arg):
        /                        -- list drives in the user's account
        /{driveId}               -- root folder of a drive
        /{driveId}/{fileId}      -- folder children OR file (download)
    Calendar (calendar.infomaniak.com PIM):
        /                        -- list calendars accessible to the user
        /{calendarId}            -- events of one calendar
        /{calendarId}/{eventId}  -- single event (.ics download)
    Contacts (contacts.infomaniak.com PIM):
        /                            -- list address books
        /{addressBookId}             -- contacts in that address book
        /{addressBookId}/{contactId} -- single contact (.vcf download)
"""

import logging
import re
from datetime import datetime, timedelta, timezone
from typing import Any, Dict, List, Optional, TypedDict
from urllib.parse import quote

import aiohttp

from modules.connectors.connectorProviderBase import (
    ProviderConnector,
    ServiceAdapter,
    DownloadResult,
)
from modules.datamodels.datamodelDataSource import ExternalEntry

logger = logging.getLogger(__name__)
_API_BASE = "https://api.infomaniak.com"
_CALENDAR_BASE = "https://calendar.infomaniak.com"
_CONTACTS_BASE = "https://contacts.infomaniak.com"
_PIM_PREFIX = "/api/pim"


class InfomaniakOwnerIdentity(TypedDict):
    """Minimal identity payload for the PAT owner.

    ``accountId`` is the only field the kDrive adapter needs at runtime.
    ``displayName`` is harvested for the connection UI; both fields come
    from the same PIM Owner record.
    """

    accountId: int
    displayName: Optional[str]


class InfomaniakIdentityError(RuntimeError):
    """Raised when no owner identity can be derived from a PAT."""
async def _infomaniakGet(
    token: str,
    endpoint: str,
    baseUrl: str = _API_BASE,
) -> Dict[str, Any]:
    """Single GET against an Infomaniak host.

    ``endpoint`` is appended to ``baseUrl`` (handles leading slash). Returns
    parsed JSON, or ``{'error': ...}`` for non-2xx / network failures.
    """
    url = f"{baseUrl.rstrip('/')}/{endpoint.lstrip('/')}"
    headers = {"Authorization": f"Bearer {token}", "Accept": "application/json"}
    timeout = aiohttp.ClientTimeout(total=20)
    try:
        async with aiohttp.ClientSession(timeout=timeout) as session:
            async with session.get(url, headers=headers, allow_redirects=False) as resp:
                if resp.status in (200, 201):
                    return await resp.json()
                errorText = await resp.text()
                logger.warning(f"Infomaniak GET {url} -> {resp.status}: {errorText[:300]}")
                return {"error": f"{resp.status}: {errorText[:200]}"}
    except Exception as e:
        logger.error(f"Infomaniak GET {url} crashed: {e}")
        return {"error": str(e)}
async def _infomaniakDownload(
    token: str,
    endpoint: str,
    baseUrl: str = _API_BASE,
) -> Optional[bytes]:
    """Binary download from an Infomaniak host. Returns bytes or ``None``.

    Unlike :func:`_infomaniakGet`, this follows redirects: kDrive's
    ``/2/drive/{driveId}/files/{fileId}/download`` answers with
    ``302 -> presigned CDN URL`` (standard for bandwidth-heavy
    transfers), and the same pattern shows up on Calendar/Contacts
    export endpoints. Refusing to follow would lose every download.
    The Authorization header is preserved across the redirect by
    aiohttp because the host is the same Infomaniak property.
    """
    url = f"{baseUrl.rstrip('/')}/{endpoint.lstrip('/')}"
    headers = {"Authorization": f"Bearer {token}"}
    timeout = aiohttp.ClientTimeout(total=120)
    try:
        async with aiohttp.ClientSession(timeout=timeout) as session:
            async with session.get(url, headers=headers, allow_redirects=True) as resp:
                if resp.status == 200:
                    return await resp.read()
                logger.warning(
                    f"Infomaniak download {url} -> {resp.status}: "
                    f"{(await resp.text())[:300]}"
                )
                return None
    except Exception as e:
        logger.error(f"Infomaniak download {url} crashed: {e}")
        return None
def _unwrapData(payload: Any) -> Any:
    """Infomaniak wraps successful responses as ``{result: 'success', data: ...}``."""
    if isinstance(payload, dict) and "data" in payload and "result" in payload:
        return payload.get("data")
    return payload
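A quick illustration of the unwrap (the envelope shape follows the docstring; the drive record is a made-up example):

wrapped = {"result": "success", "data": {"drives": [{"id": 140, "name": "kDrive Pro"}]}}
assert _unwrapData(wrapped) == {"drives": [{"id": 140, "name": "kDrive Pro"}]}
assert _unwrapData({"drives": []}) == {"drives": []}  # already-bare payloads pass through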
def _firstOwnerRecord(payload: Any, listKey: str) -> Optional[Dict[str, Any]]:
    """Pick the first user-owned record from a PIM list response.

    Both PIM Calendar (``calendars``) and PIM Contacts (``addressbooks``)
    return ``{result, data: {<listKey>: [...]}}``. Owner-records have a
    positive numeric ``user_id`` and an integer ``account_id``; shared /
    public records (e.g. holiday calendars) carry ``user_id = -1`` and
    ``account_id = null`` and are skipped.
    """
    data = _unwrapData(payload) if payload else None
    if not isinstance(data, dict):
        return None
    records = data.get(listKey)
    if not isinstance(records, list):
        return None
    for rec in records:
        if not isinstance(rec, dict):
            continue
        userId = rec.get("user_id")
        accountId = rec.get("account_id")
        if isinstance(userId, int) and userId > 0 and isinstance(accountId, int):
            return rec
    return None
async def resolveOwnerIdentity(token: str) -> InfomaniakOwnerIdentity:
    """Derive the PAT owner's display identity from PIM Calendar / Contacts.

    Used purely for UI display on the connection (``externalUsername`` /
    ``externalId``). The PIM endpoints embed the kSuite ``account_id``
    and the user's display name in their owner records, which is what
    the ConnectionsPage shows.

    Calendar is queried first because it is the more universally
    provisioned PIM service; Contacts is the equivalent fallback.
    Raises :class:`InfomaniakIdentityError` when neither yields an
    owner record.
    """
    sources = (
        (_CALENDAR_BASE, f"{_PIM_PREFIX}/calendar", "calendars"),
        (_CONTACTS_BASE, f"{_PIM_PREFIX}/addressbook", "addressbooks"),
    )
    for baseUrl, endpoint, listKey in sources:
        payload = await _infomaniakGet(token, endpoint, baseUrl=baseUrl)
        if isinstance(payload, dict) and payload.get("error"):
            continue
        owner = _firstOwnerRecord(payload, listKey)
        if owner is None:
            continue
        return InfomaniakOwnerIdentity(
            accountId=int(owner["account_id"]),
            displayName=owner.get("name") or None,
        )
    raise InfomaniakIdentityError(
        "Could not resolve Infomaniak owner identity from PIM Calendar or "
        "Contacts. The PAT must carry 'workspace:calendar' or "
        "'workspace:contact' so we can label the connection."
    )
async def listAccessibleDrives(token: str) -> List[Dict[str, Any]]:
    """Return every kDrive the PAT can reach (admin OR user role).

    Hits ``GET /2/drive/init?with=drives`` -- the only PAT-friendly
    endpoint that enumerates a user's drives **independently of the
    Drive-Manager admin role**. The plain ``/2/drive?account_id=...``
    listing is filtered to drives where the caller is an admin and
    therefore returns an empty array for everyone with ``role: user``,
    even though the same user can read/write the drive's files via
    ``/2/drive/{driveId}/...``.

    The endpoint requires only the ``drive`` PAT scope -- no
    ``accounts``, no ``user_info``, no admin permission. Each entry
    matches the shape documented for ``GET /2/drive/{drive_id}``
    (``id``, ``name``, ``account_id``, ``role``, ...).

    Raises :class:`InfomaniakIdentityError` when the PAT does not carry
    the ``drive`` scope or the response is malformed.
    """
    payload = await _infomaniakGet(token, "/2/drive/init?with=drives")
    if isinstance(payload, dict) and payload.get("error"):
        raise InfomaniakIdentityError(
            "Could not list Infomaniak kDrives. The PAT must carry the "
            f"'drive' scope (/2/drive/init said: {payload['error']})."
        )
    data = _unwrapData(payload)
    if not isinstance(data, dict):
        raise InfomaniakIdentityError(
            "Unexpected /2/drive/init response shape (expected an object)."
        )
    drives = data.get("drives") or []
    if not isinstance(drives, list):
        raise InfomaniakIdentityError(
            "Unexpected /2/drive/init response: 'drives' is not a list."
        )
    return [d for d in drives if isinstance(d, dict) and d.get("id")]
def _lastNumericSegment(segments: List[str]) -> Optional[str]:
    """Return the last all-digit segment (kDrive file/folder IDs are int).

    The agent sometimes appends the human-readable filename to a path,
    e.g. ``/2980592/12/platform-overview.html``. The kDrive API does
    not accept names -- only numeric IDs -- so we strip trailing
    non-numeric segments and pick the last integer ID.
    Returns ``None`` if no numeric segment exists.
    """
    for seg in reversed(segments):
        if seg.isdigit():
            return seg
    return None
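Behaviour sketch, with inputs mirroring the docstring's example path:

assert _lastNumericSegment(["2980592", "12", "platform-overview.html"]) == "12"
assert _lastNumericSegment(["2980592"]) == "2980592"
assert _lastNumericSegment(["platform-overview.html"]) is None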
class KdriveAdapter(ServiceAdapter):
    """kDrive ServiceAdapter -- browse drives, folders, files.

    Drive enumeration goes through :func:`listAccessibleDrives` which
    calls ``/2/drive/init?with=drives``. That endpoint returns every
    drive the PAT can read regardless of the Drive-Manager admin role
    -- unlike the documented ``/2/drive?account_id=...`` listing which
    silently returns an empty array for users with ``role: 'user'``
    (the most common case for kSuite members).

    The drive list is cached on the adapter instance so each browse
    pays for one ``/2/drive/init`` call at most.

    File-vs-folder handling: a DataSource may point at a single file
    (e.g. ``/{driveId}/{fileId}`` where ``fileId`` is a regular file).
    Calling ``/files/{fileId}/files`` on a file answers
    ``400 destination_not_a_directory`` -- so :meth:`browse` first
    fetches the item's metadata and, if ``type=file``, returns a
    one-element list describing the file itself instead of pretending
    the directory is empty.
    """

    def __init__(self, accessToken: str):
        self._token = accessToken
        self._drives: Optional[List[Dict[str, Any]]] = None

    async def _ensureDrives(self) -> List[Dict[str, Any]]:
        if self._drives is not None:
            return self._drives
        self._drives = await listAccessibleDrives(self._token)
        return self._drives

    async def _fetchItemMeta(self, driveId: str, fileId: str) -> Optional[Dict[str, Any]]:
        """Return the kDrive file/folder metadata dict, or ``None`` on error."""
        meta = await _infomaniakGet(self._token, f"/2/drive/{driveId}/files/{fileId}")
        if not isinstance(meta, dict) or meta.get("error"):
            return None
        data = _unwrapData(meta)
        return data if isinstance(data, dict) else None
async def browse(
|
||||
self,
|
||||
path: str,
|
||||
filter: Optional[str] = None,
|
||||
limit: Optional[int] = None,
|
||||
) -> List[ExternalEntry]:
|
||||
cleanPath = (path or "").strip("/")
|
||||
segments = [s for s in cleanPath.split("/") if s]
|
||||
|
||||
if not segments:
|
||||
return await self._listDrives()
|
||||
|
||||
driveId = segments[0]
|
||||
if len(segments) == 1:
|
||||
return await self._listChildren(driveId, fileId=None, limit=limit)
|
||||
|
||||
fileId = _lastNumericSegment(segments[1:])
|
||||
if fileId is None:
|
||||
return []
|
||||
|
||||
meta = await self._fetchItemMeta(driveId, fileId)
|
||||
if meta is not None and meta.get("type") == "file":
|
||||
return [ExternalEntry(
|
||||
name=meta.get("name") or fileId,
|
||||
path=f"/{driveId}/{fileId}",
|
||||
isFolder=False,
|
||||
size=meta.get("size"),
|
||||
mimeType=meta.get("mime_type"),
|
||||
lastModified=meta.get("last_modified_at"),
|
||||
metadata={"id": fileId, "kind": "file"},
|
||||
)]
|
||||
return await self._listChildren(driveId, fileId=fileId, limit=limit)
|
||||
|
||||
async def _listDrives(self) -> List[ExternalEntry]:
|
||||
drives = await self._ensureDrives()
|
||||
entries: List[ExternalEntry] = []
|
||||
for drive in drives:
|
||||
driveId = str(drive.get("id", ""))
|
||||
if not driveId:
|
||||
continue
|
||||
entries.append(ExternalEntry(
|
||||
name=drive.get("name") or driveId,
|
||||
path=f"/{driveId}",
|
||||
isFolder=True,
|
||||
metadata={
|
||||
"id": driveId,
|
||||
"kind": "drive",
|
||||
"accountId": drive.get("account_id"),
|
||||
"role": drive.get("role"),
|
||||
},
|
||||
))
|
||||
return entries
|
||||
|
||||
    async def _listChildren(
        self,
        driveId: str,
        fileId: Optional[str],
        limit: Optional[int],
    ) -> List[ExternalEntry]:
        if fileId is None:
            endpoint = f"/2/drive/{driveId}/files"
        else:
            endpoint = f"/2/drive/{driveId}/files/{fileId}/files"

        pageSize = max(1, min(int(limit or 200), 1000))
        endpoint = f"{endpoint}?per_page={pageSize}"

        result = await _infomaniakGet(self._token, endpoint)
        if isinstance(result, dict) and result.get("error"):
            logger.warning(
                f"kDrive list-children {driveId}/{fileId or 'root'} failed: {result['error']}"
            )
            return []
        data = _unwrapData(result)
        items = data if isinstance(data, list) else data.get("items", []) if isinstance(data, dict) else []

        entries: List[ExternalEntry] = []
        for item in items:
            itemId = str(item.get("id", ""))
            if not itemId:
                continue
            isFolder = item.get("type") == "dir"
            entries.append(ExternalEntry(
                name=item.get("name", itemId),
                path=f"/{driveId}/{itemId}",
                isFolder=isFolder,
                size=item.get("size") if not isFolder else None,
                mimeType=item.get("mime_type") if not isFolder else None,
                lastModified=item.get("last_modified_at"),
                metadata={"id": itemId, "kind": item.get("type", "")},
            ))
        return entries

    async def download(self, path: str) -> DownloadResult:
        segments = [s for s in (path or "").strip("/").split("/") if s]
        if len(segments) < 2:
            return DownloadResult()
        driveId = segments[0]
        # Agent may append the filename: ``/{driveId}/{fileId}/{name}``.
        # Pull the last numeric segment instead of trusting segments[-1].
        fileId = _lastNumericSegment(segments[1:])
        if fileId is None:
            return DownloadResult()

        meta = await self._fetchItemMeta(driveId, fileId)
        fileName = (meta or {}).get("name") or fileId
        mimeType = (meta or {}).get("mime_type") or "application/octet-stream"

        content = await _infomaniakDownload(
            self._token, f"/2/drive/{driveId}/files/{fileId}/download"
        )
        if content is None:
            return DownloadResult()
        return DownloadResult(data=content, fileName=fileName, mimeType=mimeType)

    async def upload(self, path: str, data: bytes, fileName: str) -> dict:
        return {"error": "kDrive upload not yet implemented"}

    async def search(
        self,
        query: str,
        path: Optional[str] = None,
        limit: Optional[int] = None,
    ) -> List[ExternalEntry]:
        segments = [s for s in (path or "").strip("/").split("/") if s]
        if not segments:
            drives = await self._listDrives()
            if not drives:
                return []
            driveId = (drives[0].metadata or {}).get("id") or drives[0].path.strip("/")
        else:
            driveId = segments[0]

        pageSize = max(1, min(int(limit or 50), 200))
        endpoint = f"/2/drive/{driveId}/files/search?query={query}&per_page={pageSize}"
        result = await _infomaniakGet(self._token, endpoint)
        if isinstance(result, dict) and result.get("error"):
            return []
        data = _unwrapData(result)
        items = data if isinstance(data, list) else data.get("items", []) if isinstance(data, dict) else []

        entries: List[ExternalEntry] = []
        for item in items:
            itemId = str(item.get("id", ""))
            if not itemId:
                continue
            isFolder = item.get("type") == "dir"
            entries.append(ExternalEntry(
                name=item.get("name", itemId),
                path=f"/{driveId}/{itemId}",
                isFolder=isFolder,
                size=item.get("size") if not isFolder else None,
                mimeType=item.get("mime_type") if not isFolder else None,
                metadata={"id": itemId},
            ))
        return entries


def _safeFileName(label: str, fallback: str) -> str:
    """Sanitize a string for use as a filename. Trims and caps at 80 chars."""
    cleaned = re.sub(r'[<>:"/\\|?*\x00-\x1f]', "_", str(label or "")).strip(". ")
    return cleaned[:80] or fallback

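A minimal usage sketch of the kDrive adapter's two-level browse described above; the PAT string is a placeholder, not a real token:

import asyncio

async def demoKdriveBrowse() -> None:
    # Hypothetical token; requires the 'drive' PAT scope in practice.
    adapter = KdriveAdapter("PAT_WITH_DRIVE_SCOPE")
    drives = await adapter.browse("")                # level 1: drives
    if drives:
        root = await adapter.browse(drives[0].path)  # level 2: drive root listing
        print([e.name for e in root])

asyncio.run(demoKdriveBrowse())
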
class CalendarAdapter(ServiceAdapter):
    """Infomaniak Calendar adapter -- browse calendars + events, .ics download.

    Uses the public PIM endpoints at ``calendar.infomaniak.com/api/pim``,
    which authenticate with the PAT scope ``workspace:calendar``.

    Path layout:
        ``/`` -> list calendars
        ``/{calendarId}`` -> list events of that calendar
        ``/{calendarId}/{eventId}`` -> single event (download as .ics)

    Endpoint particulars:
        Listing events runs against ``/api/pim/event`` with the calendar
        id as a query arg (the per-calendar nested route
        ``/calendar/{id}/event`` is **not** PAT-friendly -- it 302s to the
        OAuth login page). Infomaniak enforces a hard ``from``/``to``
        window of less than 3 months, so this adapter queries a fixed
        90-day window centered on today (30 days back, 60 days forward),
        which covers typical UDB browsing. Date format is ``Y-m-d H:i:s``.
        Event detail and ``.ics`` export are addressed by event id alone
        (``/api/pim/event/{eventId}`` and ``.../export``); the calendar
        id from the path is kept only for tree-navigation continuity.
    """

    # Vendor enforces ``Range must be lower than 3 months``. We stay
    # comfortably below to keep one call per browse.
    _PAST_DAYS = 30
    _FUTURE_DAYS = 60

    def __init__(self, accessToken: str):
        self._token = accessToken

    async def browse(
        self,
        path: str,
        filter: Optional[str] = None,
        limit: Optional[int] = None,
    ) -> List[ExternalEntry]:
        segments = [s for s in (path or "").strip("/").split("/") if s]
        if not segments:
            return await self._listCalendars()
        if len(segments) == 1:
            return await self._listEvents(segments[0], limit=limit)
        return []

    async def _listCalendars(self) -> List[ExternalEntry]:
        result = await _infomaniakGet(
            self._token, f"{_PIM_PREFIX}/calendar", baseUrl=_CALENDAR_BASE
        )
        if isinstance(result, dict) and result.get("error"):
            logger.warning(f"Calendar list-calendars failed: {result['error']}")
            return []
        data = _unwrapData(result)
        calendars = data.get("calendars", []) if isinstance(data, dict) else []
        entries: List[ExternalEntry] = []
        for cal in calendars:
            calId = str(cal.get("id", ""))
            if not calId:
                continue
            isShared = (cal.get("user_id") or 0) <= 0 or cal.get("account_id") is None
            entries.append(ExternalEntry(
                name=cal.get("name") or calId,
                path=f"/{calId}",
                isFolder=True,
                metadata={
                    "id": calId,
                    "kind": "calendar",
                    "color": cal.get("color"),
                    "shared": isShared,
                    "default": bool(cal.get("default")),
                },
            ))
        return entries

    def _eventWindow(self) -> tuple:
        now = datetime.now(timezone.utc)
        fromStr = (now - timedelta(days=self._PAST_DAYS)).strftime("%Y-%m-%d %H:%M:%S")
        toStr = (now + timedelta(days=self._FUTURE_DAYS)).strftime("%Y-%m-%d %H:%M:%S")
        return fromStr, toStr

    async def _listEvents(
        self,
        calendarId: str,
        limit: Optional[int],
    ) -> List[ExternalEntry]:
        fromStr, toStr = self._eventWindow()
        endpoint = (
            f"{_PIM_PREFIX}/event"
            f"?calendar_id={calendarId}"
            f"&from={quote(fromStr)}"
            f"&to={quote(toStr)}"
        )
        result = await _infomaniakGet(self._token, endpoint, baseUrl=_CALENDAR_BASE)
        if isinstance(result, dict) and result.get("error"):
            logger.warning(f"Calendar list-events {calendarId} failed: {result['error']}")
            return []
        data = _unwrapData(result)
        events = data if isinstance(data, list) else data.get("events", []) if isinstance(data, dict) else []
        entries: List[ExternalEntry] = []
        for ev in events:
            evId = str(ev.get("id") or ev.get("uid") or "")
            if not evId:
                continue
            title = ev.get("title") or ev.get("summary") or "(no title)"
            entries.append(ExternalEntry(
                name=title,
                path=f"/{calendarId}/{evId}",
                isFolder=False,
                metadata={
                    "id": evId,
                    "kind": "event",
                    "start": ev.get("start"),
                    "end": ev.get("end"),
                    "location": ev.get("location"),
                    "updated": ev.get("updated_at"),
                },
            ))
        if limit is not None:
            return entries[: int(limit)]
        return entries

    async def download(self, path: str) -> DownloadResult:
        segments = [s for s in (path or "").strip("/").split("/") if s]
        if len(segments) < 2:
            return DownloadResult()
        eventId = segments[1]

        content = await _infomaniakDownload(
            self._token,
            f"{_PIM_PREFIX}/event/{eventId}/export",
            baseUrl=_CALENDAR_BASE,
        )
        if content is None:
            return DownloadResult()

        title = eventId
        meta = await _infomaniakGet(
            self._token,
            f"{_PIM_PREFIX}/event/{eventId}",
            baseUrl=_CALENDAR_BASE,
        )
        if isinstance(meta, dict) and not meta.get("error"):
            unwrapped = _unwrapData(meta)
            if isinstance(unwrapped, dict):
                event = unwrapped.get("event") if "event" in unwrapped else unwrapped
                if isinstance(event, dict):
                    title = event.get("title") or event.get("summary") or eventId
        return DownloadResult(
            data=content,
            fileName=f"{_safeFileName(title, 'event')}.ics",
            mimeType="text/calendar",
        )

    async def upload(self, path: str, data: bytes, fileName: str) -> dict:
        return {"error": "Calendar upload not yet implemented"}

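To make the ``from``/``to`` encoding concrete, a standalone sketch of the query string the adapter builds; the calendar id 42 and the fixed "now" are invented for the example:

from datetime import datetime, timedelta, timezone
from urllib.parse import quote

now = datetime(2025, 6, 15, 12, 0, 0, tzinfo=timezone.utc)  # fixed for reproducibility
fromStr = (now - timedelta(days=30)).strftime("%Y-%m-%d %H:%M:%S")
toStr = (now + timedelta(days=60)).strftime("%Y-%m-%d %H:%M:%S")
endpoint = f"/api/pim/event?calendar_id=42&from={quote(fromStr)}&to={quote(toStr)}"
# -> /api/pim/event?calendar_id=42&from=2025-05-16%2012%3A00%3A00&to=2025-08-14%2012%3A00%3A00
print(endpoint)
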
    async def search(
        self,
        query: str,
        path: Optional[str] = None,
        limit: Optional[int] = None,
    ) -> List[ExternalEntry]:
        # The PIM Calendar API has no public search endpoint we can rely on.
        # Cheap fallback: list events in the current calendar (or all of
        # them) within the default window and filter case-insensitively on
        # title/location.
        calendars = (
            await self._listCalendars()
            if not path
            else [ExternalEntry(name="", path=path, isFolder=True)]
        )
        if not calendars:
            return []
        needle = (query or "").strip().lower()
        results: List[ExternalEntry] = []
        for cal in calendars:
            calId = (cal.metadata or {}).get("id") or cal.path.strip("/")
            for ev in await self._listEvents(calId, limit=limit):
                hay = " ".join(
                    str(v) for v in (
                        ev.name,
                        (ev.metadata or {}).get("location") or "",
                    )
                ).lower()
                if not needle or needle in hay:
                    results.append(ev)
                if limit is not None and len(results) >= int(limit):
                    break
        return results[: int(limit)] if limit is not None else results


def _vcardEscape(value: Any) -> str:
    """Escape a value for vCard 3.0 -- backslash, comma, semicolon, newline."""
    text = "" if value is None else str(value)
    return (
        text.replace("\\", "\\\\")
        .replace(";", "\\;")
        .replace(",", "\\,")
        .replace("\r\n", "\\n")
        .replace("\n", "\\n")
    )

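A quick sanity check of the escaping rules above, with made-up input (assumes ``_vcardEscape`` is in scope):

# Sketch only: vCard TEXT escaping -- backslash first, then ; , and newlines.
assert _vcardEscape("Smith; John, Jr.\nBasel") == "Smith\\; John\\, Jr.\\nBasel"
assert _vcardEscape(None) == ""
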
def _renderInfomaniakVcard(record: Dict[str, Any]) -> str:
    """Render an Infomaniak contact record as a vCard 3.0 string.

    The Contacts PIM ``/contact/{id}/export`` endpoint is not PAT-friendly
    (302s to the OAuth login page), and ``/contact/{id}`` returns 500 with
    a PAT, so we cannot retrieve the canonical .vcf or detail blob from
    Infomaniak. Instead we synthesize a vCard 3.0 payload from the
    listing record fetched with ``with=emails,phones,addresses,details``.

    vCard 3.0 is the common-denominator format universally accepted by
    Outlook, Google Contacts, Apple Contacts and Thunderbird (4.0 still
    has poor Outlook import compatibility).
    """
    firstname = record.get("firstname") or ""
    lastname = record.get("lastname") or ""
    fullName = (
        record.get("name")
        or " ".join(p for p in (firstname, lastname) if p).strip()
        or "Contact"
    )
    organization = record.get("organization") or ""
    note = record.get("note") or ""
    emails = record.get("emails") or []
    phones = record.get("phones") or []
    addresses = record.get("addresses") or []
    websites = record.get("websites") or []

    lines = ["BEGIN:VCARD", "VERSION:3.0"]
    # N: Last;First;Middle;Prefix;Suffix
    lines.append(f"N:{_vcardEscape(lastname)};{_vcardEscape(firstname)};;;")
    lines.append(f"FN:{_vcardEscape(fullName)}")
    if organization:
        lines.append(f"ORG:{_vcardEscape(organization)}")
    for email in emails:
        if isinstance(email, str) and email:
            lines.append(f"EMAIL;TYPE=INTERNET:{_vcardEscape(email)}")
        elif isinstance(email, dict) and email.get("address"):
            lines.append(f"EMAIL;TYPE=INTERNET:{_vcardEscape(email['address'])}")
    for phone in phones:
        if isinstance(phone, str) and phone:
            lines.append(f"TEL:{_vcardEscape(phone)}")
        elif isinstance(phone, dict) and phone.get("number"):
            lines.append(f"TEL:{_vcardEscape(phone['number'])}")
    for addr in addresses:
        if isinstance(addr, dict):
            # ADR: PO-Box;Extended;Street;City;Region;Postal;Country
            lines.append(
                "ADR:;;"
                f"{_vcardEscape(addr.get('street'))};"
                f"{_vcardEscape(addr.get('city'))};"
                f"{_vcardEscape(addr.get('region'))};"
                f"{_vcardEscape(addr.get('zip') or addr.get('postal_code'))};"
                f"{_vcardEscape(addr.get('country'))}"
            )
    for site in websites:
        if isinstance(site, str) and site:
            lines.append(f"URL:{_vcardEscape(site)}")
        elif isinstance(site, dict) and site.get("url"):
            lines.append(f"URL:{_vcardEscape(site['url'])}")
    if note:
        lines.append(f"NOTE:{_vcardEscape(note)}")
    lines.append("END:VCARD")
    return "\r\n".join(lines) + "\r\n"

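For illustration, rendering a minimal, made-up listing record produces output like this:

record = {  # hypothetical record as returned by the contact listing
    "firstname": "Ada",
    "lastname": "Lovelace",
    "emails": ["ada@example.org"],
}
print(_renderInfomaniakVcard(record))
# BEGIN:VCARD
# VERSION:3.0
# N:Lovelace;Ada;;;
# FN:Ada Lovelace
# EMAIL;TYPE=INTERNET:ada@example.org
# END:VCARD
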
class ContactAdapter(ServiceAdapter):
    """Infomaniak Contacts adapter -- browse address books + contacts, .vcf download.

    Uses the public PIM endpoint at ``contacts.infomaniak.com/api/pim``,
    which authenticates with the PAT scope ``workspace:contact``.

    Path layout:
        ``/`` -> list address books
        ``/{addressBookId}`` -> list contacts in that book
        ``/{addressBookId}/{contactId}`` -> single contact (download as .vcf)

    Endpoint particulars:
        Listing both address books and contacts is PAT-friendly. The
        contact-listing call uses ``with=emails,phones,addresses,details``
        so each record arrives with all the fields needed for vCard
        synthesis -- Infomaniak skips them by default. Detail and export
        endpoints (``/contact/{id}``, ``/contact/{id}/export``) are **not**
        PAT-friendly (the former 500s, the latter 302s to OAuth), so the
        ``download`` path re-fetches the listing and renders the vCard
        ourselves via :func:`_renderInfomaniakVcard`.
    """

    _DEFAULT_CONTACT_LIMIT = 200
    _MAX_CONTACT_LIMIT = 1000
    _CONTACT_FIELDS = "emails,phones,addresses,details"

    def __init__(self, accessToken: str):
        self._token = accessToken

    async def browse(
        self,
        path: str,
        filter: Optional[str] = None,
        limit: Optional[int] = None,
    ) -> List[ExternalEntry]:
        segments = [s for s in (path or "").strip("/").split("/") if s]
        if not segments:
            return await self._listAddressBooks()
        if len(segments) == 1:
            return await self._listContacts(segments[0], limit=limit)
        return []

    async def _listAddressBooks(self) -> List[ExternalEntry]:
        result = await _infomaniakGet(
            self._token, f"{_PIM_PREFIX}/addressbook", baseUrl=_CONTACTS_BASE
        )
        if isinstance(result, dict) and result.get("error"):
            logger.warning(f"Contacts list-addressbooks failed: {result['error']}")
            return []
        data = _unwrapData(result)
        books = data.get("addressbooks", []) if isinstance(data, dict) else []
        entries: List[ExternalEntry] = []
        for book in books:
            bookId = str(book.get("id", ""))
            if not bookId:
                continue
            isShared = bool(book.get("is_shared")) or (book.get("user_id") or 0) <= 0
            # The shared organisation directory has an empty name -- give it a
            # human label so the UDB tree is not blank.
            name = book.get("name") or (
                "Organisation" if book.get("is_dynamic_organisation_member_directory") else bookId
            )
            entries.append(ExternalEntry(
                name=name,
                path=f"/{bookId}",
                isFolder=True,
                metadata={
                    "id": bookId,
                    "kind": "addressbook",
                    "color": book.get("color"),
                    "shared": isShared,
                    "default": bool(book.get("default")),
                },
            ))
        return entries

    async def _fetchContacts(
        self,
        addressBookId: str,
        perPage: int,
    ) -> List[Dict[str, Any]]:
        """Raw listing call -- shared by browse and download."""
        endpoint = (
            f"{_PIM_PREFIX}/addressbook/{addressBookId}/contact"
            f"?per_page={perPage}&with={self._CONTACT_FIELDS}"
        )
        result = await _infomaniakGet(self._token, endpoint, baseUrl=_CONTACTS_BASE)
        if isinstance(result, dict) and result.get("error"):
            logger.warning(
                f"Contacts list-contacts {addressBookId} failed: {result['error']}"
            )
            return []
        data = _unwrapData(result)
        if isinstance(data, list):
            return [c for c in data if isinstance(c, dict)]
        if isinstance(data, dict):
            contacts = data.get("contacts", [])
            return [c for c in contacts if isinstance(c, dict)]
        return []

    async def _listContacts(
        self,
        addressBookId: str,
        limit: Optional[int],
    ) -> List[ExternalEntry]:
        effectiveLimit = self._DEFAULT_CONTACT_LIMIT if limit is None else max(
            1, min(int(limit), self._MAX_CONTACT_LIMIT),
        )
        contacts = await self._fetchContacts(addressBookId, perPage=effectiveLimit)
        entries: List[ExternalEntry] = []
        for c in contacts:
            cId = str(c.get("id") or c.get("uid") or "")
            if not cId:
                continue
            firstName = c.get("firstname")
            lastName = c.get("lastname")
            displayName = (
                c.get("name")
                or " ".join(p for p in (firstName, lastName) if p).strip()
                or (c.get("emails") or [None])[0]
                or cId
            )
            firstEmail = (c.get("emails") or [None])[0]
            firstPhone = (c.get("phones") or [None])[0]
            entries.append(ExternalEntry(
                name=str(displayName),
                path=f"/{addressBookId}/{cId}",
                isFolder=False,
                metadata={
                    "id": cId,
                    "kind": "contact",
                    "email": firstEmail,
                    "phone": firstPhone,
                    "organization": c.get("organization"),
                },
            ))
        return entries

    async def download(self, path: str) -> DownloadResult:
        segments = [s for s in (path or "").strip("/").split("/") if s]
        if len(segments) < 2:
            return DownloadResult()
        addressBookId, contactId = segments[0], segments[1]

        # The PIM contact-detail endpoint (``/contact/{id}``) returns 500
        # against a PAT, and ``/contact/{id}/export`` 302s to OAuth. We
        # therefore re-fetch the listing (which IS PAT-friendly) with all
        # vCard-relevant fields, then synthesize the .vcf ourselves.
        contacts = await self._fetchContacts(
            addressBookId, perPage=self._MAX_CONTACT_LIMIT
        )
        record = next((c for c in contacts if str(c.get("id")) == contactId), None)
        if record is None:
            logger.warning(
                f"Contacts download: contact {contactId} not found in book "
                f"{addressBookId}"
            )
            return DownloadResult()

        firstName = record.get("firstname") or ""
        lastName = record.get("lastname") or ""
        displayName = (
            record.get("name")
            or " ".join(p for p in (firstName, lastName) if p).strip()
            or contactId
        )
        vcardText = _renderInfomaniakVcard(record)
        return DownloadResult(
            data=vcardText.encode("utf-8"),
            fileName=f"{_safeFileName(displayName, 'contact')}.vcf",
            mimeType="text/vcard",
        )

    async def upload(self, path: str, data: bytes, fileName: str) -> dict:
        return {"error": "Contacts upload not yet implemented"}

    async def search(
        self,
        query: str,
        path: Optional[str] = None,
        limit: Optional[int] = None,
    ) -> List[ExternalEntry]:
        # No public search endpoint -- list contacts of the current (or all)
        # address books and filter client-side on display name / email.
        books = (
            await self._listAddressBooks()
            if not path
            else [ExternalEntry(name="", path=path, isFolder=True)]
        )
        if not books:
            return []
        needle = (query or "").strip().lower()
        results: List[ExternalEntry] = []
        for book in books:
            bookId = (book.metadata or {}).get("id") or book.path.strip("/")
            for c in await self._listContacts(bookId, limit=limit):
                hay = " ".join(
                    str(v) for v in (
                        c.name,
                        (c.metadata or {}).get("email") or "",
                        (c.metadata or {}).get("organization") or "",
                    )
                ).lower()
                if not needle or needle in hay:
                    results.append(c)
                if limit is not None and len(results) >= int(limit):
                    break
        return results[: int(limit)] if limit is not None else results

class InfomaniakConnector(ProviderConnector):
    """Infomaniak ProviderConnector -- kDrive + Calendar + Contacts today.

    Mail is reserved on the PAT (scope ``workspace:mail``) but not wired
    up here yet -- Infomaniak has no public PAT-friendly Mail endpoint
    today (the PIM Mail routes 302 to OAuth, the legacy ``/api/mail`` route
    301-redirects to an internal Cyrus port). Once a working endpoint is
    found, the corresponding adapter can be slotted into ``_SERVICE_MAP``
    without any token rotation on the user side.
    """

    _SERVICE_MAP = {
        "kdrive": KdriveAdapter,
        "calendar": CalendarAdapter,
        "contact": ContactAdapter,
    }

    def getAvailableServices(self) -> List[str]:
        return list(self._SERVICE_MAP.keys())

    def getServiceAdapter(self, service: str) -> ServiceAdapter:
        adapterClass = self._SERVICE_MAP.get(service)
        if not adapterClass:
            raise ValueError(
                f"Unknown Infomaniak service: {service}. "
                f"Available: {list(self._SERVICE_MAP.keys())}"
            )
        return adapterClass(self.accessToken)

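Dispatch through the connector is then a plain dict lookup; a sketch under the assumption that the ProviderConnector base exposes the token as shown (the construction call and token value are hypothetical):

connector = InfomaniakConnector(accessToken="PAT_PLACEHOLDER")  # hypothetical ctor
print(connector.getAvailableServices())   # ['kdrive', 'calendar', 'contact']
adapter = connector.getServiceAdapter("calendar")
assert isinstance(adapter, CalendarAdapter)
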
@@ -126,6 +126,11 @@ def _stripGraphBase(url: str) -> str:

 def _graphItemToExternalEntry(item: Dict[str, Any], basePath: str = "") -> ExternalEntry:
     isFolder = "folder" in item
+    # Graph exposes the driveItem content hash as ``eTag`` (quoted) or
+    # ``cTag``; we normalise to a "revision" string so callers can use it as a
+    # stable ``contentVersion`` for idempotent ingestion without re-downloading
+    # file bytes.
+    revision = item.get("eTag") or item.get("cTag")
     return ExternalEntry(
         name=item.get("name", ""),
         path=f"{basePath}/{item.get('name', '')}" if basePath else item.get("name", ""),
@@ -137,6 +142,9 @@ def _graphItemToExternalEntry(item: Dict[str, Any], basePath: str = "") -> Exter
             "id": item.get("id"),
             "webUrl": item.get("webUrl"),
             "childCount": item.get("folder", {}).get("childCount") if isFolder else None,
+            "revision": revision,
+            "lastModifiedDateTime": item.get("lastModifiedDateTime"),
+            "parentReference": item.get("parentReference", {}),
         },
     )
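The revision string exists for change detection on the consuming side; a sketch of that use, where the version store ``knownVersions`` is hypothetical:

def shouldReingest(entry: ExternalEntry, knownVersions: dict) -> bool:
    # Re-download only when the stored contentVersion no longer matches
    # (or when no revision is available and we cannot tell).
    revision = (entry.metadata or {}).get("revision")
    return revision is None or revision != knownVersions.get(entry.path)
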
@@ -167,21 +175,36 @@ class SharepointAdapter(_GraphApiMixin, ServiceAdapter):
             return await self._discoverSites()

         if not folderPath or folderPath == "/":
-            endpoint = f"sites/{siteId}/drive/root/children"
+            endpoint: Optional[str] = f"sites/{siteId}/drive/root/children?$top=200"
         else:
             cleanPath = folderPath.lstrip("/")
-            endpoint = f"sites/{siteId}/drive/root:/{cleanPath}:/children"
+            endpoint = f"sites/{siteId}/drive/root:/{cleanPath}:/children?$top=200"

-        result = await self._graphGet(endpoint)
-        if "error" in result:
-            logger.warning(f"SharePoint browse failed: {result['error']}")
-            return []
+        # Follow @odata.nextLink until a hard cap is reached so large libraries
+        # are fully enumerated (required for bootstrap). Per-page size uses
+        # Graph's max supported value to minimise round-trips.
+        effectiveLimit = int(limit) if limit is not None else None
+        items: List[Dict[str, Any]] = []
+        hardCap = 5000
+        while endpoint and len(items) < hardCap:
+            result = await self._graphGet(endpoint)
+            if "error" in result:
+                logger.warning(f"SharePoint browse failed: {result['error']}")
+                break
+            for raw in result.get("value", []) or []:
+                items.append(raw)
+                if effectiveLimit is not None and len(items) >= effectiveLimit:
+                    break
+            if effectiveLimit is not None and len(items) >= effectiveLimit:
+                break
+            nextLink = result.get("@odata.nextLink")
+            endpoint = _stripGraphBase(nextLink) if nextLink else None

-        entries = [_graphItemToExternalEntry(item, path) for item in result.get("value", [])]
+        entries = [_graphItemToExternalEntry(item, path) for item in items]
         if filter:
             entries = [e for e in entries if _matchFilter(e, filter)]
-        if limit is not None:
-            entries = entries[: max(1, int(limit))]
+        if effectiveLimit is not None:
+            entries = entries[: max(1, effectiveLimit)]
         return entries

     async def _discoverSites(self) -> List[ExternalEntry]:
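``_stripGraphBase`` (defined earlier in this file) is what lets the loop feed ``@odata.nextLink`` back into ``_graphGet``, which takes endpoints relative to the Graph base. Conceptually, with an illustrative URL:

nextLink = "https://graph.microsoft.com/v1.0/sites/abc/drive/root/children?$skiptoken=X"
# _graphGet expects a relative endpoint, so the absolute prefix must go.
relative = nextLink.removeprefix("https://graph.microsoft.com/v1.0/")
print(relative)  # sites/abc/drive/root/children?$skiptoken=X
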
@@ -841,6 +864,285 @@ class OneDriveAdapter(_GraphApiMixin, ServiceAdapter):
        return entries


# ---------------------------------------------------------------------------
# Calendar Adapter
# ---------------------------------------------------------------------------

class CalendarAdapter(_GraphApiMixin, ServiceAdapter):
    """ServiceAdapter for Outlook Calendar via Microsoft Graph.

    Path conventions:
        ``""`` / ``"/"`` -> list user calendars
        ``"/<calendarId>"`` -> list events in that calendar
        ``"/<calendarId>/<eventId>"`` -> reserved for future event detail browse

    Downloads return a synthesised ``.ics`` (VCALENDAR/VEVENT) since Microsoft
    Graph does not expose a ``/$value`` endpoint for events.
    """

    _DEFAULT_EVENT_LIMIT = 100
    _MAX_EVENT_LIMIT = 1000
    _PAGE_SIZE = 100

    async def browse(
        self,
        path: str,
        filter: Optional[str] = None,
        limit: Optional[int] = None,
    ) -> List[ExternalEntry]:
        cleanPath = (path or "").strip("/")
        if not cleanPath:
            result = await self._graphGet("me/calendars?$top=100")
            if "error" in result:
                logger.warning(f"MSFT Calendar list failed: {result['error']}")
                return []
            calendars = result.get("value", [])
            if filter:
                calendars = [c for c in calendars if filter.lower() in (c.get("name") or "").lower()]
            return [
                ExternalEntry(
                    name=c.get("name", ""),
                    path=f"/{c.get('id', '')}",
                    isFolder=True,
                    metadata={
                        "id": c.get("id"),
                        "color": c.get("color"),
                        "owner": (c.get("owner") or {}).get("address"),
                        "isDefaultCalendar": c.get("isDefaultCalendar", False),
                        "canEdit": c.get("canEdit", False),
                    },
                )
                for c in calendars
            ]

        calendarId = cleanPath.split("/", 1)[0]
        effectiveLimit = self._DEFAULT_EVENT_LIMIT if limit is None else max(1, min(int(limit), self._MAX_EVENT_LIMIT))
        pageSize = min(self._PAGE_SIZE, effectiveLimit)
        endpoint: Optional[str] = (
            f"me/calendars/{calendarId}/events"
            f"?$top={pageSize}&$orderby=start/dateTime desc"
        )
        events: List[Dict[str, Any]] = []
        while endpoint and len(events) < effectiveLimit:
            result = await self._graphGet(endpoint)
            if "error" in result:
                logger.warning(f"MSFT Calendar events failed: {result['error']}")
                break
            for ev in result.get("value", []):
                events.append(ev)
                if len(events) >= effectiveLimit:
                    break
            nextLink = result.get("@odata.nextLink")
            endpoint = _stripGraphBase(nextLink) if nextLink else None

        return [
            ExternalEntry(
                name=ev.get("subject", "(no subject)"),
                path=f"/{calendarId}/{ev.get('id', '')}",
                isFolder=False,
                mimeType="text/calendar",
                metadata={
                    "id": ev.get("id"),
                    "start": (ev.get("start") or {}).get("dateTime"),
                    "end": (ev.get("end") or {}).get("dateTime"),
                    "location": (ev.get("location") or {}).get("displayName"),
                    "organizer": (ev.get("organizer") or {}).get("emailAddress", {}).get("address"),
                    "isAllDay": ev.get("isAllDay", False),
                    "webLink": ev.get("webLink"),
                },
            )
            for ev in events
        ]

    async def download(self, path: str) -> DownloadResult:
        cleanPath = (path or "").strip("/")
        if "/" not in cleanPath:
            return DownloadResult()
        eventId = cleanPath.split("/")[-1]
        ev = await self._graphGet(f"me/events/{eventId}")
        if "error" in ev:
            logger.warning(f"MSFT Calendar event fetch failed: {ev['error']}")
            return DownloadResult()
        icsBytes = _eventToIcs(ev)
        subject = ev.get("subject") or eventId
        safeName = _safeFileName(subject) or "event"
        return DownloadResult(
            data=icsBytes,
            fileName=f"{safeName}.ics",
            mimeType="text/calendar",
        )

    async def upload(self, path: str, data: bytes, fileName: str) -> dict:
        return {"error": "Calendar upload not supported"}

    async def search(
        self,
        query: str,
        path: Optional[str] = None,
        limit: Optional[int] = None,
    ) -> List[ExternalEntry]:
        safeQuery = query.replace("'", "''")
        effectiveLimit = self._DEFAULT_EVENT_LIMIT if limit is None else max(1, min(int(limit), self._MAX_EVENT_LIMIT))
        endpoint = f"me/events?$search=\"{safeQuery}\"&$top={effectiveLimit}"
        result = await self._graphGet(endpoint)
        if "error" in result:
            return []
        return [
            ExternalEntry(
                name=ev.get("subject", "(no subject)"),
                path=f"/search/{ev.get('id', '')}",
                isFolder=False,
                mimeType="text/calendar",
                metadata={
                    "id": ev.get("id"),
                    "start": (ev.get("start") or {}).get("dateTime"),
                    "end": (ev.get("end") or {}).get("dateTime"),
                },
            )
            for ev in result.get("value", [])
        ]


# ---------------------------------------------------------------------------
# Contacts Adapter
# ---------------------------------------------------------------------------

class ContactsAdapter(_GraphApiMixin, ServiceAdapter):
    """ServiceAdapter for Outlook Contacts via Microsoft Graph.

    Path conventions:
        ``""`` -> list contact folders (default + custom)
        ``"/<folderId>"`` -> list contacts in that folder; the
                             virtual id ``default`` maps to
                             ``/me/contacts`` (the user's primary
                             contact list)
        ``"/<folderId>/<contactId>"`` -> reserved for future detail browse

    Downloads return a synthesised vCard 3.0 (.vcf) since Microsoft Graph
    does not expose a ``/$value`` endpoint for contacts.
    """

    _DEFAULT_CONTACT_LIMIT = 200
    _MAX_CONTACT_LIMIT = 1000
    _PAGE_SIZE = 100
    _DEFAULT_FOLDER_ID = "default"

    async def browse(
        self,
        path: str,
        filter: Optional[str] = None,
        limit: Optional[int] = None,
    ) -> List[ExternalEntry]:
        cleanPath = (path or "").strip("/")
        if not cleanPath:
            folders: List[ExternalEntry] = [
                ExternalEntry(
                    name="Kontakte",
                    path=f"/{self._DEFAULT_FOLDER_ID}",
                    isFolder=True,
                    metadata={"id": self._DEFAULT_FOLDER_ID, "isDefault": True},
                ),
            ]
            result = await self._graphGet("me/contactFolders?$top=100")
            if "error" not in result:
                for f in result.get("value", []):
                    folders.append(
                        ExternalEntry(
                            name=f.get("displayName", ""),
                            path=f"/{f.get('id', '')}",
                            isFolder=True,
                            metadata={"id": f.get("id"), "parentFolderId": f.get("parentFolderId")},
                        )
                    )
            else:
                logger.warning(f"MSFT contactFolders list failed: {result['error']}")
            return folders

        folderId = cleanPath.split("/", 1)[0]
        effectiveLimit = self._DEFAULT_CONTACT_LIMIT if limit is None else max(1, min(int(limit), self._MAX_CONTACT_LIMIT))
        pageSize = min(self._PAGE_SIZE, effectiveLimit)
        if folderId == self._DEFAULT_FOLDER_ID:
            endpoint: Optional[str] = f"me/contacts?$top={pageSize}&$orderby=displayName"
        else:
            endpoint = f"me/contactFolders/{folderId}/contacts?$top={pageSize}&$orderby=displayName"

        contacts: List[Dict[str, Any]] = []
        while endpoint and len(contacts) < effectiveLimit:
            result = await self._graphGet(endpoint)
            if "error" in result:
                logger.warning(f"MSFT contacts list failed: {result['error']}")
                break
            for c in result.get("value", []):
                contacts.append(c)
                if len(contacts) >= effectiveLimit:
                    break
            nextLink = result.get("@odata.nextLink")
            endpoint = _stripGraphBase(nextLink) if nextLink else None

        return [
            ExternalEntry(
                name=c.get("displayName") or _personLabel(c) or "(no name)",
                path=f"/{folderId}/{c.get('id', '')}",
                isFolder=False,
                mimeType="text/vcard",
                metadata={
                    "id": c.get("id"),
                    "givenName": c.get("givenName"),
                    "surname": c.get("surname"),
                    "companyName": c.get("companyName"),
                    "emailAddresses": [e.get("address") for e in (c.get("emailAddresses") or []) if e.get("address")],
                    "businessPhones": c.get("businessPhones") or [],
                    "mobilePhone": c.get("mobilePhone"),
                },
            )
            for c in contacts
        ]

    async def download(self, path: str) -> DownloadResult:
        cleanPath = (path or "").strip("/")
        if "/" not in cleanPath:
            return DownloadResult()
        contactId = cleanPath.split("/")[-1]
        c = await self._graphGet(f"me/contacts/{contactId}")
        if "error" in c:
            logger.warning(f"MSFT contact fetch failed: {c['error']}")
            return DownloadResult()
        vcfBytes = _contactToVcard(c)
        label = c.get("displayName") or _personLabel(c) or contactId
        safeName = _safeFileName(label) or "contact"
        return DownloadResult(
            data=vcfBytes,
            fileName=f"{safeName}.vcf",
            mimeType="text/vcard",
        )

    async def upload(self, path: str, data: bytes, fileName: str) -> dict:
        return {"error": "Contacts upload not supported"}

    async def search(
        self,
        query: str,
        path: Optional[str] = None,
        limit: Optional[int] = None,
    ) -> List[ExternalEntry]:
        safeQuery = query.replace("'", "''")
        effectiveLimit = self._DEFAULT_CONTACT_LIMIT if limit is None else max(1, min(int(limit), self._MAX_CONTACT_LIMIT))
        endpoint = f"me/contacts?$search=\"{safeQuery}\"&$top={effectiveLimit}"
        result = await self._graphGet(endpoint)
        if "error" in result:
            return []
        return [
            ExternalEntry(
                name=c.get("displayName") or _personLabel(c) or "(no name)",
                path=f"/search/{c.get('id', '')}",
                isFolder=False,
                mimeType="text/vcard",
                metadata={"id": c.get("id")},
            )
            for c in result.get("value", [])
        ]


# ---------------------------------------------------------------------------
# MsftConnector (1:n)
# ---------------------------------------------------------------------------
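Since Graph offers no raw ``/$value`` export, the synthesised ``.ics``/``.vcf`` bytes are all a caller gets; a sketch of writing one event to disk, assuming an adapter instance wired with a valid Graph token:

async def exportEventToDisk(adapter: CalendarAdapter, path: str) -> None:
    result = await adapter.download(path)  # e.g. "/<calendarId>/<eventId>"
    if result.data:
        with open(result.fileName or "event.ics", "wb") as fh:
            fh.write(result.data)
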
@@ -853,6 +1155,8 @@ class MsftConnector(ProviderConnector):
         "outlook": OutlookAdapter,
         "teams": TeamsAdapter,
         "onedrive": OneDriveAdapter,
+        "calendar": CalendarAdapter,
+        "contact": ContactsAdapter,
     }

     def getAvailableServices(self) -> List[str]:
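With the two new entries registered, service dispatch stays a plain dict lookup; a sketch, assuming MsftConnector resolves adapters the same way as the other connectors:

async def listMsftContactFolders(connector: "MsftConnector") -> None:
    # "contact" and "calendar" now resolve via _SERVICE_MAP like the
    # existing services; unknown names are rejected as before.
    adapter = connector.getServiceAdapter("contact")
    for entry in await adapter.browse(""):
        print(entry.path, entry.name)
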
@@ -891,3 +1195,143 @@ def _matchFilter(entry: ExternalEntry, pattern: str) -> bool:
    """Simple glob-like filter (supports * wildcard)."""
    import fnmatch
    return fnmatch.fnmatch(entry.name.lower(), pattern.lower())


def _safeFileName(name: str) -> str:
    """Strip path-unsafe characters and trim length so the result is a usable file name."""
    import re
    return re.sub(r'[<>:"/\\|?*\x00-\x1f]', "_", name or "")[:80].strip(". ")


def _personLabel(contact: Dict[str, Any]) -> str:
    given = (contact.get("givenName") or "").strip()
    surname = (contact.get("surname") or "").strip()
    if given or surname:
        return f"{given} {surname}".strip()
    company = (contact.get("companyName") or "").strip()
    return company


def _icsEscape(value: str) -> str:
    """Escape RFC 5545 reserved characters in TEXT properties."""
    if value is None:
        return ""
    return (
        value.replace("\\", "\\\\")
        .replace(";", "\\;")
        .replace(",", "\\,")
        .replace("\r\n", "\\n")
        .replace("\n", "\\n")
    )


def _icsDateTime(value: Optional[str]) -> Optional[str]:
    """Convert an ISO datetime string to an RFC 5545 DATE-TIME value (UTC)."""
    if not value:
        return None
    from datetime import datetime, timezone
    try:
        normalized = value.replace("Z", "+00:00") if value.endswith("Z") else value
        dt = datetime.fromisoformat(normalized)
        if dt.tzinfo is None:
            dt = dt.replace(tzinfo=timezone.utc)
        return dt.astimezone(timezone.utc).strftime("%Y%m%dT%H%M%SZ")
    except (TypeError, ValueError):
        return None


def _eventToIcs(event: Dict[str, Any]) -> bytes:
    """Build a minimal RFC 5545 VCALENDAR/VEVENT for a Graph event payload."""
    from datetime import datetime, timezone
    uid = event.get("iCalUId") or event.get("id") or "unknown@poweron"
    summary = _icsEscape(event.get("subject") or "")
    location = _icsEscape((event.get("location") or {}).get("displayName") or "")
    body = (event.get("body") or {}).get("content") or ""
    description = _icsEscape(body)
    dtstart = _icsDateTime((event.get("start") or {}).get("dateTime"))
    dtend = _icsDateTime((event.get("end") or {}).get("dateTime"))
    dtstamp = _icsDateTime(event.get("lastModifiedDateTime")) or datetime.now(timezone.utc).strftime("%Y%m%dT%H%M%SZ")

    lines = [
        "BEGIN:VCALENDAR",
        "VERSION:2.0",
        "PRODID:-//PowerOn//MSFT-Calendar-Adapter//EN",
        "CALSCALE:GREGORIAN",
        "BEGIN:VEVENT",
        f"UID:{uid}",
        f"DTSTAMP:{dtstamp}",
    ]
    if dtstart:
        lines.append(f"DTSTART:{dtstart}")
    if dtend:
        lines.append(f"DTEND:{dtend}")
    if summary:
        lines.append(f"SUMMARY:{summary}")
    if location:
        lines.append(f"LOCATION:{location}")
    if description:
        lines.append(f"DESCRIPTION:{description}")
    organizer = (event.get("organizer") or {}).get("emailAddress", {}).get("address")
    if organizer:
        lines.append(f"ORGANIZER:mailto:{organizer}")
    for att in (event.get("attendees") or []):
        addr = (att.get("emailAddress") or {}).get("address")
        if addr:
            lines.append(f"ATTENDEE:mailto:{addr}")
    lines.append("END:VEVENT")
    lines.append("END:VCALENDAR")
    return ("\r\n".join(lines) + "\r\n").encode("utf-8")


def _contactToVcard(contact: Dict[str, Any]) -> bytes:
    """Build a vCard 3.0 from a Graph /me/contacts payload."""
    given = contact.get("givenName") or ""
    surname = contact.get("surname") or ""
    middle = contact.get("middleName") or ""
    fn = contact.get("displayName") or _personLabel(contact) or contact.get("companyName") or ""

    lines = [
        "BEGIN:VCARD",
        "VERSION:3.0",
        f"N:{surname};{given};{middle};;",
        f"FN:{fn}",
    ]
    if contact.get("companyName"):
        org = contact["companyName"]
        if contact.get("department"):
            org = f"{org};{contact['department']}"
        lines.append(f"ORG:{org}")
    if contact.get("jobTitle"):
        lines.append(f"TITLE:{contact['jobTitle']}")
    for em in (contact.get("emailAddresses") or []):
        addr = em.get("address")
        if addr:
            lines.append(f"EMAIL;TYPE=INTERNET:{addr}")
    for phone in (contact.get("businessPhones") or []):
        if phone:
            lines.append(f"TEL;TYPE=WORK,VOICE:{phone}")
    if contact.get("mobilePhone"):
        lines.append(f"TEL;TYPE=CELL,VOICE:{contact['mobilePhone']}")
    for phone in (contact.get("homePhones") or []):
        if phone:
            lines.append(f"TEL;TYPE=HOME,VOICE:{phone}")

    def _appendAddress(addr: Dict[str, Any], typ: str) -> None:
        if not addr:
            return
        street = addr.get("street") or ""
        city = addr.get("city") or ""
        state = addr.get("state") or ""
        postal = addr.get("postalCode") or ""
        country = addr.get("countryOrRegion") or ""
        if any([street, city, state, postal, country]):
            lines.append(f"ADR;TYPE={typ}:;;{street};{city};{state};{postal};{country}")

    _appendAddress(contact.get("businessAddress") or {}, "WORK")
    _appendAddress(contact.get("homeAddress") or {}, "HOME")
    _appendAddress(contact.get("otherAddress") or {}, "OTHER")
    if contact.get("personalNotes"):
        lines.append(f"NOTE:{_icsEscape(contact['personalNotes'])}")
    lines.append(f"UID:{contact.get('id', '')}")
    lines.append("END:VCARD")
    return ("\r\n".join(lines) + "\r\n").encode("utf-8")
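A standalone check of the DATE-TIME normalisation implemented by ``_icsDateTime`` above (values invented):

from datetime import datetime, timezone

value = "2025-03-01T09:30:00Z"
dt = datetime.fromisoformat(value.replace("Z", "+00:00")).astimezone(timezone.utc)
print(dt.strftime("%Y%m%dT%H%M%SZ"))  # -> 20250301T093000Z
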
@@ -125,7 +125,7 @@ class AiModel(BaseModel):

     # Metadata
     version: Optional[str] = Field(default=None, description="Model version")
-    lastUpdated: Optional[str] = Field(default=None, description="Last update timestamp")
+    lastUpdated: Optional[float] = Field(default=None, description="Last update timestamp (UTC unix)", json_schema_extra={"frontend_type": "timestamp"})

     model_config = ConfigDict(arbitrary_types_allowed=True)  # Allow Callable type
@@ -162,6 +162,7 @@ class AiCallOptions(BaseModel):

     # Provider filtering (from UI multiselect or automation config)
     allowedProviders: Optional[List[str]] = Field(default=None, description="List of allowed AI providers to use (empty = all RBAC-permitted)")
+    allowedModels: Optional[List[str]] = Field(default=None, description="Whitelist of allowed model names (AND-filter with allowedProviders). None/empty = all allowed.")


 class AiCallRequest(BaseModel):
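The AND-filter semantics the new field describes, as a standalone sketch (the candidate list is made up):

def filterModels(models, allowedProviders, allowedModels):
    # A model survives only if BOTH whitelists (when set) admit it.
    out = []
    for m in models:
        if allowedProviders and m["provider"] not in allowedProviders:
            continue
        if allowedModels and m["name"] not in allowedModels:
            continue
        out.append(m)
    return out

models = [{"provider": "anthropic", "name": "claude-4-sonnet"},
          {"provider": "openai", "name": "gpt-4o"}]
print(filterModels(models, ["openai"], None))  # -> only gpt-4o survives
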
@@ -34,7 +34,7 @@ class AiAuditLogEntry(BaseModel):

     userId: str = Field(
         description="ID of the user who triggered the AI call",
-        json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "User"}},
+        json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"}},
     )
     username: Optional[str] = Field(
         default=None,
@@ -43,17 +43,17 @@ class AiAuditLogEntry(BaseModel):
     )
     mandateId: str = Field(
         description="Mandate context of the call",
-        json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate"}},
+        json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}},
     )
     featureInstanceId: Optional[str] = Field(
         default=None,
         description="Feature instance context",
-        json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
+        json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}},
     )
     featureCode: Optional[str] = Field(
         default=None,
         description="Feature code (e.g. workspace, trustee)",
-        json_schema_extra={"label": "Feature", "fk_target": {"db": "poweron_app", "table": "Feature", "column": "code"}},
+        json_schema_extra={"label": "Feature", "fk_target": {"db": "poweron_app", "table": "Feature", "column": "code", "labelField": "code"}},
     )
     instanceLabel: Optional[str] = Field(
         default=None,
@@ -100,7 +100,7 @@ class AuditLogEntry(BaseModel):
     timestamp: float = Field(
         default_factory=getUtcTimestamp,
         description="UTC timestamp when the event occurred",
-        json_schema_extra={"label": "Zeitstempel", "frontend_type": "datetime", "frontend_readonly": True, "frontend_required": True}
+        json_schema_extra={"label": "Zeitstempel", "frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": True}
     )

     # Actor identification
@@ -111,7 +111,7 @@ class AuditLogEntry(BaseModel):
             "frontend_type": "text",
             "frontend_readonly": True,
             "frontend_required": True,
-            "fk_target": {"db": "poweron_app", "table": "User"},
+            "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
         },
     )

@@ -130,7 +130,7 @@ class AuditLogEntry(BaseModel):
             "frontend_type": "text",
             "frontend_readonly": True,
             "frontend_required": False,
-            "fk_target": {"db": "poweron_app", "table": "Mandate"},
+            "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
         },
     )

@@ -142,7 +142,7 @@ class AuditLogEntry(BaseModel):
             "frontend_type": "text",
             "frontend_readonly": True,
             "frontend_required": False,
-            "fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
+            "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
         },
     )

@@ -193,7 +193,13 @@ class AuditLogEntry(BaseModel):
     success: bool = Field(
         default=True,
         description="Whether the action was successful",
-        json_schema_extra={"label": "Erfolgreich", "frontend_type": "checkbox", "frontend_readonly": True, "frontend_required": True}
+        json_schema_extra={
+            "label": "Erfolgreich",
+            "frontend_type": "checkbox",
+            "frontend_readonly": True,
+            "frontend_required": True,
+            "frontend_format_labels": ["OK", "-", "Fehler"],
+        },
     )

     errorMessage: Optional[str] = Field(
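Across these model changes, timestamps move from datetime/ISO values to UTC unix floats with ``frontend_type: "timestamp"``; conversion at the boundary looks like this (sketch):

from datetime import datetime, timezone

ts = datetime.now(timezone.utc).timestamp()        # what gets stored in the model
dt = datetime.fromtimestamp(ts, tz=timezone.utc)   # what a formatter would render
print(ts, dt.isoformat())
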
@@ -64,7 +64,7 @@ class BackgroundJob(PowerOnModel):
         description="Mandate scope (used for access checks). None for system-wide jobs.",
         json_schema_extra={
             "label": "Mandanten-ID",
-            "fk_target": {"db": "poweron_app", "table": "Mandate"},
+            "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
         },
     )
     featureInstanceId: Optional[str] = Field(
@@ -72,7 +72,7 @@ class BackgroundJob(PowerOnModel):
         description="Feature instance scope (optional)",
         json_schema_extra={
             "label": "Feature-Instanz",
-            "fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
+            "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
         },
     )
     triggeredBy: Optional[str] = Field(
@@ -113,18 +113,18 @@ class BackgroundJob(PowerOnModel):
         json_schema_extra={"label": "Fehler"},
     )

-    createdAt: datetime = Field(
-        default_factory=lambda: datetime.now(timezone.utc),
-        description="When the job was submitted",
-        json_schema_extra={"label": "Eingereicht"},
+    createdAt: float = Field(
+        default_factory=lambda: datetime.now(timezone.utc).timestamp(),
+        description="When the job was submitted (UTC unix)",
+        json_schema_extra={"label": "Eingereicht", "frontend_type": "timestamp"},
     )
-    startedAt: Optional[datetime] = Field(
+    startedAt: Optional[float] = Field(
         None,
-        description="When the handler began running",
-        json_schema_extra={"label": "Gestartet"},
+        description="When the handler began running (UTC unix)",
+        json_schema_extra={"label": "Gestartet", "frontend_type": "timestamp"},
     )
-    finishedAt: Optional[datetime] = Field(
+    finishedAt: Optional[float] = Field(
         None,
-        description="When the handler reached a terminal status",
-        json_schema_extra={"label": "Beendet"},
+        description="When the handler reached a terminal status (UTC unix)",
+        json_schema_extra={"label": "Beendet", "frontend_type": "timestamp"},
     )
@@ -8,12 +8,12 @@ from pydantic import BaseModel, Field

 from modules.shared.i18nRegistry import i18nModel

-_MODEL_REGISTRY: Dict[str, Type["PowerOnModel"]] = {}
+MODEL_REGISTRY: Dict[str, Type["PowerOnModel"]] = {}


 def _getModelByTableName(tableName: str) -> Optional[Type["PowerOnModel"]]:
     """Look up a PowerOnModel subclass by its table name (= class name)."""
-    return _MODEL_REGISTRY.get(tableName)
+    return MODEL_REGISTRY.get(tableName)


 @i18nModel("Basisdatensatz")
@@ -22,7 +22,7 @@ class PowerOnModel(BaseModel):

     def __init_subclass__(cls, **kwargs):
         super().__init_subclass__(**kwargs)
-        _MODEL_REGISTRY[cls.__name__] = cls
+        MODEL_REGISTRY[cls.__name__] = cls

     sysCreatedAt: Optional[float] = Field(
         default=None,
@@ -46,6 +46,7 @@ class PowerOnModel(BaseModel):
             "frontend_required": False,
             "frontend_visible": False,
             "system": True,
+            "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
         },
     )
     sysModifiedAt: Optional[float] = Field(
@@ -70,5 +71,6 @@ class PowerOnModel(BaseModel):
             "frontend_required": False,
             "frontend_visible": False,
             "system": True,
+            "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
         },
     )
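Making the registry public lets other modules enumerate models directly; a standalone sketch of the ``__init_subclass__`` registration mechanism the class relies on:

from typing import Dict, Type

MODEL_REGISTRY: Dict[str, Type["Base"]] = {}

class Base:
    def __init_subclass__(cls, **kwargs):
        super().__init_subclass__(**kwargs)
        MODEL_REGISTRY[cls.__name__] = cls  # table name == class name

class Mandate(Base):
    pass

print(MODEL_REGISTRY["Mandate"] is Mandate)  # True
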
@@ -49,12 +49,12 @@ class BillingAccount(PowerOnModel):
     mandateId: str = Field(
         ...,
         description="Foreign key to Mandate",
-        json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate"}},
+        json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}},
     )
     userId: Optional[str] = Field(
         None,
         description="Foreign key to User (None = mandate pool account, set = user audit account)",
-        json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "User"}},
+        json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"}},
     )
     balance: float = Field(default=0.0, description="Current balance in CHF", json_schema_extra={"label": "Guthaben (CHF)"})
     warningThreshold: float = Field(
@@ -62,10 +62,10 @@ class BillingAccount(PowerOnModel):
         description="Warning threshold in CHF",
         json_schema_extra={"label": "Warnschwelle (CHF)"},
     )
-    lastWarningAt: Optional[datetime] = Field(
+    lastWarningAt: Optional[float] = Field(
         None,
-        description="Last warning sent timestamp",
-        json_schema_extra={"label": "Letzte Warnung"},
+        description="Last warning sent timestamp (UTC unix)",
+        json_schema_extra={"label": "Letzte Warnung", "frontend_type": "timestamp"},
     )
     enabled: bool = Field(default=True, description="Account is active", json_schema_extra={"label": "Aktiv"})

@@ -81,7 +81,7 @@ class BillingTransaction(PowerOnModel):
     accountId: str = Field(
         ...,
         description="Foreign key to BillingAccount",
-        json_schema_extra={"label": "Konto-ID", "fk_target": {"db": "poweron_billing", "table": "BillingAccount"}},
+        json_schema_extra={"label": "Konto-ID", "fk_target": {"db": "poweron_billing", "table": "BillingAccount", "labelField": None}},
     )
     transactionType: TransactionTypeEnum = Field(..., description="Transaction type", json_schema_extra={"label": "Typ"})
     amount: float = Field(..., description="Amount in CHF (always positive)", json_schema_extra={"label": "Betrag (CHF)"})
@@ -100,19 +100,19 @@ class BillingTransaction(PowerOnModel):
     featureInstanceId: Optional[str] = Field(
         None,
         description="Feature instance ID",
-        json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
+        json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}},
     )
     featureCode: Optional[str] = Field(
         None,
         description="Feature code (e.g., automation)",
-        json_schema_extra={"label": "Feature-Code", "fk_target": {"db": "poweron_app", "table": "Feature", "column": "code"}},
+        json_schema_extra={"label": "Feature-Code", "fk_target": {"db": "poweron_app", "table": "Feature", "column": "code", "labelField": "code"}},
     )
     aicoreProvider: Optional[str] = Field(None, description="AICore provider (anthropic, openai, etc.)", json_schema_extra={"label": "AI-Anbieter"})
     aicoreModel: Optional[str] = Field(None, description="AICore model name (e.g., claude-4-sonnet, gpt-4o)", json_schema_extra={"label": "AI-Modell"})
     createdByUserId: Optional[str] = Field(
         None,
         description="User who created/caused this transaction",
-        json_schema_extra={"label": "Erstellt von Benutzer", "fk_target": {"db": "poweron_app", "table": "User"}},
+        json_schema_extra={"label": "Erstellt von Benutzer", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"}},
     )

     # AI call metadata (for per-call analytics)
@@ -133,7 +133,7 @@ class BillingSettings(BaseModel):
     mandateId: str = Field(
         ...,
         description="Foreign key to Mandate (UNIQUE)",
-        json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate"}},
+        json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}},
     )

     warningThresholdPercent: float = Field(
@@ -158,7 +158,7 @@ class BillingSettings(BaseModel):
     )
     rechargeMaxPerMonth: int = Field(default=3, description="Max auto-recharges per month", json_schema_extra={"label": "Max. Nachladungen/Monat"})
     rechargesThisMonth: int = Field(default=0, description="Counter: auto-recharges used this month", json_schema_extra={"label": "Nachladungen diesen Monat"})
-    monthResetAt: Optional[datetime] = Field(None, description="When rechargesThisMonth was last reset", json_schema_extra={"label": "Monats-Reset"})
+    monthResetAt: Optional[float] = Field(None, description="When rechargesThisMonth was last reset (UTC unix)", json_schema_extra={"label": "Monats-Reset", "frontend_type": "timestamp"})

     # Notifications
     notifyEmails: List[str] = Field(
@@ -174,10 +174,10 @@ class BillingSettings(BaseModel):
         description="Peak indexed data volume MB this billing period",
         json_schema_extra={"label": "Speicher-Peak (MB)"},
     )
-    storagePeriodStartAt: Optional[datetime] = Field(
+    storagePeriodStartAt: Optional[float] = Field(
         None,
-        description="Subscription billing period start used for storage reset",
-        json_schema_extra={"label": "Speicher-Periodenbeginn"},
+        description="Subscription billing period start used for storage reset (UTC unix)",
+        json_schema_extra={"label": "Speicher-Periodenbeginn", "frontend_type": "timestamp"},
     )
     storageBilledUpToMB: float = Field(
         default=0.0,
@@ -193,9 +193,10 @@ class StripeWebhookEvent(BaseModel):
         description="Primary key",
     )
     event_id: str = Field(..., description="Stripe event ID (evt_xxx)")
-    processed_at: datetime = Field(
-        default_factory=lambda: datetime.now(timezone.utc),
-        description="When the event was processed",
+    processed_at: float = Field(
+        default_factory=lambda: datetime.now(timezone.utc).timestamp(),
+        description="When the event was processed (UTC unix)",
+        json_schema_extra={"frontend_type": "timestamp"},
     )

@@ -210,10 +211,14 @@ class UsageStatistics(BaseModel):
     accountId: str = Field(
         ...,
         description="Foreign key to BillingAccount",
-        json_schema_extra={"label": "Konto-ID", "fk_target": {"db": "poweron_billing", "table": "BillingAccount"}},
+        json_schema_extra={"label": "Konto-ID", "fk_target": {"db": "poweron_billing", "table": "BillingAccount", "labelField": None}},
     )
     periodType: PeriodTypeEnum = Field(..., description="Period type", json_schema_extra={"label": "Periodentyp"})
-    periodStart: date = Field(..., description="Period start date", json_schema_extra={"label": "Periodenbeginn"})
+    periodStart: date = Field(
+        ...,
+        description="Period start date",
+        json_schema_extra={"label": "Periodenbeginn", "frontend_type": "date"},
+    )

     # Aggregated values
     totalCostCHF: float = Field(default=0.0, description="Total cost in CHF", json_schema_extra={"label": "Gesamtkosten (CHF)"})
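The billing hunks above replace `datetime` fields with unix-seconds floats tagged `frontend_type: "timestamp"`. A sketch of the round-trip; the `getUtcTimestamp` helper is defined elsewhere in the repo and is reproduced here only as an illustrative equivalent of the convention:

    # Sketch of the datetime -> unix-seconds convention, assuming the helper
    # elsewhere in the codebase behaves like this reimplementation.
    from datetime import datetime, timezone

    def getUtcTimestamp() -> float:
        """UTC unix timestamp in seconds: the storage format for *At fields."""
        return datetime.now(timezone.utc).timestamp()

    # Round-trip back to an aware datetime for display or debugging:
    ts = getUtcTimestamp()
    dt = datetime.fromtimestamp(ts, tz=timezone.utc)
    assert abs(dt.timestamp() - ts) < 1e-6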
@@ -16,12 +16,12 @@ class ChatLog(PowerOnModel):
     id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key", json_schema_extra={"label": "ID"})
     workflowId: str = Field(
         description="Foreign key to workflow",
-        json_schema_extra={"label": "Workflow-ID", "fk_target": {"db": "poweron_chat", "table": "ChatWorkflow"}},
+        json_schema_extra={"label": "Workflow-ID", "fk_target": {"db": "poweron_chat", "table": "ChatWorkflow", "labelField": "name"}},
     )
     message: str = Field(description="Log message", json_schema_extra={"label": "Nachricht"})
     type: str = Field(description="Log type (info, warning, error, etc.)", json_schema_extra={"label": "Typ"})
     timestamp: float = Field(default_factory=getUtcTimestamp,
-                             description="When the log entry was created (UTC timestamp in seconds)", json_schema_extra={"label": "Zeitstempel"})
+                             description="When the log entry was created (UTC timestamp in seconds)", json_schema_extra={"label": "Zeitstempel", "frontend_type": "timestamp"})
     status: Optional[str] = Field(None, description="Status of the log entry", json_schema_extra={"label": "Status"})
     progress: Optional[float] = Field(None, description="Progress indicator (0.0 to 1.0)", json_schema_extra={"label": "Fortschritt"})
     performance: Optional[Dict[str, Any]] = Field(None, description="Performance metrics", json_schema_extra={"label": "Leistung"})
@@ -37,11 +37,11 @@ class ChatDocument(PowerOnModel):
     id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key", json_schema_extra={"label": "ID"})
     messageId: str = Field(
         description="Foreign key to message",
-        json_schema_extra={"label": "Nachrichten-ID", "fk_target": {"db": "poweron_chat", "table": "ChatMessage"}},
+        json_schema_extra={"label": "Nachrichten-ID", "fk_target": {"db": "poweron_chat", "table": "ChatMessage", "labelField": None}},
     )
     fileId: str = Field(
         description="Foreign key to file",
-        json_schema_extra={"label": "Datei-ID", "fk_target": {"db": "poweron_management", "table": "FileItem"}},
+        json_schema_extra={"label": "Datei-ID", "fk_target": {"db": "poweron_management", "table": "FileItem", "labelField": "fileName"}},
     )
     fileName: str = Field(description="Name of the file", json_schema_extra={"label": "Dateiname"})
     fileSize: int = Field(description="Size of the file", json_schema_extra={"label": "Dateigröße"})
@@ -81,12 +81,12 @@ class ChatMessage(PowerOnModel):
     id: str = Field(default_factory=lambda: str(uuid.uuid4()), description="Primary key", json_schema_extra={"label": "ID"})
     workflowId: str = Field(
         description="Foreign key to workflow",
-        json_schema_extra={"label": "Workflow-ID", "fk_target": {"db": "poweron_chat", "table": "ChatWorkflow"}},
+        json_schema_extra={"label": "Workflow-ID", "fk_target": {"db": "poweron_chat", "table": "ChatWorkflow", "labelField": "name"}},
     )
     parentMessageId: Optional[str] = Field(
         None,
         description="Parent message ID for threading",
-        json_schema_extra={"label": "Übergeordnete Nachrichten-ID", "fk_target": {"db": "poweron_chat", "table": "ChatMessage"}},
+        json_schema_extra={"label": "Übergeordnete Nachrichten-ID", "fk_target": {"db": "poweron_chat", "table": "ChatMessage", "labelField": None}},
     )
     documents: List[ChatDocument] = Field(default_factory=list, description="Associated documents", json_schema_extra={"label": "Dokumente"})
     documentsLabel: Optional[str] = Field(None, description="Label for the set of documents", json_schema_extra={"label": "Dokumenten-Label"})
@@ -97,7 +97,7 @@ class ChatMessage(PowerOnModel):
     sequenceNr: Optional[int] = Field(default=0,
                                       description="Sequence number of the message (set automatically)", json_schema_extra={"label": "Sequenznummer"})
     publishedAt: Optional[float] = Field(default=None,
-                                         description="When the message was published (UTC timestamp in seconds)", json_schema_extra={"label": "Veröffentlicht am"})
+                                         description="When the message was published (UTC timestamp in seconds)", json_schema_extra={"label": "Veröffentlicht am", "frontend_type": "timestamp"})
     success: Optional[bool] = Field(None, description="Whether the message processing was successful", json_schema_extra={"label": "Erfolg"})
     actionId: Optional[str] = Field(None, description="ID of the action that produced this message", json_schema_extra={"label": "Aktions-ID"})
     actionMethod: Optional[str] = Field(None, description="Method of the action that produced this message", json_schema_extra={"label": "Aktionsmethode"})
@@ -125,7 +125,7 @@ class ChatWorkflow(PowerOnModel):
             "frontend_type": "text",
             "frontend_readonly": True,
             "frontend_required": False,
-            "fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
+            "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
         },
     )
     linkedWorkflowId: Optional[str] = Field(
@@ -219,7 +219,7 @@ class UserInputRequest(BaseModel):
     workflowId: Optional[str] = Field(
         None,
         description="Optional ID of the workflow to continue",
-        json_schema_extra={"label": "Workflow-ID", "fk_target": {"db": "poweron_chat", "table": "ChatWorkflow"}},
+        json_schema_extra={"label": "Workflow-ID", "fk_target": {"db": "poweron_chat", "table": "ChatWorkflow", "labelField": "name"}},
     )
     allowedProviders: Optional[List[str]] = Field(None, description="List of allowed AI providers (multiselect)", json_schema_extra={"label": "Erlaubte Anbieter"})

@@ -281,8 +281,8 @@ class ObservationPreview(BaseModel):
     # Extended metadata fields
     mimeType: Optional[str] = Field(default=None, description="MIME type", json_schema_extra={"label": "MIME-Typ"})
     size: Optional[str] = Field(default=None, description="File size", json_schema_extra={"label": "Größe"})
-    created: Optional[str] = Field(default=None, description="Creation timestamp", json_schema_extra={"label": "Erstellt"})
-    modified: Optional[str] = Field(default=None, description="Modification timestamp", json_schema_extra={"label": "Geändert"})
+    created: Optional[float] = Field(default=None, description="Creation timestamp (UTC unix)", json_schema_extra={"label": "Erstellt", "frontend_type": "timestamp"})
+    modified: Optional[float] = Field(default=None, description="Modification timestamp (UTC unix)", json_schema_extra={"label": "Geändert", "frontend_type": "timestamp"})
     typeGroup: Optional[str] = Field(default=None, description="Document type group", json_schema_extra={"label": "Typgruppe"})
    documentId: Optional[str] = Field(default=None, description="Document ID", json_schema_extra={"label": "Dokument-ID"})
    reference: Optional[str] = Field(default=None, description="Document reference", json_schema_extra={"label": "Referenz"})
@@ -332,7 +332,7 @@ class ActionItem(BaseModel):
     retryCount: int = Field(default=0, description="Number of retries attempted", json_schema_extra={"label": "Wiederholungen"})
     retryMax: int = Field(default=3, description="Maximum number of retries", json_schema_extra={"label": "Max. Wiederholungen"})
     processingTime: Optional[float] = Field(None, description="Processing time in seconds", json_schema_extra={"label": "Bearbeitungszeit"})
-    timestamp: float = Field(..., description="When the action was executed (UTC timestamp in seconds)", json_schema_extra={"label": "Zeitstempel"})
+    timestamp: float = Field(..., description="When the action was executed (UTC timestamp in seconds)", json_schema_extra={"label": "Zeitstempel", "frontend_type": "timestamp"})
     result: Optional[str] = Field(None, description="Result of the action", json_schema_extra={"label": "Ergebnis"})

     def setSuccess(self, result: str = None) -> None:
@@ -361,13 +361,13 @@ class TaskItem(BaseModel):
     workflowId: str = Field(
         ...,
         description="Workflow ID",
-        json_schema_extra={"label": "Workflow-ID", "fk_target": {"db": "poweron_chat", "table": "ChatWorkflow"}},
+        json_schema_extra={"label": "Workflow-ID", "fk_target": {"db": "poweron_chat", "table": "ChatWorkflow", "labelField": "name"}},
     )
     userInput: str = Field(..., description="User input that triggered the task", json_schema_extra={"label": "Benutzereingabe"})
     status: TaskStatus = Field(default=TaskStatus.PENDING, description="Task status", json_schema_extra={"label": "Status"})
     error: Optional[str] = Field(None, description="Error message if task failed", json_schema_extra={"label": "Fehler"})
-    startedAt: Optional[float] = Field(None, description="When the task started (UTC timestamp in seconds)", json_schema_extra={"label": "Gestartet am"})
-    finishedAt: Optional[float] = Field(None, description="When the task finished (UTC timestamp in seconds)", json_schema_extra={"label": "Beendet am"})
+    startedAt: Optional[float] = Field(None, description="When the task started (UTC timestamp in seconds)", json_schema_extra={"label": "Gestartet am", "frontend_type": "timestamp"})
+    finishedAt: Optional[float] = Field(None, description="When the task finished (UTC timestamp in seconds)", json_schema_extra={"label": "Beendet am", "frontend_type": "timestamp"})
     actionList: List[ActionItem] = Field(default_factory=list, description="List of actions to execute", json_schema_extra={"label": "Aktionen"})
     retryCount: int = Field(default=0, description="Number of retries attempted", json_schema_extra={"label": "Wiederholungen"})
     retryMax: int = Field(default=3, description="Maximum number of retries", json_schema_extra={"label": "Max. Wiederholungen"})
@@ -402,7 +402,7 @@ class TaskHandover(BaseModel):
     improvements: List[str] = Field(default_factory=list, description="Improvement suggestions", json_schema_extra={"label": "Verbesserungen"})
     workflowSummary: Optional[str] = Field(None, description="Summarized workflow context", json_schema_extra={"label": "Workflow-Zusammenfassung"})
     messageHistory: List[str] = Field(default_factory=list, description="Key message summaries", json_schema_extra={"label": "Nachrichtenverlauf"})
-    timestamp: float = Field(..., description="When the handover was created (UTC timestamp in seconds)", json_schema_extra={"label": "Zeitstempel"})
+    timestamp: float = Field(..., description="When the handover was created (UTC timestamp in seconds)", json_schema_extra={"label": "Zeitstempel", "frontend_type": "timestamp"})
     handoverType: str = Field(default="task", description="Type of handover: task, phase, or workflow", json_schema_extra={"label": "Übergabetyp"})

 class TaskContext(BaseModel):
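ChatMessage threads via `parentMessageId` in the hunk above. A hedged sketch of walking those links back to the root; the `threadOf` helper and simplified dict shape are assumptions for illustration only:

    # Hedged sketch: rebuilding a message chain from parentMessageId links.
    from typing import Dict, List, Optional

    def threadOf(messages: List[dict], leafId: str) -> List[dict]:
        """Walk parentMessageId links from a leaf back to the root, oldest first."""
        byId: Dict[str, dict] = {m["id"]: m for m in messages}
        chain: List[dict] = []
        cur: Optional[dict] = byId.get(leafId)
        while cur is not None:
            chain.append(cur)
            parentId = cur.get("parentMessageId")
            cur = byId.get(parentId) if parentId else None
        return list(reversed(chain))

    msgs = [
        {"id": "a", "parentMessageId": None},
        {"id": "b", "parentMessageId": "a"},
        {"id": "c", "parentMessageId": "b"},
    ]
    assert [m["id"] for m in threadOf(msgs, "c")] == ["a", "b", "c"]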
@@ -34,7 +34,7 @@ class ContentObject(BaseModel):
     id: str = Field(default_factory=lambda: str(uuid.uuid4()))
     fileId: str = Field(
         description="FK to the physical file",
-        json_schema_extra={"fk_target": {"db": "poweron_management", "table": "FileItem"}},
+        json_schema_extra={"fk_target": {"db": "poweron_management", "table": "FileItem", "labelField": "fileName"}},
     )
     contentType: str = Field(description="text, image, videostream, audiostream, other")
     data: str = Field(default="", description="Content data (text, base64, URL)")
@@ -23,10 +23,15 @@ class DataSource(PowerOnModel):
     )
     connectionId: str = Field(
         description="FK to UserConnection",
-        json_schema_extra={"label": "Verbindungs-ID", "fk_target": {"db": "poweron_app", "table": "UserConnection"}},
+        json_schema_extra={"label": "Verbindungs-ID", "fk_target": {"db": "poweron_app", "table": "UserConnection", "labelField": "externalUsername"}},
     )
     sourceType: str = Field(
-        description="sharepointFolder, googleDriveFolder, outlookFolder, ftpFolder, clickupList (path under /team/...)",
+        description=(
+            "sharepointFolder, onedriveFolder, googleDriveFolder, "
+            "outlookFolder, gmailFolder, ftpFolder, clickupList "
+            "(path under /team/...), kdriveFolder, calendarFolder, "
+            "contactFolder"
+        ),
         json_schema_extra={"label": "Quellentyp"},
     )
     path: str = Field(
@@ -45,17 +50,17 @@ class DataSource(PowerOnModel):
     featureInstanceId: Optional[str] = Field(
         default=None,
         description="Scoped to feature instance",
-        json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
+        json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}},
     )
     mandateId: Optional[str] = Field(
         default=None,
         description="Mandate scope",
-        json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate"}},
+        json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}},
     )
     userId: str = Field(
         default="",
         description="Owner user ID",
-        json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "User"}},
+        json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"}},
     )
     autoSync: bool = Field(
         default=False,
@@ -65,7 +70,7 @@ class DataSource(PowerOnModel):
     lastSynced: Optional[float] = Field(
         default=None,
         description="Last sync timestamp",
-        json_schema_extra={"label": "Letzter Sync"},
+        json_schema_extra={"label": "Letzter Sync", "frontend_type": "timestamp"},
     )
     scope: str = Field(
         default="personal",
@@ -91,5 +96,9 @@ class ExternalEntry(BaseModel):
     isFolder: bool = Field(default=False, description="True if directory/folder")
     size: Optional[int] = Field(default=None, description="File size in bytes")
     mimeType: Optional[str] = Field(default=None, description="MIME type (files only)")
-    lastModified: Optional[float] = Field(default=None, description="Last modification timestamp")
+    lastModified: Optional[float] = Field(
+        default=None,
+        description="Last modification timestamp",
+        json_schema_extra={"frontend_type": "timestamp"},
+    )
     metadata: Dict[str, Any] = Field(default_factory=dict, description="Provider-specific metadata")
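The expanded sourceType description above enumerates the supported connector kinds. A sketch of that vocabulary as a checkable set (the real model keeps sourceType a plain str; `SOURCE_TYPES` and `isKnownSourceType` are illustrative, not part of the changeset):

    # Sketch only: the sourceType values listed in the new description.
    SOURCE_TYPES = {
        "sharepointFolder", "onedriveFolder", "googleDriveFolder",
        "outlookFolder", "gmailFolder", "ftpFolder", "clickupList",
        "kdriveFolder", "calendarFolder", "contactFolder",
    }

    def isKnownSourceType(value: str) -> bool:
        return value in SOURCE_TYPES

    assert isKnownSourceType("kdriveFolder")
    assert not isKnownSourceType("dropboxFolder")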
@@ -4,10 +4,13 @@
 Document reference models for typed document references in workflows.
 """

-from typing import List, Optional
+import logging
+from typing import Any, List, Optional
 from pydantic import BaseModel, Field
 from modules.shared.i18nRegistry import i18nModel

+logger = logging.getLogger(__name__)
+

 class DocumentReference(BaseModel):
     """Base class for document references"""
@@ -107,11 +110,104 @@ class DocumentReferenceList(BaseModel):
                 # docItem:documentId
                 references.append(DocumentItemReference(documentId=parts[0]))

            # Unknown format - skip or log warning
            else:
                # Try to parse as simple string (backward compatibility)
                # Assume it's a label if it doesn't match known patterns
-                if refStr:
-                    references.append(DocumentListReference(label=refStr))
+                if not refStr:
+                    continue
+                import re
+                if re.match(r'^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$', refStr, re.I):
+                    references.append(DocumentItemReference(documentId=refStr))
+                else:
+                    references.append(DocumentListReference(label=refStr))

         return cls(references=references)
+
+
+def coerceDocumentReferenceList(value: Any) -> DocumentReferenceList:
+    """Tolerant coercion of any agent/UI-supplied document list to
+    :class:`DocumentReferenceList`.
+
+    Accepts the canonical formats plus the dict-wrapper shapes that
+    LLM tool-callers tend to generate when they see a
+    ``type=DocumentList`` parameter:
+
+    * ``None`` / ``""`` -> empty list
+    * :class:`DocumentReferenceList` -> as-is
+    * ``str`` -> single-element string list
+    * ``list[str]`` -> :meth:`from_string_list`
+    * ``list[dict]`` with ``id`` or ``documentId`` -> item references
+    * ``{"documents": [...]}`` / ``{"references": [...]}`` ->
+      recurse into the inner list (this is the shape LLMs love)
+    * ``{"id": "..."}`` / ``{"documentId": "..."}`` -> single
+      item reference
+    * any unrecognised input -> empty list with a WARN log; never
+      raises (the caller decides whether an empty list is fatal).
+    """
+    if value is None or value == "":
+        return DocumentReferenceList(references=[])
+    if isinstance(value, DocumentReferenceList):
+        return value
+    if isinstance(value, str):
+        return DocumentReferenceList.from_string_list([value])
+
+    if isinstance(value, dict):
+        for innerKey in ("documents", "references", "items", "files"):
+            if innerKey in value and isinstance(value[innerKey], list):
+                return coerceDocumentReferenceList(value[innerKey])
+        docId = value.get("documentId") or value.get("id")
+        if docId:
+            docIdStr = str(docId)
+            if docIdStr.startswith("docItem:") or docIdStr.startswith("docList:"):
+                return DocumentReferenceList.from_string_list([docIdStr])
+            return DocumentReferenceList(references=[
+                DocumentItemReference(
+                    documentId=docIdStr,
+                    fileName=value.get("fileName") or value.get("name"),
+                )
+            ])
+        logger.warning(
+            f"coerceDocumentReferenceList: unsupported dict shape "
+            f"(keys={list(value.keys())}); returning empty list."
+        )
+        return DocumentReferenceList(references=[])
+
+    if isinstance(value, list):
+        if not value:
+            return DocumentReferenceList(references=[])
+        first = value[0]
+        if isinstance(first, str):
+            return DocumentReferenceList.from_string_list(value)
+        if isinstance(first, dict):
+            references: List[DocumentReference] = []
+            for item in value:
+                if not isinstance(item, dict):
+                    continue
+                docId = item.get("documentId") or item.get("id")
+                if docId:
+                    docIdStr = str(docId)
+                    if docIdStr.startswith("docItem:") or docIdStr.startswith("docList:"):
+                        parsed = DocumentReferenceList.from_string_list([docIdStr])
+                        references.extend(parsed.references)
+                    else:
+                        references.append(DocumentItemReference(
+                            documentId=docIdStr,
+                            fileName=item.get("fileName") or item.get("name"),
+                        ))
+                elif item.get("label"):
+                    references.append(DocumentListReference(
+                        label=str(item["label"]),
+                        messageId=item.get("messageId"),
+                    ))
+            return DocumentReferenceList(references=references)
+        # Mixed/object list (e.g. inline ActionDocument-like): caller
+        # must pre-handle that case before calling this coercer.
+        logger.warning(
+            f"coerceDocumentReferenceList: list element type "
+            f"{type(first).__name__} not recognised; returning empty list."
+        )
+        return DocumentReferenceList(references=[])
+
+    logger.warning(
+        f"coerceDocumentReferenceList: unsupported value type "
+        f"{type(value).__name__}; returning empty list."
+    )
+    return DocumentReferenceList(references=[])
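A usage sketch for the coercer added above; the input shapes and return behaviour follow its docstring, while the concrete ids and file names are invented for illustration:

    # Usage sketch for coerceDocumentReferenceList (shapes from the docstring above).
    refs = coerceDocumentReferenceList({"documents": [{"id": "123", "name": "a.pdf"}]})
    assert len(refs.references) == 1          # dict wrapper unwrapped to one item ref

    assert coerceDocumentReferenceList(None).references == []   # None -> empty list
    refs = coerceDocumentReferenceList("docItem:123")            # canonical string form
    refs = coerceDocumentReferenceList(42)    # unsupported type -> empty list + WARN log
    assert refs.references == []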
@@ -95,7 +95,14 @@ class ExtractionOptions(BaseModel):
     imageQuality: int = Field(default=85, ge=1, le=100, description="Image quality (1-100)")

     # Merging strategy
-    mergeStrategy: MergeStrategy = Field(default_factory=MergeStrategy, description="Strategy for merging extraction results")
+    mergeStrategy: Optional[MergeStrategy] = Field(
+        default_factory=MergeStrategy,
+        description=(
+            "Strategy for merging extraction results. Pass None to skip merging entirely "
+            "(required for per-chunk ingestion pipelines like RAG, where per-page/per-section "
+            "granularity must be preserved for embedding)."
+        ),
+    )

     # Optional chunking parameters (for backward compatibility)
     chunkAllowed: Optional[bool] = Field(default=None, description="Whether chunking is allowed")
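The hunk above makes mergeStrategy Optional so callers can disable merging explicitly. A minimal sketch of the two call patterns, assuming MergeStrategy stays a default-constructible model:

    # Hedged sketch: per-chunk extraction for a RAG pipeline skips merging entirely.
    options = ExtractionOptions(mergeStrategy=None)   # keep per-page/per-section chunks
    assert options.mergeStrategy is None

    defaults = ExtractionOptions()                    # default_factory still applies
    assert defaults.mergeStrategy is not None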
@@ -23,11 +23,11 @@ class FeatureDataSource(PowerOnModel):
     )
     featureInstanceId: str = Field(
         description="FK to FeatureInstance",
-        json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
+        json_schema_extra={"label": "Feature-Instanz", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}},
     )
     featureCode: str = Field(
         description="Feature code (e.g. trustee, commcoach)",
-        json_schema_extra={"label": "Feature", "fk_target": {"db": "poweron_app", "table": "Feature", "column": "code"}},
+        json_schema_extra={"label": "Feature", "fk_target": {"db": "poweron_app", "table": "Feature", "column": "code", "labelField": "code"}},
     )
     tableName: str = Field(
         description="Table name from DATA_OBJECTS meta (e.g. TrusteePosition)",
@@ -44,16 +44,16 @@ class FeatureDataSource(PowerOnModel):
     mandateId: str = Field(
         default="",
         description="Mandate scope",
-        json_schema_extra={"label": "Mandant", "fk_target": {"db": "poweron_app", "table": "Mandate"}},
+        json_schema_extra={"label": "Mandant", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}},
     )
     userId: str = Field(
         default="",
         description="Owner user ID",
-        json_schema_extra={"label": "Benutzer", "fk_target": {"db": "poweron_app", "table": "User"}},
+        json_schema_extra={"label": "Benutzer", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"}},
     )
     workspaceInstanceId: str = Field(
         description="Workspace feature instance where this source is used",
-        json_schema_extra={"label": "Workspace", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
+        json_schema_extra={"label": "Workspace", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}},
     )
     scope: str = Field(
         default="personal",
@@ -43,7 +43,7 @@ class FeatureInstance(PowerOnModel):
             "frontend_type": "select",
             "frontend_readonly": True,
             "frontend_required": True,
-            "fk_target": {"db": "poweron_app", "table": "Feature", "column": "code"},
+            "fk_target": {"db": "poweron_app", "table": "Feature", "column": "code", "labelField": "code"},
         },
     )
     mandateId: str = Field(
@@ -53,7 +53,7 @@ class FeatureInstance(PowerOnModel):
             "frontend_type": "text",
             "frontend_readonly": True,
             "frontend_required": True,
-            "fk_target": {"db": "poweron_app", "table": "Mandate"},
+            "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
         },
     )
     label: str = Field(
@@ -1,82 +0,0 @@
-# Copyright (c) 2025 Patrick Motsch
-# All rights reserved.
-"""FileFolder: hierarchical folder structure for file organization."""
-
-from typing import Optional
-from pydantic import BaseModel, Field
-from modules.datamodels.datamodelBase import PowerOnModel
-from modules.shared.i18nRegistry import i18nModel
-import uuid
-
-
-@i18nModel("Dateiordner")
-class FileFolder(PowerOnModel):
-    """Hierarchischer Ordner fuer die Dateiverwaltung."""
-    id: str = Field(
-        default_factory=lambda: str(uuid.uuid4()),
-        description="Primary key",
-        json_schema_extra={"label": "ID", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False},
-    )
-    name: str = Field(
-        description="Folder name",
-        json_schema_extra={"label": "Name", "frontend_type": "text", "frontend_readonly": False, "frontend_required": True},
-    )
-    parentId: Optional[str] = Field(
-        default=None,
-        description="Parent folder ID (null = root)",
-        json_schema_extra={
-            "label": "Uebergeordneter Ordner",
-            "frontend_type": "text",
-            "frontend_readonly": False,
-            "frontend_required": False,
-            "fk_target": {"db": "poweron_management", "table": "FileFolder"},
-        },
-    )
-    mandateId: Optional[str] = Field(
-        default=None,
-        description="Mandate context",
-        json_schema_extra={
-            "label": "Mandanten-ID",
-            "frontend_type": "text",
-            "frontend_readonly": True,
-            "frontend_required": False,
-            "fk_target": {"db": "poweron_app", "table": "Mandate"},
-        },
-    )
-    featureInstanceId: Optional[str] = Field(
-        default=None,
-        description="Feature instance context",
-        json_schema_extra={
-            "label": "Feature-Instanz-ID",
-            "frontend_type": "text",
-            "frontend_readonly": True,
-            "frontend_required": False,
-            "fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
-        },
-    )
-    scope: str = Field(
-        default="personal",
-        description="Data visibility scope: personal, featureInstance, mandate, global. Inherited by files in this folder.",
-        json_schema_extra={
-            "label": "Sichtbarkeit",
-            "frontend_type": "select",
-            "frontend_readonly": False,
-            "frontend_required": False,
-            "frontend_options": [
-                {"value": "personal", "label": "Persönlich"},
-                {"value": "featureInstance", "label": "Feature-Instanz"},
-                {"value": "mandate", "label": "Mandant"},
-                {"value": "global", "label": "Global"},
-            ],
-        },
-    )
-    neutralize: bool = Field(
-        default=False,
-        description="Whether files in this folder should be neutralized before AI processing. Inherited by new/moved files.",
-        json_schema_extra={
-            "label": "Neutralisieren",
-            "frontend_type": "checkbox",
-            "frontend_readonly": False,
-            "frontend_required": False,
-        },
-    )
@@ -10,6 +10,69 @@ import uuid
 import base64


+@i18nModel("Ordner")
+class FileFolder(PowerOnModel):
+    """Persistenter Datei-Ordner im Management-DB-Kontext (RBAC wie FileItem)."""
+
+    id: str = Field(
+        default_factory=lambda: str(uuid.uuid4()),
+        description="Primary key",
+        json_schema_extra={"label": "ID", "frontend_type": "text", "frontend_readonly": True, "frontend_required": False},
+    )
+    name: str = Field(
+        description="Display name of the folder",
+        json_schema_extra={"label": "Name", "frontend_type": "text", "frontend_readonly": False, "frontend_required": True},
+    )
+    parentId: Optional[str] = Field(
+        default=None,
+        description="Parent folder id; empty or None for root",
+        json_schema_extra={
+            "label": "Uebergeordneter Ordner",
+            "frontend_type": "text",
+            "frontend_readonly": False,
+            "frontend_required": False,
+            "fk_target": {"db": "poweron_management", "table": "FileFolder", "labelField": "name"},
+        },
+    )
+    mandateId: Optional[str] = Field(
+        default="",
+        description="ID of the mandate this folder belongs to",
+        json_schema_extra={
+            "label": "Mandant",
+            "frontend_type": "text",
+            "frontend_readonly": True,
+            "frontend_required": False,
+            "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
+        },
+    )
+    featureInstanceId: Optional[str] = Field(
+        default="",
+        description="ID of the feature instance this folder belongs to",
+        json_schema_extra={
+            "label": "Feature-Instanz",
+            "frontend_type": "text",
+            "frontend_readonly": True,
+            "frontend_required": False,
+            "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
+        },
+    )
+    scope: str = Field(
+        default="personal",
+        description="Data visibility scope: personal, featureInstance, mandate, global",
+        json_schema_extra={"label": "Sichtbarkeit", "frontend_type": "select", "frontend_readonly": False, "frontend_required": False, "frontend_options": [
+            {"value": "personal", "label": "Persönlich"},
+            {"value": "featureInstance", "label": "Feature-Instanz"},
+            {"value": "mandate", "label": "Mandant"},
+            {"value": "global", "label": "Global"},
+        ]},
+    )
+    neutralize: bool = Field(
+        default=False,
+        description="Whether files in this folder should be neutralized before AI processing",
+        json_schema_extra={"label": "Neutralisieren", "frontend_type": "checkbox", "frontend_readonly": False, "frontend_required": False},
+    )
+
+
 @i18nModel("Datei")
 class FileItem(PowerOnModel):
     """Metadaten einer gespeicherten Datei."""
@@ -30,10 +93,7 @@ class FileItem(PowerOnModel):
             "frontend_type": "text",
             "frontend_readonly": True,
             "frontend_required": False,
-            "frontend_fk_source": "/api/mandates/",
-            "frontend_fk_display_field": "label",
-            "fk_model": "Mandate",
-            "fk_target": {"db": "poweron_app", "table": "Mandate"},
+            "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
         },
     )
     featureInstanceId: Optional[str] = Field(
@@ -44,10 +104,18 @@ class FileItem(PowerOnModel):
             "frontend_type": "text",
             "frontend_readonly": True,
             "frontend_required": False,
-            "frontend_fk_source": "/api/features/instances",
-            "frontend_fk_display_field": "label",
-            "fk_model": "FeatureInstance",
-            "fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
+            "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
         },
     )
+    folderId: Optional[str] = Field(
+        default=None,
+        description="ID of the folder containing this file (if any)",
+        json_schema_extra={
+            "label": "Ordner",
+            "frontend_type": "text",
+            "frontend_readonly": False,
+            "frontend_required": False,
+            "fk_target": {"db": "poweron_management", "table": "FileFolder", "labelField": "name"},
+        },
+    )
     mimeType: str = Field(
@@ -74,17 +142,6 @@ class FileItem(PowerOnModel):
         description="Tags for categorization and search",
         json_schema_extra={"label": "Tags", "frontend_type": "tags", "frontend_readonly": False, "frontend_required": False},
     )
-    folderId: Optional[str] = Field(
-        default=None,
-        description="ID of the parent folder",
-        json_schema_extra={
-            "label": "Ordner-ID",
-            "frontend_type": "text",
-            "frontend_readonly": False,
-            "frontend_required": False,
-            "fk_target": {"db": "poweron_management", "table": "FileFolder"},
-        },
-    )
     description: Optional[str] = Field(
         default=None,
         description="User-provided description of the file",
@@ -5,10 +5,11 @@ Invitation model for self-service onboarding.
 Token-basierte Einladungen für neue User zu Mandanten/Features.
 """

+import time
 import uuid
 import secrets
 from typing import Optional, List
-from pydantic import BaseModel, Field
+from pydantic import BaseModel, Field, computed_field
 from modules.datamodels.datamodelBase import PowerOnModel
 from modules.shared.i18nRegistry import i18nModel

@@ -37,7 +38,7 @@ class Invitation(PowerOnModel):
             "frontend_type": "text",
             "frontend_readonly": True,
             "frontend_required": True,
-            "fk_target": {"db": "poweron_app", "table": "Mandate"},
+            "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
         },
     )
     featureInstanceId: Optional[str] = Field(
@@ -48,7 +49,7 @@ class Invitation(PowerOnModel):
             "frontend_type": "text",
             "frontend_readonly": True,
             "frontend_required": False,
-            "fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
+            "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
         },
     )
     roleIds: List[str] = Field(
@@ -80,7 +81,7 @@ class Invitation(PowerOnModel):
             "frontend_type": "text",
             "frontend_readonly": True,
             "frontend_required": False,
-            "fk_target": {"db": "poweron_app", "table": "User"},
+            "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
         },
     )
     usedAt: Optional[float] = Field(
@@ -94,10 +95,26 @@ class Invitation(PowerOnModel):
         json_schema_extra={"label": "Widerrufen am", "frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False}
     )

-    emailSent: Optional[bool] = Field(
+    emailSentFlag: Optional[bool] = Field(
         default=False,
         description="Whether the invitation email was successfully sent",
-        json_schema_extra={"label": "E-Mail gesendet", "frontend_type": "checkbox", "frontend_readonly": True, "frontend_required": False}
+        json_schema_extra={
+            "label": "E-Mail gesendet",
+            "frontend_type": "checkbox",
+            "frontend_readonly": True,
+            "frontend_required": False,
+            "frontend_format_labels": ["Ja", "-", "Nein"],
+        },
+    )
+    emailSentAt: Optional[float] = Field(
+        default=None,
+        description="Timestamp when the invitation email was sent (UTC, seconds)",
+        json_schema_extra={
+            "label": "E-Mail gesendet am",
+            "frontend_type": "timestamp",
+            "frontend_readonly": True,
+            "frontend_required": False,
+        },
    )

     maxUses: int = Field(
@@ -113,3 +130,33 @@ class Invitation(PowerOnModel):
         description="Current number of times this invitation has been used",
         json_schema_extra={"label": "Aktuelle Verwendungen", "frontend_type": "number", "frontend_readonly": True, "frontend_required": False}
     )
+
+    @computed_field(  # type: ignore[prop-decorator]
+        json_schema_extra={
+            "label": "Abgelaufen",
+            "frontend_type": "checkbox",
+            "frontend_readonly": True,
+            "frontend_required": False,
+            "frontend_format_labels": ["Ja", "-", "Nein"],
+        },
+    )
+    @property
+    def expiredFlag(self) -> bool:
+        """True iff `expiresAt` lies in the past (UTC)."""
+        if self.expiresAt is None:
+            return False
+        return float(self.expiresAt) < time.time()
+
+    @computed_field(  # type: ignore[prop-decorator]
+        json_schema_extra={
+            "label": "Verbraucht",
+            "frontend_type": "checkbox",
+            "frontend_readonly": True,
+            "frontend_required": False,
+            "frontend_format_labels": ["Ja", "-", "Nein"],
+        },
+    )
+    @property
+    def usedUpFlag(self) -> bool:
+        """True iff `currentUses >= maxUses`."""
+        return (self.currentUses or 0) >= (self.maxUses or 1)
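The computed flags added above surface in serialization: pydantic v2 includes `@computed_field` properties in `model_dump()` and JSON output, so the admin table can render them without extra queries. A self-contained sketch of that behaviour (the `Demo` model is illustrative, not from the changeset):

    # Sketch of computed_field serialization semantics (pydantic v2).
    import time
    from typing import Optional
    from pydantic import BaseModel, computed_field

    class Demo(BaseModel):
        expiresAt: Optional[float] = None

        @computed_field  # serialized like a regular field in model_dump()
        @property
        def expiredFlag(self) -> bool:
            return self.expiresAt is not None and float(self.expiresAt) < time.time()

    assert Demo(expiresAt=1.0).model_dump()["expiredFlag"] is True
    assert Demo().model_dump()["expiredFlag"] is False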
@@ -6,7 +6,7 @@ Unified JSON document schema and helpers used by both generation prompts and ren
 This defines a single canonical template and the supported section types.
 """

-from typing import List
+from typing import List, Literal, TypedDict

 # Canonical list of supported section types across the system
 supportedSectionTypes: List[str] = [
@@ -18,6 +18,21 @@ supportedSectionTypes: List[str] = [
     "image",
 ]

+class InlineRun(TypedDict, total=False):
+    """Single inline content run. Every paragraph/cell/list-item is a List[InlineRun]."""
+    type: Literal["text", "image", "link", "bold", "italic", "code"]
+    value: str        # text content (for text/bold/italic/code/link-label)
+    fileId: str       # for type=image: reference to FileItem
+    base64Data: str   # for type=image: resolved base64 (post-processing)
+    mimeType: str     # for type=image: e.g. "image/png"
+    widthPt: int      # for type=image: optional render width
+    href: str         # for type=link: URL target
+
+supportedInlineRunTypes: List[str] = [
+    "text", "image", "link", "bold", "italic", "code",
+]
+
+
 # Canonical JSON template used for AI generation (documents array + sections)
 # This template is used for STRUCTURE generation - sections have empty elements arrays.
 # For content generation, elements arrays will be populated later.
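A sketch of a paragraph expressed as a List[InlineRun] under the schema added above; the file id and URL are invented placeholders:

    # Sketch: one paragraph as a List[InlineRun]. "file-123" and the URL are
    # illustrative values, not real identifiers.
    paragraph = [
        {"type": "text", "value": "See the "},
        {"type": "link", "value": "handbook", "href": "https://example.com/doc"},
        {"type": "text", "value": " for details; "},
        {"type": "code", "value": "mergeStrategy=None"},
        {"type": "image", "fileId": "file-123", "mimeType": "image/png", "widthPt": 120},
    ]
    assert all(run["type"] in supportedInlineRunTypes for run in paragraph)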
@@ -30,17 +30,17 @@ class FileContentIndex(PowerOnModel):
     )
     userId: str = Field(
         description="Owner user ID",
-        json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "User"}},
+        json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"}},
     )
     featureInstanceId: str = Field(
         default="",
         description="Feature instance scope",
-        json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
+        json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}},
     )
     mandateId: str = Field(
         default="",
         description="Mandate scope",
-        json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate"}},
+        json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}},
     )
     fileName: str = Field(
         description="Original file name",
@@ -78,7 +78,7 @@ class FileContentIndex(PowerOnModel):
     extractedAt: float = Field(
         default_factory=getUtcTimestamp,
         description="Extraction timestamp",
-        json_schema_extra={"label": "Extrahiert am"},
+        json_schema_extra={"label": "Extrahiert am", "frontend_type": "timestamp"},
     )
     status: str = Field(
         default="pending",
@@ -90,6 +90,16 @@ class FileContentIndex(PowerOnModel):
         description="Data visibility scope: personal, featureInstance, mandate, global",
         json_schema_extra={"label": "Sichtbarkeit"},
     )
+    sourceKind: str = Field(
+        default="file",
+        description="Origin of the indexed content: file, sharepoint_item, outlook_message, outlook_attachment, ...",
+        json_schema_extra={"label": "Quellenart"},
+    )
+    connectionId: Optional[str] = Field(
+        default=None,
+        description="UserConnection ID if this index entry originates from an external connector",
+        json_schema_extra={"label": "Connection-ID"},
+    )
     neutralizationStatus: Optional[str] = Field(
         default=None,
         description="Neutralization status: completed, failed, skipped, None = not required",
@@ -116,16 +126,16 @@ class ContentChunk(PowerOnModel):
     )
     fileId: str = Field(
         description="FK to the source file",
-        json_schema_extra={"label": "Datei-ID", "fk_target": {"db": "poweron_management", "table": "FileItem"}},
+        json_schema_extra={"label": "Datei-ID", "fk_target": {"db": "poweron_management", "table": "FileItem", "labelField": "fileName"}},
     )
     userId: str = Field(
         description="Owner user ID",
-        json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "User"}},
+        json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"}},
     )
     featureInstanceId: str = Field(
         default="",
         description="Feature instance scope",
-        json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
+        json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}},
     )
     contentType: str = Field(
         description="Content type: text, image, videostream, audiostream, other",
@@ -214,16 +224,16 @@ class WorkflowMemory(PowerOnModel):
     )
     workflowId: str = Field(
         description="FK to the workflow",
-        json_schema_extra={"label": "Workflow-ID", "fk_target": {"db": "poweron_chat", "table": "ChatWorkflow"}},
+        json_schema_extra={"label": "Workflow-ID", "fk_target": {"db": "poweron_chat", "table": "ChatWorkflow", "labelField": "name"}},
     )
     userId: str = Field(
         description="Owner user ID",
-        json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "User"}},
+        json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"}},
     )
     featureInstanceId: str = Field(
         default="",
         description="Feature instance scope",
-        json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
+        json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}},
     )
     key: str = Field(
         description="Key identifier (e.g. 'entity:companyName')",
@@ -31,10 +31,7 @@ class UserMandate(PowerOnModel):
             "frontend_type": "select",
             "frontend_readonly": False,
             "frontend_required": True,
-            "frontend_fk_source": "/api/users/",
-            "frontend_fk_display_field": "username",
-            "fk_model": "User",
-            "fk_target": {"db": "poweron_app", "table": "User"},
+            "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
         },
     )
     mandateId: str = Field(
@@ -44,10 +41,7 @@ class UserMandate(PowerOnModel):
             "frontend_type": "select",
             "frontend_readonly": False,
             "frontend_required": True,
-            "frontend_fk_source": "/api/mandates/",
-            "frontend_fk_display_field": "label",
-            "fk_model": "Mandate",
-            "fk_target": {"db": "poweron_app", "table": "Mandate"},
+            "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
         },
     )
     enabled: bool = Field(
@@ -75,9 +69,7 @@ class FeatureAccess(PowerOnModel):
             "frontend_type": "select",
             "frontend_readonly": False,
             "frontend_required": True,
-            "frontend_fk_source": "/api/users/",
-            "frontend_fk_display_field": "username",
-            "fk_target": {"db": "poweron_app", "table": "User"},
+            "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
         },
     )
     featureInstanceId: str = Field(
@@ -87,9 +79,7 @@ class FeatureAccess(PowerOnModel):
             "frontend_type": "select",
             "frontend_readonly": False,
             "frontend_required": True,
-            "frontend_fk_source": "/api/features/instances",
-            "frontend_fk_display_field": "label",
-            "fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
+            "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
         },
     )
     enabled: bool = Field(
@@ -117,7 +107,7 @@ class UserMandateRole(PowerOnModel):
             "frontend_type": "text",
             "frontend_readonly": True,
             "frontend_required": True,
-            "fk_target": {"db": "poweron_app", "table": "UserMandate"},
+            "fk_target": {"db": "poweron_app", "table": "UserMandate", "labelField": None},
         },
     )
     roleId: str = Field(
@@ -127,9 +117,7 @@ class UserMandateRole(PowerOnModel):
             "frontend_type": "select",
             "frontend_readonly": False,
             "frontend_required": True,
-            "frontend_fk_source": "/api/rbac/roles",
-            "frontend_fk_display_field": "roleLabel",
-            "fk_target": {"db": "poweron_app", "table": "Role"},
+            "fk_target": {"db": "poweron_app", "table": "Role", "labelField": "roleLabel"},
         },
     )

@@ -152,7 +140,7 @@ class FeatureAccessRole(PowerOnModel):
             "frontend_type": "text",
             "frontend_readonly": True,
             "frontend_required": True,
-            "fk_target": {"db": "poweron_app", "table": "FeatureAccess"},
+            "fk_target": {"db": "poweron_app", "table": "FeatureAccess", "labelField": None},
         },
     )
     roleId: str = Field(
@@ -162,8 +150,6 @@ class FeatureAccessRole(PowerOnModel):
             "frontend_type": "select",
             "frontend_readonly": False,
             "frontend_required": True,
-            "frontend_fk_source": "/api/rbac/roles",
-            "frontend_fk_display_field": "roleLabel",
-            "fk_target": {"db": "poweron_app", "table": "Role"},
+            "fk_target": {"db": "poweron_app", "table": "Role", "labelField": "roleLabel"},
         },
     )
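The RBAC hunks above collapse the `frontend_fk_source` / `frontend_fk_display_field` / `fk_model` trio into a single `fk_target`. A hedged sketch of deriving the retired hints from the new shape; the endpoint values come from the removed lines, but the mapping function itself is hypothetical:

    # Hedged sketch: legacy FK hints derived from fk_target. Only fk_target is
    # defined by this changeset; legacyHints/ENDPOINTS are illustrative.
    ENDPOINTS = {
        ("poweron_app", "UserInDB"): "/api/users/",
        ("poweron_app", "Mandate"): "/api/mandates/",
        ("poweron_app", "Role"): "/api/rbac/roles",
    }

    def legacyHints(fkTarget: dict) -> dict:
        return {
            "frontend_fk_source": ENDPOINTS.get((fkTarget["db"], fkTarget["table"])),
            "frontend_fk_display_field": fkTarget.get("labelField"),
        }

    hints = legacyHints({"db": "poweron_app", "table": "Mandate", "labelField": "label"})
    assert hints["frontend_fk_display_field"] == "label"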
@@ -64,7 +64,7 @@ class MessagingSubscription(PowerOnModel):
             "frontend_readonly": True,
             "frontend_required": False,
             "label": "Mandanten-ID",
-            "fk_target": {"db": "poweron_app", "table": "Mandate"},
+            "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
         },
     )
     featureInstanceId: str = Field(
@@ -74,7 +74,7 @@ class MessagingSubscription(PowerOnModel):
             "frontend_readonly": True,
             "frontend_required": False,
             "label": "Feature-Instanz-ID",
-            "fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
+            "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
         },
     )
     description: Optional[str] = Field(
@@ -131,7 +131,7 @@ class MessagingSubscriptionRegistration(BaseModel):
             "frontend_readonly": True,
             "frontend_required": False,
             "label": "Mandanten-ID",
-            "fk_target": {"db": "poweron_app", "table": "Mandate"},
+            "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
         },
     )
     featureInstanceId: str = Field(
@@ -141,7 +141,7 @@ class MessagingSubscriptionRegistration(BaseModel):
             "frontend_readonly": True,
             "frontend_required": False,
             "label": "Feature-Instanz-ID",
-            "fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
+            "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
         },
     )
     subscriptionId: str = Field(
@@ -160,7 +160,7 @@ class MessagingSubscriptionRegistration(BaseModel):
             "frontend_readonly": True,
             "frontend_required": False,
             "label": "Benutzer-ID",
-            "fk_target": {"db": "poweron_app", "table": "User"},
+            "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
         },
     )
     channel: MessagingChannel = Field(
@@ -249,7 +249,7 @@ class MessagingDelivery(BaseModel):
             "frontend_readonly": True,
             "frontend_required": False,
             "label": "Benutzer-ID",
-            "fk_target": {"db": "poweron_app", "table": "User"},
+            "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
         },
     )
     channel: MessagingChannel = Field(
@@ -296,7 +296,7 @@ class MessagingDelivery(BaseModel):
         default=None,
         description="When the delivery was sent (UTC timestamp in seconds)",
         json_schema_extra={
-            "frontend_type": "datetime",
+            "frontend_type": "timestamp",
             "frontend_readonly": True,
             "frontend_required": False,
             "label": "Gesendet am",
@@ -65,7 +65,7 @@ class UserNotification(PowerOnModel):
             "frontend_type": "text",
             "frontend_readonly": True,
             "frontend_required": True,
-            "fk_target": {"db": "poweron_app", "table": "User"},
+            "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
         },
     )

@ -9,14 +9,95 @@ All models use camelStyle naming convention for consistency with frontend.
|
|||
from typing import List, Dict, Any, Optional, Generic, TypeVar
|
||||
from pydantic import BaseModel, Field, ConfigDict
|
||||
import math
|
||||
import uuid
|
||||
|
||||
T = TypeVar('T')
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Group layout models (Strategy B — derived from Views, purely presentational)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
class GroupByLevel(BaseModel):
|
||||
"""One level of a multi-level grouping definition, stored inside a TableListView config."""
|
||||
field: str = Field(..., description="Field key to group by")
|
||||
nullLabel: str = Field(default="—", description="Display label for null/empty values")
|
||||
direction: str = Field(
|
||||
default="asc",
|
||||
description="Order of group bands at this level: 'asc' or 'desc'",
|
||||
)
|
||||
|
||||
|
||||
class GroupBand(BaseModel):
|
||||
"""
|
||||
A contiguous block of rows that share the same group path, intersecting the current page.
|
||||
|
||||
startRowIndex and rowCount are 0-based indices relative to the current page's items[].
|
||||
"""
|
||||
path: List[str] = Field(..., description="Hierarchical group key (one entry per level)")
|
||||
label: str = Field(..., description="Display label for this band (last path element)")
|
||||
startRowIndex: int = Field(..., description="0-based start index within items[] on this page")
|
||||
rowCount: int = Field(..., description="Number of items in this band on this page")
|
||||
|
||||
|
||||
class GroupLayout(BaseModel):
|
||||
"""
|
||||
Grouping structure for the current response page.
|
||||
Included only when the effective view has groupByLevels configured.
|
||||
The frontend renders group header rows by iterating bands and inserting
|
||||
headers before each startRowIndex.
|
||||
"""
|
||||
levels: List[str] = Field(..., description="Ordered field keys that define the grouping hierarchy")
|
||||
bands: List[GroupBand] = Field(..., description="Bands intersecting the current page, in order")
|
||||
|
||||
|
||||
class AppliedViewMeta(BaseModel):
|
||||
"""Minimal metadata about the view that was applied to this response."""
|
||||
viewKey: Optional[str] = None
|
||||
displayName: Optional[str] = None
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Persisted view model
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
class TableListView(BaseModel):
|
||||
"""
|
||||
A saved table view for one (userId, contextKey) pair.
|
||||
|
||||
config schema (schemaVersion=1):
|
||||
{
|
||||
"schemaVersion": 1,
|
||||
"filters": {}, # same structure as PaginationParams.filters
|
||||
"sort": [], # same structure as PaginationParams.sort
|
||||
"groupByLevels": [ # ordered grouping levels
|
||||
{"field": "scope", "nullLabel": "—", "direction": "asc"}
|
||||
],
|
||||
"collapsedSectionKeys": [], # optional: section UI (stable group keys)
|
||||
"collapsedGroupKeys": [], # optional: inline group bands (path.join('///'))
|
||||
}
|
||||
|
||||
contextKey convention: API path without /api/ prefix and without trailing slash.
|
||||
Examples: "connections", "prompts", "admin/users", "files/list"
|
||||
|
||||
viewKey is a user-defined slug, unique per (userId, mandateId, contextKey).
|
||||
"""
|
||||
id: str = Field(default_factory=lambda: str(uuid.uuid4()))
|
||||
userId: str
|
||||
mandateId: Optional[str] = None
|
||||
contextKey: str
|
||||
viewKey: str
|
||||
displayName: str
|
||||
config: Dict[str, Any] = Field(default_factory=dict)
|
||||
updatedAt: Optional[float] = None
|
||||
|
||||
|
||||
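# Example (sketch): a saved view matching the schemaVersion=1 config documented
# above. The user, context, and field values are illustrative only.
exampleView = TableListView(
    userId="u-123",
    contextKey="connections",  # API path without /api/ prefix, no trailing slash
    viewKey="by-scope",
    displayName="Grouped by scope",
    config={
        "schemaVersion": 1,
        "filters": {"search": "gmail"},
        "sort": [{"field": "label", "direction": "asc"}],
        "groupByLevels": [{"field": "scope", "nullLabel": "—", "direction": "asc"}],
        "collapsedSectionKeys": [],
        "collapsedGroupKeys": [],
    },
)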
# ---------------------------------------------------------------------------
|
||||
# Sort and pagination models
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
class SortField(BaseModel):
|
||||
"""
|
||||
Single sort field configuration.
|
||||
"""
|
||||
"""Single sort field configuration."""
|
||||
field: str = Field(..., description="Field name to sort by")
|
||||
direction: str = Field(..., description="Sort direction: 'asc' or 'desc'")
|
||||
|
||||
|
|
@ -24,6 +105,14 @@ class SortField(BaseModel):
|
|||
class PaginationParams(BaseModel):
|
||||
"""
|
||||
Complete pagination state including page, sorting, and filters.
|
||||
|
||||
View extension (optional):
|
||||
viewKey — Slug of a saved TableListView for this (user, contextKey) pair.
|
||||
The server loads the view, merges its filters/sort/groupByLevels
|
||||
into the effective query (request fields take priority over view
|
||||
defaults for explicitly provided fields), and returns groupLayout
|
||||
in the response when groupByLevels is non-empty.
|
||||
Omit or set to None for the default (ungrouped) view.
|
||||
"""
|
||||
page: int = Field(ge=1, description="Current page number (1-based)")
|
||||
pageSize: int = Field(ge=1, le=1000, description="Number of items per page")
|
||||
|
|
@ -38,6 +127,17 @@ class PaginationParams(BaseModel):
|
|||
- Supported operators: equals/eq, contains, startsWith, endsWith, gt, gte, lt, lte, in, notIn
|
||||
- Multiple filters are combined with AND logic"""
|
||||
)
|
||||
viewKey: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Slug of a saved view to load; server merges view config into effective query",
|
||||
)
|
||||
groupByLevels: Optional[List[GroupByLevel]] = Field(
|
||||
default=None,
|
||||
description=(
|
||||
"When set (including an empty list), replaces the saved view's groupByLevels for this request. "
|
||||
"Omit entirely to use grouping from the view only."
|
||||
),
|
||||
)
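# Example (sketch): the merge semantics described above, shown with illustrative
# request payloads.
#   {"page": 1, "pageSize": 25, "viewKey": "by-scope"}
#       -> apply the saved view's filters/sort/groupByLevels as defaults.
#   {"page": 1, "pageSize": 25, "viewKey": "by-scope", "groupByLevels": []}
#       -> keep the view's filters/sort but disable grouping for this request.
#   Omitting groupByLevels entirely leaves the view's grouping in effect.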
|
||||
|
||||
|
||||
class PaginationRequest(BaseModel):
|
||||
|
|
@ -74,9 +174,24 @@ class PaginationMetadata(BaseModel):
|
|||
class PaginatedResponse(BaseModel, Generic[T]):
|
||||
"""
|
||||
Response containing paginated data and metadata.
|
||||
|
||||
groupLayout is included when the effective view has groupByLevels configured.
|
||||
It describes how to render group header rows in the current page's items[].
|
||||
Omitted (None) when no grouping is active.
|
||||
|
||||
appliedView describes which saved view was merged into this response,
|
||||
allowing the frontend to synchronise its view selector.
|
||||
"""
|
||||
items: List[T] = Field(..., description="Array of items for current page")
|
||||
pagination: Optional[PaginationMetadata] = Field(..., description="Pagination metadata (None if pagination not applied)")
|
||||
groupLayout: Optional[GroupLayout] = Field(
|
||||
default=None,
|
||||
description="Group band structure for this page (None if no grouping active)",
|
||||
)
|
||||
appliedView: Optional[AppliedViewMeta] = Field(
|
||||
default=None,
|
||||
description="Metadata about the view applied to this response",
|
||||
)
|
||||
|
||||
model_config = ConfigDict(arbitrary_types_allowed=True)
|
||||
|
||||
|
|
@ -84,30 +199,30 @@ class PaginatedResponse(BaseModel, Generic[T]):
|
|||
def normalize_pagination_dict(pagination_dict: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""
|
||||
Normalize pagination dictionary to handle frontend variations.
|
||||
Moves top-level "search" field into filters if present.
|
||||
|
||||
Args:
|
||||
pagination_dict: Raw pagination dictionary from frontend
|
||||
|
||||
Returns:
|
||||
Normalized pagination dictionary ready for PaginationParams parsing
|
||||
- Moves top-level "search" field into filters if present.
|
||||
- Silently drops legacy fields (groupId, saveGroupTree) that were part of the
|
||||
old tree-grouping implementation so old clients do not cause validation errors.
|
||||
- Passes viewKey through unchanged.
|
||||
"""
|
||||
if not pagination_dict:
|
||||
return pagination_dict
|
||||
|
||||
# Create a copy to avoid modifying the original
|
||||
normalized = dict(pagination_dict)
|
||||
|
||||
# Ensure required fields have sensible defaults
|
||||
if "page" not in normalized:
|
||||
normalized["page"] = 1
|
||||
if "pageSize" not in normalized:
|
||||
normalized["pageSize"] = 25
|
||||
|
||||
# Move top-level "search" into filters if present
|
||||
# Move top-level "search" into filters
|
||||
if "search" in normalized:
|
||||
if "filters" not in normalized or normalized["filters"] is None:
|
||||
normalized["filters"] = {}
|
||||
normalized["filters"]["search"] = normalized.pop("search")
|
||||
|
||||
# Drop legacy tree-grouping fields — harmless if already absent
|
||||
normalized.pop("groupId", None)
|
||||
normalized.pop("saveGroupTree", None)
|
||||
|
||||
return normalized
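# Example (sketch): a legacy payload normalised before PaginationParams parsing.
# The input values are illustrative only.
rawPayload = {"page": 2, "search": "gmail", "groupId": "legacy", "viewKey": "by-scope"}
normalizedPayload = normalize_pagination_dict(rawPayload)
# -> {"page": 2, "pageSize": 25, "filters": {"search": "gmail"}, "viewKey": "by-scope"}
params = PaginationParams(**normalizedPayload)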
|
@ -63,9 +63,7 @@ class Role(PowerOnModel):
|
|||
"frontend_readonly": True,
|
||||
"frontend_visible": True,
|
||||
"frontend_required": False,
|
||||
"frontend_fk_source": "/api/mandates/",
|
||||
"frontend_fk_display_field": "label",
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
featureInstanceId: Optional[str] = Field(
|
||||
|
|
@ -77,9 +75,7 @@ class Role(PowerOnModel):
|
|||
"frontend_readonly": True,
|
||||
"frontend_visible": True,
|
||||
"frontend_required": False,
|
||||
"frontend_fk_source": "/api/features/instances",
|
||||
"frontend_fk_display_field": "label",
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
|
||||
"fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
featureCode: Optional[str] = Field(
|
||||
|
|
@ -115,9 +111,7 @@ class AccessRule(PowerOnModel):
|
|||
"frontend_type": "select",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": True,
|
||||
"frontend_fk_source": "/api/rbac/roles",
|
||||
"frontend_fk_display_field": "roleLabel",
|
||||
"fk_target": {"db": "poweron_app", "table": "Role"},
|
||||
"fk_target": {"db": "poweron_app", "table": "Role", "labelField": "roleLabel"},
|
||||
},
|
||||
)
|
||||
context: AccessRuleContext = Field(
|
||||
|
|
|
|||
|
|
@ -47,7 +47,7 @@ class Token(PowerOnModel):
|
|||
)
|
||||
userId: str = Field(
|
||||
...,
|
||||
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "User"}},
|
||||
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"}},
|
||||
)
|
||||
authority: AuthAuthority = Field(
|
||||
...,
|
||||
|
|
@ -56,7 +56,7 @@ class Token(PowerOnModel):
|
|||
connectionId: Optional[str] = Field(
|
||||
None,
|
||||
description="ID of the connection this token belongs to",
|
||||
json_schema_extra={"label": "Verbindungs-ID", "fk_target": {"db": "poweron_app", "table": "UserConnection"}},
|
||||
json_schema_extra={"label": "Verbindungs-ID", "fk_target": {"db": "poweron_app", "table": "UserConnection", "labelField": "externalUsername"}},
|
||||
)
|
||||
tokenPurpose: Optional[TokenPurpose] = Field(
|
||||
default=None,
|
||||
|
|
@ -73,7 +73,7 @@ class Token(PowerOnModel):
|
|||
)
|
||||
expiresAt: float = Field(
|
||||
description="When the token expires (UTC timestamp in seconds)",
|
||||
json_schema_extra={"label": "Laeuft ab am"},
|
||||
json_schema_extra={"label": "Laeuft ab am", "frontend_type": "timestamp"},
|
||||
)
|
||||
tokenRefresh: Optional[str] = Field(
|
||||
default=None,
|
||||
|
|
@ -87,12 +87,12 @@ class Token(PowerOnModel):
|
|||
revokedAt: Optional[float] = Field(
|
||||
None,
|
||||
description="When the token was revoked (UTC timestamp in seconds)",
|
||||
json_schema_extra={"label": "Widerrufen am"},
|
||||
json_schema_extra={"label": "Widerrufen am", "frontend_type": "timestamp"},
|
||||
)
|
||||
revokedBy: Optional[str] = Field(
|
||||
None,
|
||||
description="User ID who revoked the token (admin/self)",
|
||||
json_schema_extra={"label": "Widerrufen von", "fk_target": {"db": "poweron_app", "table": "User"}},
|
||||
json_schema_extra={"label": "Widerrufen von", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"}},
|
||||
)
|
||||
reason: Optional[str] = Field(
|
||||
None,
|
||||
|
|
@ -139,7 +139,7 @@ class AuthEvent(PowerOnModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": True,
|
||||
"fk_target": {"db": "poweron_app", "table": "User"},
|
||||
"fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
|
||||
},
|
||||
)
|
||||
eventType: str = Field(
|
||||
|
|
@ -149,7 +149,7 @@ class AuthEvent(PowerOnModel):
|
|||
timestamp: float = Field(
|
||||
default_factory=getUtcTimestamp,
|
||||
description="Unix timestamp when the event occurred",
|
||||
json_schema_extra={"label": "Zeitstempel", "frontend_type": "datetime", "frontend_readonly": True, "frontend_required": True},
|
||||
json_schema_extra={"label": "Zeitstempel", "frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": True},
|
||||
)
|
||||
ipAddress: Optional[str] = Field(
|
||||
default=None,
|
||||
|
|
|
|||
|
|
@ -6,7 +6,7 @@ StripePlanPrice (persisted Stripe IDs per plan).
|
|||
State Machine: see wiki/concepts/Subscription-State-Machine.md
|
||||
"""
|
||||
|
||||
from typing import Dict, List, Optional
|
||||
from typing import Any, Dict, List, Optional
|
||||
from enum import Enum
|
||||
from datetime import datetime, timezone
|
||||
from pydantic import BaseModel, Field
|
||||
|
|
@ -207,7 +207,7 @@ class MandateSubscription(PowerOnModel):
|
|||
mandateId: str = Field(
|
||||
...,
|
||||
description="Foreign key to Mandate",
|
||||
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate"}},
|
||||
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}},
|
||||
)
|
||||
planKey: str = Field(
|
||||
...,
|
||||
|
|
@ -226,35 +226,35 @@ class MandateSubscription(PowerOnModel):
|
|||
json_schema_extra={"label": "Wiederkehrend"},
|
||||
)
|
||||
|
||||
startedAt: datetime = Field(
|
||||
default_factory=lambda: datetime.now(timezone.utc),
|
||||
description="Record creation timestamp",
|
||||
json_schema_extra={"label": "Gestartet"},
|
||||
startedAt: float = Field(
|
||||
default_factory=lambda: datetime.now(timezone.utc).timestamp(),
|
||||
description="Record creation timestamp (UTC unix)",
|
||||
json_schema_extra={"label": "Gestartet", "frontend_type": "timestamp"},
|
||||
)
|
||||
effectiveFrom: Optional[datetime] = Field(
|
||||
effectiveFrom: Optional[float] = Field(
|
||||
None,
|
||||
description="When this subscription becomes operative. None = immediate. Set for SCHEDULED subs.",
|
||||
json_schema_extra={"label": "Wirksam ab"},
|
||||
description="When this subscription becomes operative (UTC unix). None = immediate.",
|
||||
json_schema_extra={"label": "Wirksam ab", "frontend_type": "timestamp"},
|
||||
)
|
||||
endedAt: Optional[datetime] = Field(
|
||||
endedAt: Optional[float] = Field(
|
||||
None,
|
||||
description="When subscription ended (terminal)",
|
||||
json_schema_extra={"label": "Beendet"},
|
||||
description="When subscription ended (UTC unix)",
|
||||
json_schema_extra={"label": "Beendet", "frontend_type": "timestamp"},
|
||||
)
|
||||
currentPeriodStart: Optional[datetime] = Field(
|
||||
currentPeriodStart: Optional[float] = Field(
|
||||
None,
|
||||
description="Current billing period start (synced from Stripe)",
|
||||
json_schema_extra={"label": "Periodenbeginn"},
|
||||
description="Current billing period start (UTC unix, synced from Stripe)",
|
||||
json_schema_extra={"label": "Periodenbeginn", "frontend_type": "timestamp"},
|
||||
)
|
||||
currentPeriodEnd: Optional[datetime] = Field(
|
||||
currentPeriodEnd: Optional[float] = Field(
|
||||
None,
|
||||
description="Current billing period end (synced from Stripe)",
|
||||
json_schema_extra={"label": "Periodenende"},
|
||||
description="Current billing period end (UTC unix, synced from Stripe)",
|
||||
json_schema_extra={"label": "Periodenende", "frontend_type": "timestamp"},
|
||||
)
|
||||
trialEndsAt: Optional[datetime] = Field(
|
||||
trialEndsAt: Optional[float] = Field(
|
||||
None,
|
||||
description="Trial expiry timestamp",
|
||||
json_schema_extra={"label": "Trial endet"},
|
||||
description="Trial expiry timestamp (UTC unix)",
|
||||
json_schema_extra={"label": "Trial endet", "frontend_type": "timestamp"},
|
||||
)
|
||||
|
||||
snapshotPricePerUserCHF: float = Field(
|
||||
|
|
@ -284,12 +284,63 @@ class MandateSubscription(PowerOnModel):
|
|||
json_schema_extra={"label": "Stripe-Item (Instanzen)"},
|
||||
)
|
||||
|
||||
# Enterprise subscription fields (custom limits, no Stripe billing)
|
||||
isEnterprise: bool = Field(
|
||||
default=False,
|
||||
description="True for enterprise subscriptions managed by sysadmin with flat pricing",
|
||||
json_schema_extra={"label": "Enterprise-Abo"},
|
||||
)
|
||||
enterpriseFlatPriceCHF: Optional[float] = Field(
|
||||
None,
|
||||
description="Flat price per period (CHF) for enterprise subscriptions",
|
||||
json_schema_extra={"label": "Pauschale (CHF)"},
|
||||
)
|
||||
enterpriseMaxUsers: Optional[int] = Field(
|
||||
None,
|
||||
description="Custom user limit for enterprise (None = unlimited)",
|
||||
json_schema_extra={"label": "Enterprise Max. Benutzer"},
|
||||
)
|
||||
enterpriseMaxFeatureInstances: Optional[int] = Field(
|
||||
None,
|
||||
description="Custom feature instance limit for enterprise (None = unlimited)",
|
||||
json_schema_extra={"label": "Enterprise Max. Module"},
|
||||
)
|
||||
enterpriseMaxDataVolumeMB: Optional[int] = Field(
|
||||
None,
|
||||
description="Custom storage limit in MB for enterprise (None = unlimited)",
|
||||
json_schema_extra={"label": "Enterprise Datenvolumen (MB)"},
|
||||
)
|
||||
enterpriseBudgetAiCHF: Optional[float] = Field(
|
||||
None,
|
||||
description="Fixed AI budget per period (CHF) for enterprise subscriptions",
|
||||
json_schema_extra={"label": "Enterprise AI-Budget (CHF)"},
|
||||
)
|
||||
enterpriseNote: Optional[str] = Field(
|
||||
None,
|
||||
description="Free-text note (e.g. contract reference) for enterprise subscriptions",
|
||||
json_schema_extra={"label": "Enterprise Notiz"},
|
||||
)
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Built-in plan catalog (static, no env dependency)
|
||||
# ============================================================================
|
||||
|
||||
BUILTIN_PLANS: Dict[str, SubscriptionPlan] = {
|
||||
"ENTERPRISE": SubscriptionPlan(
|
||||
planKey="ENTERPRISE",
|
||||
selectableByUser=False,
|
||||
title=t("Enterprise"),
|
||||
description=t("Individuelles Pauschalen-Abonnement — Limiten und Preis vom Sysadmin festgelegt."),
|
||||
billingPeriod=BillingPeriodEnum.NONE,
|
||||
autoRenew=False,
|
||||
maxUsers=None,
|
||||
maxFeatureInstances=None,
|
||||
includedModules=0,
|
||||
maxDataVolumeMB=None,
|
||||
budgetAiCHF=0.0,
|
||||
budgetAiPerUserCHF=0.0,
|
||||
),
|
||||
"ROOT": SubscriptionPlan(
|
||||
planKey="ROOT",
|
||||
selectableByUser=False,
|
||||
|
|
@ -407,7 +458,7 @@ BUILTIN_PLANS: Dict[str, SubscriptionPlan] = {
|
|||
}
|
||||
|
||||
|
||||
def _getPlan(planKey: str) -> Optional[SubscriptionPlan]:
|
||||
def getPlan(planKey: str) -> Optional[SubscriptionPlan]:
|
||||
"""Resolve a plan by key from the built-in catalog."""
|
||||
return BUILTIN_PLANS.get(planKey)
|
||||
|
||||
|
|
@ -415,3 +466,35 @@ def _getPlan(planKey: str) -> Optional[SubscriptionPlan]:
|
|||
def _getSelectablePlans() -> List[SubscriptionPlan]:
|
||||
"""Return plans that users can choose in the UI."""
|
||||
return [p for p in BUILTIN_PLANS.values() if p.selectableByUser]
|
||||
|
||||
|
||||
def getEffectiveLimits(sub: Dict[str, Any], plan: Optional[SubscriptionPlan] = None) -> Dict[str, Any]:
|
||||
"""Resolve effective limits for a subscription.
|
||||
|
||||
For enterprise subscriptions the custom enterprise* fields on the subscription
|
||||
record take precedence. For standard subscriptions the plan catalog values are
|
||||
returned. Falls back to unlimited (None / 0) when neither source provides a
|
||||
value."""
|
||||
if sub.get("isEnterprise"):
|
||||
return {
|
||||
"maxUsers": sub.get("enterpriseMaxUsers"),
|
||||
"maxFeatureInstances": sub.get("enterpriseMaxFeatureInstances"),
|
||||
"maxDataVolumeMB": sub.get("enterpriseMaxDataVolumeMB"),
|
||||
"budgetAiCHF": sub.get("enterpriseBudgetAiCHF") or 0.0,
|
||||
"includedModules": sub.get("enterpriseMaxFeatureInstances") or 0,
|
||||
}
|
||||
if plan:
|
||||
return {
|
||||
"maxUsers": plan.maxUsers,
|
||||
"maxFeatureInstances": plan.maxFeatureInstances,
|
||||
"maxDataVolumeMB": plan.maxDataVolumeMB,
|
||||
"budgetAiCHF": plan.budgetAiCHF,
|
||||
"includedModules": plan.includedModules,
|
||||
}
|
||||
return {
|
||||
"maxUsers": None,
|
||||
"maxFeatureInstances": None,
|
||||
"maxDataVolumeMB": None,
|
||||
"budgetAiCHF": 0.0,
|
||||
"includedModules": 0,
|
||||
}
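# Example (sketch): resolving limits for an enterprise vs. a standard
# subscription. The subscription dicts are illustrative only.
enterpriseLimits = getEffectiveLimits({"isEnterprise": True, "enterpriseMaxUsers": 50})
# -> maxUsers=50; the remaining limits fall back to None / 0 as documented.
standardLimits = getEffectiveLimits({"isEnterprise": False}, plan=getPlan("ROOT"))
# -> limits taken from the built-in ROOT plan in BUILTIN_PLANS.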
|
@ -32,6 +32,7 @@ class AuthAuthority(str, Enum):
|
|||
GOOGLE = "google"
|
||||
MSFT = "msft"
|
||||
CLICKUP = "clickup"
|
||||
INFOMANIAK = "infomaniak"
|
||||
|
||||
class ConnectionStatus(str, Enum):
|
||||
ACTIVE = "active"
|
||||
|
|
@ -397,7 +398,7 @@ class UserConnection(PowerOnModel):
|
|||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"label": "Benutzer-ID",
|
||||
"fk_target": {"db": "poweron_app", "table": "User"},
|
||||
"fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
|
||||
},
|
||||
)
|
||||
authority: AuthAuthority = Field(
|
||||
|
|
@ -474,6 +475,22 @@ class UserConnection(PowerOnModel):
|
|||
description="OAuth scopes granted for this connection",
|
||||
json_schema_extra={"frontend_type": "list", "frontend_readonly": True, "frontend_required": False, "label": "Gewährte Berechtigungen"},
|
||||
)
|
||||
knowledgeIngestionEnabled: bool = Field(
|
||||
default=False,
|
||||
description="Whether the user has consented to knowledge ingestion for this connection",
|
||||
json_schema_extra={"frontend_type": "boolean", "frontend_readonly": False, "frontend_required": False, "label": "Wissensdatenbank aktiv"},
|
||||
)
|
||||
knowledgePreferences: Optional[Dict[str, Any]] = Field(
|
||||
default=None,
|
||||
description=(
|
||||
"Per-connection knowledge ingestion preferences. schemaVersion=1 keys: "
|
||||
"neutralizeBeforeEmbed (bool), mailContentDepth (metadata|snippet|full), "
|
||||
"mailIndexAttachments (bool), filesIndexBinaries (bool), mimeAllowlist (list[str]), "
|
||||
"clickupScope (titles|title_description|with_comments), "
|
||||
"surfaceToggles (dict per authority), maxAgeDays (int)."
|
||||
),
|
||||
json_schema_extra={"frontend_type": "json", "frontend_readonly": False, "frontend_required": False, "label": "Wissenspräferenzen"},
|
||||
)
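# Example (sketch): a knowledgePreferences payload using the schemaVersion=1 keys
# listed above. Values are illustrative; the nested shape of surfaceToggles is an
# assumption beyond "dict per authority".
#   {
#       "schemaVersion": 1,
#       "neutralizeBeforeEmbed": True,
#       "mailContentDepth": "snippet",
#       "mailIndexAttachments": False,
#       "filesIndexBinaries": False,
#       "mimeAllowlist": ["application/pdf", "text/plain"],
#       "clickupScope": "title_description",
#       "surfaceToggles": {"google": {"mail": True, "drive": False}},
#       "maxAgeDays": 365,
#   }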
|
||||
|
||||
@computed_field
|
||||
@property
|
||||
|
|
@ -646,11 +663,11 @@ class UserInDB(User):
|
|||
resetTokenExpires: Optional[float] = Field(
|
||||
None,
|
||||
description="Reset token expiration (UTC timestamp in seconds)",
|
||||
json_schema_extra={"label": "Token läuft ab"},
|
||||
json_schema_extra={"label": "Token läuft ab", "frontend_type": "timestamp"},
|
||||
)
|
||||
|
||||
|
||||
def _normalizeTtsVoiceMap(value: Any) -> Optional[Dict[str, str]]:
|
||||
def normalizeTtsVoiceMap(value: Any) -> Optional[Dict[str, str]]:
|
||||
"""
|
||||
Coerce ttsVoiceMap payloads to Dict[str, str].
|
||||
|
||||
|
|
@ -687,12 +704,12 @@ class UserVoicePreferences(PowerOnModel):
|
|||
)
|
||||
userId: str = Field(
|
||||
description="User ID",
|
||||
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "User"}},
|
||||
json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"}},
|
||||
)
|
||||
mandateId: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Mandate scope (None = global for user)",
|
||||
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate"}},
|
||||
json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}},
|
||||
)
|
||||
sttLanguage: str = Field(
|
||||
default="de-DE",
|
||||
|
|
@ -728,6 +745,6 @@ class UserVoicePreferences(PowerOnModel):
|
|||
@field_validator("ttsVoiceMap", mode="before")
|
||||
@classmethod
|
||||
def _validateTtsVoiceMap(cls, value: Any) -> Optional[Dict[str, str]]:
|
||||
return _normalizeTtsVoiceMap(value)
|
||||
return normalizeTtsVoiceMap(value)
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -14,8 +14,8 @@ from modules.datamodels.datamodelExtraction import ContentExtracted, ContentPart
|
|||
class UdmMetadata(BaseModel):
|
||||
title: Optional[str] = None
|
||||
author: Optional[str] = None
|
||||
createdAt: Optional[str] = None
|
||||
modifiedAt: Optional[str] = None
|
||||
createdAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
|
||||
modifiedAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
|
||||
sourcePath: str = ""
|
||||
tags: List[str] = Field(default_factory=list)
|
||||
custom: Dict[str, Any] = Field(default_factory=dict)
|
||||
|
|
@ -177,7 +177,7 @@ def _groupKeyForPart(part: ContentPart) -> Tuple[str, int, str]:
|
|||
_VALID_DOC_SOURCES = frozenset({"pdf", "docx", "pptx", "xlsx", "html", "binary", "unknown"})
|
||||
|
||||
|
||||
def _contentPartsToUdm(extracted: ContentExtracted, sourceType: str, sourcePath: str) -> UdmDocument:
|
||||
def contentPartsToUdm(extracted: ContentExtracted, sourceType: str, sourcePath: str) -> UdmDocument:
|
||||
"""Convert flat ContentPart list into a UdmDocument using structural heuristics."""
|
||||
parts = list(extracted.parts or [])
|
||||
st: Literal["pdf", "docx", "pptx", "xlsx", "html", "binary", "unknown"] = (
|
||||
|
|
@ -290,7 +290,7 @@ def _stripUdmForReferences(udm: UdmDocument) -> UdmDocument:
|
|||
return clone
|
||||
|
||||
|
||||
def _applyUdmOutputDetail(udm: UdmDocument, detail: str) -> UdmDocument:
|
||||
def applyUdmOutputDetail(udm: UdmDocument, detail: str) -> UdmDocument:
|
||||
if detail == "structure":
|
||||
return _stripUdmRaw(udm)
|
||||
if detail == "references":
|
||||
|
|
@ -298,7 +298,7 @@ def _applyUdmOutputDetail(udm: UdmDocument, detail: str) -> UdmDocument:
|
|||
return udm
|
||||
|
||||
|
||||
def _mimeToUdmSourceType(mimeType: str, fileName: str) -> Literal["pdf", "docx", "pptx", "xlsx", "html", "binary", "unknown"]:
|
||||
def mimeToUdmSourceType(mimeType: str, fileName: str) -> Literal["pdf", "docx", "pptx", "xlsx", "html", "binary", "unknown"]:
|
||||
m = (mimeType or "").lower()
|
||||
fn = (fileName or "").lower()
|
||||
if m == "application/pdf" or fn.endswith(".pdf"):
|
||||
|
|
|
|||
|
|
@ -27,7 +27,7 @@ class Prompt(PowerOnModel):
|
|||
"frontend_type": "text",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate"},
|
||||
"fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
|
||||
},
|
||||
)
|
||||
isSystem: bool = Field(
|
||||
|
|
|
|||
modules/datamodels/datamodelViews.py (new file, 311 lines added)
|
|
@ -0,0 +1,311 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
# All rights reserved.
|
||||
"""
|
||||
View models for the /api/attributes/ endpoint.
|
||||
|
||||
These extend base DB models with computed / enriched fields that the gateway
|
||||
adds at response time (JOINs, aggregations, synthetics). They are NEVER used
|
||||
for DB operations — only for ``getModelAttributeDefinitions()`` so the frontend
|
||||
can resolve column types via ``resolveColumnTypes`` without hardcoding.
|
||||
|
||||
Naming convention: ``{BaseModel}View``.
|
||||
|
||||
``getModelClasses()`` in ``attributeUtils.py`` auto-discovers every
|
||||
``datamodel*.py`` under ``modules/datamodels/`` — so placing them here is
|
||||
sufficient for registration.
|
||||
"""
|
||||
|
||||
from typing import Optional, List
|
||||
from pydantic import Field
|
||||
|
||||
from modules.datamodels.datamodelBase import MODEL_REGISTRY, PowerOnModel
|
||||
from modules.datamodels.datamodelMembership import UserMandate, FeatureAccess
|
||||
from modules.datamodels.datamodelBilling import BillingTransaction
|
||||
from modules.datamodels.datamodelSubscription import MandateSubscription
|
||||
from modules.datamodels.datamodelUiLanguage import UiLanguageSet
|
||||
from modules.datamodels.datamodelRbac import Role
|
||||
from modules.features.neutralization.datamodelFeatureNeutralizer import DataNeutralizerAttributes
|
||||
from modules.shared.i18nRegistry import i18nModel
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Punkt 1a: UserMandate + enriched user fields
|
||||
# ============================================================================
|
||||
|
||||
@i18nModel("Benutzer-Mandant (Ansicht)")
|
||||
class UserMandateView(UserMandate):
|
||||
"""UserMandate erweitert um aufgeloeste Benutzerfelder und Rollenlabels."""
|
||||
|
||||
username: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Username (resolved from userId)",
|
||||
json_schema_extra={"label": "Benutzername", "frontend_type": "text", "frontend_readonly": True},
|
||||
)
|
||||
email: Optional[str] = Field(
|
||||
default=None,
|
||||
description="E-Mail address (resolved from userId)",
|
||||
json_schema_extra={"label": "E-Mail", "frontend_type": "text", "frontend_readonly": True},
|
||||
)
|
||||
fullName: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Full name (resolved from userId)",
|
||||
json_schema_extra={"label": "Vollstaendiger Name", "frontend_type": "text", "frontend_readonly": True},
|
||||
)
|
||||
roleLabels: Optional[List[str]] = Field(
|
||||
default=None,
|
||||
description="Role labels (resolved from junction table)",
|
||||
json_schema_extra={"label": "Rollen", "frontend_type": "text", "frontend_readonly": True},
|
||||
)
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Punkt 1b: FeatureAccess + enriched user fields
|
||||
# ============================================================================
|
||||
|
||||
@i18nModel("Feature-Zugang (Ansicht)")
|
||||
class FeatureAccessView(FeatureAccess):
|
||||
"""FeatureAccess erweitert um aufgeloeste Benutzerfelder und Rollenlabels."""
|
||||
|
||||
username: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Username (resolved from userId)",
|
||||
json_schema_extra={"label": "Benutzername", "frontend_type": "text", "frontend_readonly": True},
|
||||
)
|
||||
email: Optional[str] = Field(
|
||||
default=None,
|
||||
description="E-Mail address (resolved from userId)",
|
||||
json_schema_extra={"label": "E-Mail", "frontend_type": "text", "frontend_readonly": True},
|
||||
)
|
||||
fullName: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Full name (resolved from userId)",
|
||||
json_schema_extra={"label": "Vollstaendiger Name", "frontend_type": "text", "frontend_readonly": True},
|
||||
)
|
||||
roleLabels: Optional[List[str]] = Field(
|
||||
default=None,
|
||||
description="Role labels (resolved from junction table)",
|
||||
json_schema_extra={"label": "Rollen", "frontend_type": "text", "frontend_readonly": True},
|
||||
)
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Punkt 1d: BillingTransaction + enriched mandate/user names
|
||||
# ============================================================================
|
||||
|
||||
@i18nModel("Transaktion (Ansicht)")
|
||||
class BillingTransactionView(BillingTransaction):
|
||||
"""BillingTransaction erweitert um aufgeloeste Mandanten-/Benutzernamen."""
|
||||
|
||||
mandateName: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Mandate name (resolved from accountId/mandateId)",
|
||||
json_schema_extra={"label": "Mandant", "frontend_type": "text", "frontend_readonly": True},
|
||||
)
|
||||
userName: Optional[str] = Field(
|
||||
default=None,
|
||||
description="User name (resolved from createdByUserId)",
|
||||
json_schema_extra={"label": "Benutzer", "frontend_type": "text", "frontend_readonly": True},
|
||||
)
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Punkt 3a: MandateSubscription + aggregated fields
|
||||
# ============================================================================
|
||||
|
||||
@i18nModel("Abonnement (Ansicht)")
|
||||
class MandateSubscriptionView(MandateSubscription):
|
||||
"""MandateSubscription erweitert um aggregierte Laufzeitwerte."""
|
||||
|
||||
mandateName: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Mandate name (resolved from mandateId)",
|
||||
json_schema_extra={"label": "Mandant", "frontend_type": "text", "frontend_readonly": True},
|
||||
)
|
||||
planTitle: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Plan title (resolved from planKey)",
|
||||
json_schema_extra={"label": "Plan", "frontend_type": "text", "frontend_readonly": True},
|
||||
)
|
||||
activeUsers: Optional[int] = Field(
|
||||
default=None,
|
||||
description="Number of active users in the mandate",
|
||||
json_schema_extra={"label": "Benutzer", "frontend_type": "number", "frontend_readonly": True},
|
||||
)
|
||||
activeInstances: Optional[int] = Field(
|
||||
default=None,
|
||||
description="Number of active feature instances in the mandate",
|
||||
json_schema_extra={"label": "Module", "frontend_type": "number", "frontend_readonly": True},
|
||||
)
|
||||
monthlyRevenueCHF: Optional[float] = Field(
|
||||
default=None,
|
||||
description="Calculated monthly revenue in CHF",
|
||||
json_schema_extra={"label": "Umsatz pro Monat", "frontend_type": "number", "frontend_readonly": True},
|
||||
)
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Punkt 3b: UiLanguageSet + computed counts
|
||||
# ============================================================================
|
||||
|
||||
@i18nModel("Sprachset (Ansicht)")
|
||||
class UiLanguageSetView(UiLanguageSet):
|
||||
"""UiLanguageSet erweitert um berechnete Uebersetzungszaehler."""
|
||||
|
||||
uiCount: Optional[int] = Field(
|
||||
default=None,
|
||||
description="Number of UI translation entries",
|
||||
json_schema_extra={"label": "UI", "frontend_type": "number", "frontend_readonly": True},
|
||||
)
|
||||
gatewayCount: Optional[int] = Field(
|
||||
default=None,
|
||||
description="Number of gateway/API translation entries",
|
||||
json_schema_extra={"label": "API", "frontend_type": "number", "frontend_readonly": True},
|
||||
)
|
||||
entriesCount: Optional[int] = Field(
|
||||
default=None,
|
||||
description="Total number of translation entries",
|
||||
json_schema_extra={"label": "Gesamt", "frontend_type": "number", "frontend_readonly": True},
|
||||
)
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Punkt 1c: DataNeutralizerAttributes + enriched fields
|
||||
#
|
||||
# DataNeutralizerAttributes extends BaseModel (not PowerOnModel), so its
|
||||
# subclass does NOT auto-register in MODEL_REGISTRY. We register manually.
|
||||
# ============================================================================
|
||||
|
||||
@i18nModel("Neutralisierungs-Zuordnung (Ansicht)")
|
||||
class DataNeutralizerAttributesView(DataNeutralizerAttributes):
|
||||
"""DataNeutralizerAttributes erweitert um synthetische/aufgeloeste Felder."""
|
||||
|
||||
placeholder: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Synthetic placeholder string [patternType.id]",
|
||||
json_schema_extra={"label": "Platzhalter", "frontend_type": "text", "frontend_readonly": True},
|
||||
)
|
||||
username: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Username (resolved from userId)",
|
||||
json_schema_extra={"label": "Benutzer", "frontend_type": "text", "frontend_readonly": True},
|
||||
)
|
||||
instanceLabel: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Feature instance label (resolved from featureInstanceId)",
|
||||
json_schema_extra={"label": "Feature-Instanz", "frontend_type": "text", "frontend_readonly": True},
|
||||
)
|
||||
|
||||
|
||||
# Manual registration for non-PowerOnModel view
|
||||
MODEL_REGISTRY["DataNeutralizerAttributesView"] = DataNeutralizerAttributesView # type: ignore[assignment]
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Role view — admin RBAC list with computed `scopeType` + `userCount`
|
||||
#
|
||||
# `scopeType` is computed in the route from (mandateId, isSystemRole). Exposed
|
||||
# here as a pure `select` field so the frontend renders the user-facing label
|
||||
# from `frontend_options` (no hardcoded mapping in the page).
|
||||
# ============================================================================
|
||||
|
||||
@i18nModel("Rolle (Ansicht)")
|
||||
class RoleView(Role):
|
||||
"""Role extended with computed scope information for the admin UI."""
|
||||
|
||||
scopeType: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Computed scope: 'system' (template), 'global', or 'mandate'.",
|
||||
json_schema_extra={
|
||||
"label": "Geltungsbereich",
|
||||
"frontend_type": "select",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
"frontend_options": [
|
||||
{"value": "system", "label": "System-Template"},
|
||||
{"value": "global", "label": "Template"},
|
||||
{"value": "mandate", "label": "Mandant"},
|
||||
],
|
||||
},
|
||||
)
|
||||
userCount: Optional[int] = Field(
|
||||
default=None,
|
||||
description="Number of users assigned to this role (via UserMandateRole).",
|
||||
json_schema_extra={
|
||||
"label": "Benutzer",
|
||||
"frontend_type": "number",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Automation Workflow — dashboard view with synthesized fields
|
||||
# ============================================================================
|
||||
|
||||
from modules.features.graphicalEditor.datamodelFeatureGraphicalEditor import AutoWorkflow
|
||||
|
||||
|
||||
@i18nModel("Workflow (Ansicht)")
|
||||
class Automation2WorkflowView(AutoWorkflow):
|
||||
"""AutoWorkflow extended with computed dashboard fields.
|
||||
|
||||
Used exclusively for /api/attributes/ so the frontend can resolve column
|
||||
types for the workflow dashboard table (FormGeneratorTable).
|
||||
"""
|
||||
|
||||
sysCreatedAt: Optional[float] = Field(
|
||||
default=None,
|
||||
description="Record creation timestamp (UTC)",
|
||||
json_schema_extra={
|
||||
"label": "Erstellt",
|
||||
"frontend_type": "timestamp",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
},
|
||||
)
|
||||
lastStartedAt: Optional[float] = Field(
|
||||
default=None,
|
||||
description="Timestamp of the most recent workflow run start",
|
||||
json_schema_extra={
|
||||
"label": "Zuletzt gestartet",
|
||||
"frontend_type": "timestamp",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
},
|
||||
)
|
||||
runCount: Optional[int] = Field(
|
||||
default=None,
|
||||
description="Total number of runs for this workflow",
|
||||
json_schema_extra={
|
||||
"label": "Laeufe",
|
||||
"frontend_type": "number",
|
||||
"frontend_readonly": True,
|
||||
"frontend_required": False,
|
||||
},
|
||||
)
|
||||
mandateLabel: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Mandate name (resolved from mandateId)",
|
||||
json_schema_extra={"label": "Mandant", "frontend_type": "text", "frontend_readonly": True},
|
||||
)
|
||||
instanceLabel: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Feature instance label (resolved from featureInstanceId)",
|
||||
json_schema_extra={"label": "Feature-Instanz", "frontend_type": "text", "frontend_readonly": True},
|
||||
)
|
||||
featureCode: Optional[str] = Field(
|
||||
default=None,
|
||||
description="Feature code of the owning instance",
|
||||
json_schema_extra={"label": "Feature", "frontend_type": "text", "frontend_readonly": True},
|
||||
)
|
||||
isRunning: Optional[bool] = Field(
|
||||
default=None,
|
||||
description="Whether the workflow currently has an active run",
|
||||
json_schema_extra={
|
||||
"label": "Läuft",
|
||||
"frontend_type": "checkbox",
|
||||
"frontend_readonly": True,
|
||||
"frontend_format_labels": ["Ja", "-", "Nein"],
|
||||
},
|
||||
)
|
||||
|
|
@ -22,9 +22,24 @@ class WorkflowActionParameter(BaseModel):
|
|||
json_schema_extra={"label": "Name"},
|
||||
)
|
||||
type: str = Field(
|
||||
description="Python type as string: 'str', 'int', 'bool', 'List[str]', etc.",
|
||||
description=(
|
||||
"Type reference. Either a primitive ('str', 'int', 'bool', 'float', 'Any', "
|
||||
"'List[str]', 'Dict[str,Any]', …) or a PORT_TYPE_CATALOG schema name "
|
||||
"(e.g. 'ConnectionRef', 'FeatureInstanceRef', 'DocumentList', "
|
||||
"'TrusteeProcessResult'). Catalog types are validated by "
|
||||
"_actionSignatureValidator at startup."
|
||||
),
|
||||
json_schema_extra={"label": "Typ"},
|
||||
)
|
||||
uiHint: Optional[str] = Field(
|
||||
None,
|
||||
description=(
|
||||
"Optional UI rendering hint for adapters. "
|
||||
"Free-form (e.g. 'textarea', 'cron', 'fieldBuilder'). "
|
||||
"Adapters can override; defaults derive from frontendType when absent."
|
||||
),
|
||||
json_schema_extra={"label": "UI-Hinweis"},
|
||||
)
|
||||
frontendType: FrontendType = Field(
|
||||
description="UI rendering type (from global FrontendType enum)",
|
||||
json_schema_extra={"label": "Frontend-Typ"},
|
||||
|
|
@ -80,6 +95,16 @@ class WorkflowActionDefinition(BaseModel):
|
|||
description="Parameter schema definitions",
|
||||
json_schema_extra={"label": "Parameter"},
|
||||
)
|
||||
outputType: str = Field(
|
||||
"ActionResult",
|
||||
description=(
|
||||
"PORT_TYPE_CATALOG schema name produced by this action "
|
||||
"(e.g. 'TrusteeProcessResult', 'EmailDraft', 'DocumentList'). "
|
||||
"Defaults to 'ActionResult' for fire-and-forget actions. "
|
||||
"Validated by _actionSignatureValidator at startup."
|
||||
),
|
||||
json_schema_extra={"label": "Ausgabe-Typ"},
|
||||
)
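# Example (sketch): an action definition wiring the typed signature described
# above. Apart from the catalog names quoted in the field descriptions, the
# identifiers and values here are illustrative only.
#   WorkflowActionDefinition(
#       parameters=[
#           WorkflowActionParameter(name="connection", type="ConnectionRef", frontendType=...),
#           WorkflowActionParameter(name="subject", type="str", uiHint="textarea", frontendType=...),
#       ],
#       outputType="EmailDraft",  # PORT_TYPE_CATALOG name, validated at startup
#   )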
|
||||
execute: Optional[Callable] = Field(
|
||||
None,
|
||||
description="Execution function - async function that takes parameters dict and returns ActionResult. Set dynamically.",
|
||||
|
@ -2,7 +2,7 @@
|
|||
Demo Configs — Auto-Discovery Module
|
||||
|
||||
Scans this folder for Python files that contain subclasses of _BaseDemoConfig
|
||||
and exposes them via _getAvailableDemoConfigs().
|
||||
and exposes them via getAvailableDemoConfigs().
|
||||
"""
|
||||
|
||||
import importlib
|
||||
|
|
@ -18,7 +18,7 @@ logger = logging.getLogger(__name__)
|
|||
_configCache: Dict[str, _BaseDemoConfig] = {}
|
||||
|
||||
|
||||
def _getAvailableDemoConfigs() -> Dict[str, _BaseDemoConfig]:
|
||||
def getAvailableDemoConfigs() -> Dict[str, _BaseDemoConfig]:
|
||||
"""Return a dict of code -> instance for every discovered demo config."""
|
||||
if _configCache:
|
||||
return _configCache
|
||||
|
|
@ -43,7 +43,7 @@ def _getAvailableDemoConfigs() -> Dict[str, _BaseDemoConfig]:
|
|||
return _configCache
|
||||
|
||||
|
||||
def _getDemoConfigByCode(code: str) -> _BaseDemoConfig | None:
|
||||
def getDemoConfigByCode(code: str) -> _BaseDemoConfig | None:
|
||||
"""Get a specific demo config by its code."""
|
||||
configs = _getAvailableDemoConfigs()
|
||||
configs = getAvailableDemoConfigs()
|
||||
return configs.get(code)
|
||||
|
@ -4,11 +4,16 @@ Base class for demo configurations.
|
|||
Each demo config file in this folder extends _BaseDemoConfig and provides
|
||||
idempotent load() and remove() methods for setting up / tearing down
|
||||
a complete demo environment (mandates, users, features, test data, etc.).
|
||||
|
||||
Subclasses MUST also declare ``credentials`` so the SysAdmin who triggers a
|
||||
demo-load gets the initial username / password pair shown in the UI -- this
|
||||
avoids the "where do I find the password?" anti-pattern of having to grep the
|
||||
source code.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from abc import ABC, abstractmethod
|
||||
from typing import Dict, Any
|
||||
from typing import Any, Dict, List
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
|
@ -20,6 +25,13 @@ class _BaseDemoConfig(ABC):
|
|||
label: str = ""
|
||||
description: str = ""
|
||||
|
||||
# Each entry describes one bootstrapped login that the demo creates.
|
||||
# Shape: {"role": "Demo-Sachbearbeiter", "username": "pwg.demo",
|
||||
# "email": "pwg.demo@poweron.swiss", "password": "pwg.demo.2026"}
|
||||
# Surfaced via GET /api/admin/demo-config and inside the load() summary
|
||||
# so the AdminDemoConfigPage can display it (no source-code grep needed).
|
||||
credentials: List[Dict[str, str]] = []
|
||||
|
||||
@abstractmethod
|
||||
def load(self, db) -> Dict[str, Any]:
|
||||
"""Create all demo data (idempotent). Returns summary dict."""
|
||||
|
|
@ -35,4 +47,5 @@ class _BaseDemoConfig(ABC):
|
|||
"code": self.code,
|
||||
"label": self.label,
|
||||
"description": self.description,
|
||||
"credentials": list(self.credentials or []),
|
||||
}
|
|
@ -64,6 +64,14 @@ class InvestorDemo2026(_BaseDemoConfig):
|
|||
"Two mandates (HappyLife AG + Alpina Treuhand AG), one SysAdmin user, "
|
||||
"trustee with RMA, workspace, graph editor, and neutralization."
|
||||
)
|
||||
credentials = [
|
||||
{
|
||||
"role": "SysAdmin Demo",
|
||||
"username": _USER["username"],
|
||||
"email": _USER["email"],
|
||||
"password": _USER["password"],
|
||||
}
|
||||
]
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# load
|
||||
|
|
@ -101,6 +109,10 @@ class InvestorDemo2026(_BaseDemoConfig):
|
|||
logger.error(f"Demo load failed: {e}", exc_info=True)
|
||||
summary["errors"].append(str(e))
|
||||
|
||||
# Surface initial credentials so the SysAdmin doesn't have to grep the
|
||||
# source code -- consumed by AdminDemoConfigPage to render a copyable
|
||||
# login box in the result banner.
|
||||
summary["credentials"] = list(self.credentials)
|
||||
return summary
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
|
|
@ -268,10 +280,17 @@ class InvestorDemo2026(_BaseDemoConfig):
|
|||
logger.error(f"Failed to create feature '{instanceLabel}' ({code}) in {mandateLabel}: {e}")
|
||||
|
||||
def _ensureFeatureAccess(self, db, userId: str, mandateId: str, mandateLabel: str, summary: Dict):
|
||||
"""Grant the demo user admin access to every feature instance in the mandate."""
|
||||
"""Grant the demo user admin access on EVERY feature instance of the
|
||||
mandate. Without an explicit ``FeatureAccess`` + ``{code}-admin`` role
|
||||
the user does not see any feature tile in the UI -- so this method
|
||||
ALSO heals a half-broken state by re-copying the per-feature template
|
||||
roles if they are missing (e.g. when the instance was created via an
|
||||
older code path that skipped ``copyTemplateRoles``).
|
||||
"""
|
||||
from modules.datamodels.datamodelFeatures import FeatureInstance
|
||||
from modules.datamodels.datamodelMembership import FeatureAccess, FeatureAccessRole
|
||||
from modules.datamodels.datamodelRbac import Role
|
||||
from modules.interfaces.interfaceFeatures import getFeatureInterface
|
||||
|
||||
instances = db.getRecordset(FeatureInstance, recordFilter={"mandateId": mandateId}) or []
|
||||
|
||||
|
|
@ -297,16 +316,50 @@ class InvestorDemo2026(_BaseDemoConfig):
|
|||
"featureInstanceId": instId,
|
||||
"roleLabel": adminRoleLabel,
|
||||
})
|
||||
if adminRoles:
|
||||
adminRoleId = adminRoles[0].get("id")
|
||||
existingRole = db.getRecordset(FeatureAccessRole, recordFilter={
|
||||
"featureAccessId": featureAccessId,
|
||||
"roleId": adminRoleId,
|
||||
|
||||
# Self-heal: if the per-feature admin role does not exist on this
|
||||
# instance the template roles were never copied -- copy them now.
|
||||
if not adminRoles:
|
||||
logger.warning(
|
||||
"Feature instance %s (%s) is missing role '%s' -- "
|
||||
"re-copying template roles", instId, featureCode, adminRoleLabel,
|
||||
)
|
||||
try:
|
||||
fi = getFeatureInterface(db)
|
||||
fi._copyTemplateRoles(featureCode, mandateId, instId)
|
||||
summary["created"].append(
|
||||
f"Repaired template roles for {featureCode} in {mandateLabel}"
|
||||
)
|
||||
except Exception as repairErr:
|
||||
summary["errors"].append(
|
||||
f"Could not repair template roles for {featureCode} "
|
||||
f"in {mandateLabel}: {repairErr}"
|
||||
)
|
||||
adminRoles = db.getRecordset(Role, recordFilter={
|
||||
"featureInstanceId": instId,
|
||||
"roleLabel": adminRoleLabel,
|
||||
})
|
||||
if not existingRole:
|
||||
far = FeatureAccessRole(featureAccessId=featureAccessId, roleId=adminRoleId)
|
||||
db.recordCreate(FeatureAccessRole, far)
|
||||
logger.info(f"Assigned {adminRoleLabel} role in {mandateLabel}")
|
||||
|
||||
if not adminRoles:
|
||||
summary["errors"].append(
|
||||
f"Admin role '{adminRoleLabel}' not found for feature "
|
||||
f"instance {featureCode} in {mandateLabel} -- demo user "
|
||||
f"will not see this feature."
|
||||
)
|
||||
continue
|
||||
|
||||
adminRoleId = adminRoles[0].get("id")
|
||||
existingRole = db.getRecordset(FeatureAccessRole, recordFilter={
|
||||
"featureAccessId": featureAccessId,
|
||||
"roleId": adminRoleId,
|
||||
})
|
||||
if not existingRole:
|
||||
far = FeatureAccessRole(featureAccessId=featureAccessId, roleId=adminRoleId)
|
||||
db.recordCreate(FeatureAccessRole, far)
|
||||
summary["created"].append(
|
||||
f"Role '{adminRoleLabel}' assigned to demo user in {mandateLabel}"
|
||||
)
|
||||
logger.info(f"Assigned {adminRoleLabel} role in {mandateLabel}")
|
||||
|
||||
def _ensureTrusteeRmaConfig(self, db, mandateId: Optional[str], mandateLabel: str, summary: Dict):
|
||||
if not mandateId:
|
||||
|
|
@ -394,10 +447,10 @@ class InvestorDemo2026(_BaseDemoConfig):
|
|||
if not mandateId:
|
||||
return
|
||||
try:
|
||||
from modules.interfaces.interfaceDbBilling import _getRootInterface
|
||||
from modules.interfaces.interfaceDbBilling import getRootInterface
|
||||
from modules.datamodels.datamodelBilling import BillingSettings
|
||||
|
||||
billingInterface = _getRootInterface()
|
||||
billingInterface = getRootInterface()
|
||||
existingSettings = billingInterface.getSettings(mandateId)
|
||||
if existingSettings:
|
||||
summary["skipped"].append(f"Billing for {mandateLabel} exists")
|
||||
|
|
@ -479,8 +532,8 @@ class InvestorDemo2026(_BaseDemoConfig):
|
|||
summary["removed"].append(f"{len(roles)} roles in {mandateLabel}")
|
||||
|
||||
try:
|
||||
from modules.interfaces.interfaceDbBilling import _getRootInterface
|
||||
billingDb = _getRootInterface().db
|
||||
from modules.interfaces.interfaceDbBilling import getRootInterface
|
||||
billingDb = getRootInterface().db
|
||||
billingSettings = billingDb.getRecordset(BillingSettings, recordFilter={"mandateId": mandateId}) or []
|
||||
for bs in billingSettings:
|
||||
billingDb.recordDelete(BillingSettings, bs.get("id"))
|
||||
|
@ -67,6 +67,14 @@ class PwgDemo2026(_BaseDemoConfig):
|
|||
"Graph-Editor mit dem Pilot-Workflow für Jahresmietzinsbestätigungen "
|
||||
"(als File importiert, active=false). Idempotent."
|
||||
)
|
||||
credentials = [
|
||||
{
|
||||
"role": "Demo-Sachbearbeiter",
|
||||
"username": _USER["username"],
|
||||
"email": _USER["email"],
|
||||
"password": _USER["password"],
|
||||
}
|
||||
]
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# load
|
||||
|
|
@ -98,6 +106,10 @@ class PwgDemo2026(_BaseDemoConfig):
|
|||
logger.error(f"PWG demo load failed: {e}", exc_info=True)
|
||||
summary["errors"].append(str(e))
|
||||
|
||||
# Surface initial credentials so the SysAdmin doesn't have to grep the
|
||||
# source code -- consumed by AdminDemoConfigPage to render a copyable
|
||||
# login box in the result banner.
|
||||
summary["credentials"] = list(self.credentials)
|
||||
return summary
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
|
|
@ -253,9 +265,17 @@ class PwgDemo2026(_BaseDemoConfig):
|
|||
summary["errors"].append(f"Feature '{instanceLabel}' in {mandateLabel}: {e}")
|
||||
|
||||
def _ensureFeatureAccess(self, db, userId: str, mandateId: str, mandateLabel: str, summary: Dict):
|
||||
"""Grant the demo user admin access on EVERY feature instance of the
|
||||
mandate. Without an explicit ``FeatureAccess`` + ``{code}-admin`` role
|
||||
the user does not see any feature tile in the UI -- so this method
|
||||
ALSO heals a half-broken state by re-copying the per-feature template
|
||||
roles if they are missing (e.g. when the instance was created via an
|
||||
older code path that skipped ``copyTemplateRoles``).
|
||||
"""
|
||||
from modules.datamodels.datamodelFeatures import FeatureInstance
|
||||
from modules.datamodels.datamodelMembership import FeatureAccess, FeatureAccessRole
|
||||
from modules.datamodels.datamodelRbac import Role
|
||||
from modules.interfaces.interfaceFeatures import getFeatureInterface
|
||||
|
||||
instances = db.getRecordset(FeatureInstance, recordFilter={"mandateId": mandateId}) or []
|
||||
|
||||
|
|
@ -280,15 +300,51 @@ class PwgDemo2026(_BaseDemoConfig):
|
|||
"featureInstanceId": instId,
|
||||
"roleLabel": adminRoleLabel,
|
||||
})
|
||||
if adminRoles:
|
||||
adminRoleId = adminRoles[0].get("id")
|
||||
existingRole = db.getRecordset(FeatureAccessRole, recordFilter={
|
||||
"featureAccessId": featureAccessId,
|
||||
"roleId": adminRoleId,
|
||||
|
||||
# Self-heal: if the per-feature admin role does not exist on this
|
||||
# instance the template roles were never copied -- copy them now.
|
||||
if not adminRoles:
|
||||
logger.warning(
|
||||
"Feature instance %s (%s) is missing role '%s' -- "
|
||||
"re-copying template roles", instId, featureCode, adminRoleLabel,
|
||||
)
|
||||
try:
|
||||
fi = getFeatureInterface(db)
|
||||
fi._copyTemplateRoles(featureCode, mandateId, instId)
|
||||
summary["created"].append(
|
||||
f"Repaired template roles for {featureCode} in {mandateLabel}"
|
||||
)
|
||||
except Exception as repairErr:
|
||||
summary["errors"].append(
|
||||
f"Could not repair template roles for {featureCode} "
|
||||
f"in {mandateLabel}: {repairErr}"
|
||||
)
|
||||
adminRoles = db.getRecordset(Role, recordFilter={
|
||||
"featureInstanceId": instId,
|
||||
"roleLabel": adminRoleLabel,
|
||||
})
|
||||
if not existingRole:
|
||||
far = FeatureAccessRole(featureAccessId=featureAccessId, roleId=adminRoleId)
|
||||
db.recordCreate(FeatureAccessRole, far)
|
||||
|
||||
if not adminRoles:
|
||||
# Hard fail surfaced to UI -- without the admin role the user
|
||||
# would silently not see the instance.
|
||||
summary["errors"].append(
|
||||
f"Admin role '{adminRoleLabel}' not found for feature "
|
||||
f"instance {featureCode} in {mandateLabel} -- demo user "
|
||||
f"will not see this feature."
|
||||
)
|
||||
continue
|
||||
|
||||
adminRoleId = adminRoles[0].get("id")
|
||||
existingRole = db.getRecordset(FeatureAccessRole, recordFilter={
|
||||
"featureAccessId": featureAccessId,
|
||||
"roleId": adminRoleId,
|
||||
})
|
||||
if not existingRole:
|
||||
far = FeatureAccessRole(featureAccessId=featureAccessId, roleId=adminRoleId)
|
||||
db.recordCreate(FeatureAccessRole, far)
|
||||
summary["created"].append(
|
||||
f"Role '{adminRoleLabel}' assigned to demo user in {mandateLabel}"
|
||||
)
|
||||
|
||||
def _ensureNeutralizationConfig(self, db, mandateId: Optional[str], userId: Optional[str], summary: Dict):
|
||||
if not mandateId or not userId:
|
||||
|
|
@ -321,9 +377,9 @@ class PwgDemo2026(_BaseDemoConfig):
|
|||
return
|
||||
try:
|
||||
from modules.datamodels.datamodelBilling import BillingSettings
|
||||
from modules.interfaces.interfaceDbBilling import _getRootInterface
|
||||
from modules.interfaces.interfaceDbBilling import getRootInterface
|
||||
|
||||
billingInterface = _getRootInterface()
|
||||
billingInterface = getRootInterface()
|
||||
existingSettings = billingInterface.getSettings(mandateId)
|
||||
if existingSettings:
|
||||
summary["skipped"].append(f"Billing for {mandateLabel} exists")
|
||||
|
|
@ -447,11 +503,12 @@ class PwgDemo2026(_BaseDemoConfig):
|
|||
if monthlyRent <= 0:
|
||||
continue
|
||||
for month in range(1, 13):
|
||||
bookingDate = f"{year}-{month:02d}-01"
|
||||
from datetime import datetime as _dtCls, timezone as _tzCls
|
||||
bookingTs = _dtCls(year, month, 1, tzinfo=_tzCls.utc).timestamp()
|
||||
entryRef = f"PWG-{tenant.get('contactNumber')}-{year}{month:02d}"
|
||||
entry = TrusteeDataJournalEntry(
|
||||
externalId=entryRef,
|
||||
bookingDate=bookingDate,
|
||||
bookingDate=bookingTs,
|
||||
reference=entryRef,
|
||||
description=f"Mietzins {month:02d}/{year} {name}",
|
||||
currency="CHF",
|
||||
|
|
@ -652,8 +709,8 @@ class PwgDemo2026(_BaseDemoConfig):
|
|||
db.recordDelete(Role, role.get("id"))
|
||||
|
||||
try:
|
||||
from modules.interfaces.interfaceDbBilling import _getRootInterface
|
||||
billingDb = _getRootInterface().db
|
||||
from modules.interfaces.interfaceDbBilling import getRootInterface
|
||||
billingDb = getRootInterface().db
|
||||
billingSettings = billingDb.getRecordset(BillingSettings, recordFilter={"mandateId": mandateId}) or []
|
||||
for bs in billingSettings:
|
||||
billingDb.recordDelete(BillingSettings, bs.get("id"))
|
||||
|
@ -139,13 +139,13 @@ def storeDebugMessageAndDocuments(message, currentUser, mandateId=None, featureI
|
|||
try:
|
||||
import os
|
||||
from datetime import datetime, UTC
|
||||
from modules.shared.debugLogger import _getBaseDebugDir, _ensureDir
|
||||
from modules.shared.debugLogger import getBaseDebugDir, ensureDir
|
||||
from modules.interfaces.interfaceDbManagement import getInterface
|
||||
|
||||
# Create base debug directory (use base debug dir, not prompts subdirectory)
|
||||
baseDebugDir = _getBaseDebugDir()
|
||||
baseDebugDir = getBaseDebugDir()
|
||||
debug_root = os.path.join(baseDebugDir, 'messages')
|
||||
_ensureDir(debug_root)
|
||||
ensureDir(debug_root)
|
||||
|
||||
# Generate timestamp
|
||||
timestamp = datetime.now(UTC).strftime('%Y%m%d-%H%M%S-%f')[:-3]
|
||||
|
|
@ -210,7 +210,7 @@ def storeDebugMessageAndDocuments(message, currentUser, mandateId=None, featureI
|
|||
safe_label = "default"
|
||||
|
||||
label_folder = os.path.join(message_path, safe_label)
|
||||
_ensureDir(label_folder)
|
||||
ensureDir(label_folder)
|
||||
|
||||
# Store each document
|
||||
for i, doc in enumerate(docs):
|
||||
|
|
@ -401,8 +401,8 @@ class ChatObjects:
|
|||
dbPassword = APP_CONFIG.get("DB_PASSWORD_SECRET")
|
||||
dbPort = int(APP_CONFIG.get("DB_PORT", 5432))
|
||||
|
||||
from modules.connectors.connectorDbPostgre import _get_cached_connector
|
||||
self.db = _get_cached_connector(
|
||||
from modules.connectors.connectorDbPostgre import getCachedConnector
|
||||
self.db = getCachedConnector(
|
||||
dbHost=dbHost,
|
||||
dbDatabase=dbDatabase,
|
||||
dbUser=dbUser,
|
||||
|
|
|
|||
|
|
@ -35,17 +35,6 @@ from modules.features.chatbot.mainChatbot import getEventManager
|
|||
from modules.shared.i18nRegistry import apiRouteContext
|
||||
routeApiMsg = apiRouteContext("routeFeatureChatbot")
|
||||
|
||||
# Pre-warm AI connectors when this router loads (before first request).
|
||||
# Ensures connectors are ready; avoids 4–8 s delay on first chatbot message.
|
||||
try:
|
||||
import modules.aicore.aicoreModelRegistry # noqa: F401
|
||||
from modules.aicore.aicoreModelRegistry import modelRegistry
|
||||
modelRegistry.ensureConnectorsRegistered()
|
||||
modelRegistry.refreshModels(force=True)
|
||||
logging.getLogger(__name__).info("Chatbot router: AI connectors pre-warmed")
|
||||
except Exception as e:
|
||||
logging.getLogger(__name__).warning(f"Chatbot AI pre-warm failed: {e}")
|
||||
|
||||
# Configure logger
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
|
@@ -204,19 +193,20 @@ def get_chatbot_threads(
                normalized_wf["maxSteps"] = 10
            normalized_workflows.append(normalized_wf)

-       metadata = PaginationMetadata(
-           currentPage=paginationParams.page if paginationParams else 1,
-           pageSize=paginationParams.pageSize if paginationParams else len(workflows),
-           totalItems=totalItems,
-           totalPages=totalPages,
-           sort=paginationParams.sort if paginationParams else [],
-           filters=paginationParams.filters if paginationParams else None
-       )
+       from modules.routes.routeHelpers import enrichRowsWithFkLabels
+       enriched = enrichRowsWithFkLabels(normalized_workflows, ChatbotConversation)

-       return PaginatedResponse(
-           items=normalized_workflows,
-           pagination=metadata
-       )
+       return {
+           "items": enriched,
+           "pagination": PaginationMetadata(
+               currentPage=paginationParams.page if paginationParams else 1,
+               pageSize=paginationParams.pageSize if paginationParams else len(workflows),
+               totalItems=totalItems,
+               totalPages=totalPages,
+               sort=paginationParams.sort if paginationParams else [],
+               filters=paginationParams.filters if paginationParams else None
+           ).model_dump(),
+       }

    except HTTPException:
        raise
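One plausible motivation for returning a plain dict here: the rows now carry extra FK-label keys from the enrichment step, and a plain dict with `model_dump()` on the metadata passes them through untouched. A small sketch with simplified stand-ins for the real models (the item model and data below are assumptions, not from the repo):

    from typing import Any, Dict, List
    from pydantic import BaseModel

    class PaginationMetadata(BaseModel):
        currentPage: int = 1
        pageSize: int = 10
        totalItems: int = 0
        totalPages: int = 0

    class PaginatedResponse(BaseModel):
        items: List[Dict[str, Any]]  # dict items pass through as-is
        pagination: PaginationMetadata

    rows = [{"id": "t1", "personaId": "p1", "personaId_label": "Coach Anna"}]

    typed = PaginatedResponse(items=rows, pagination=PaginationMetadata())
    plain = {"items": rows, "pagination": PaginationMetadata().model_dump()}
    # With stricter typed item models, extra keys like personaId_label would be
    # dropped by the response model; the plain-dict return avoids that risk.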
@@ -2,7 +2,7 @@
# All rights reserved.
"""
CommCoach Feature - Data Models.
-Pydantic models for coaching contexts, sessions, messages, tasks, scores, and user profiles.
+Pydantic models for training modules, sessions, messages, tasks, scores, and user profiles.
"""
from typing import Optional, List, Dict, Any
from pydantic import BaseModel, Field

@@ -16,22 +16,18 @@ import uuid
# Enums
# ============================================================================

-class CoachingContextStatus(str, Enum):
+class TrainingModuleStatus(str, Enum):
    ACTIVE = "active"
    PAUSED = "paused"
    ARCHIVED = "archived"
    COMPLETED = "completed"


-class CoachingContextCategory(str, Enum):
-    LEADERSHIP = "leadership"
-    CONFLICT = "conflict"
-    NEGOTIATION = "negotiation"
-    PRESENTATION = "presentation"
-    FEEDBACK = "feedback"
-    DELEGATION = "delegation"
-    CHANGE_MANAGEMENT = "changeManagement"
-    CUSTOM = "custom"
+class TrainingModuleType(str, Enum):
+    COACHING = "coaching"
+    TRAINING = "training"
+    EXAM = "exam"
+    ELEARNING = "elearning"


class CoachingSessionStatus(str, Enum):

@@ -75,30 +71,32 @@ class CoachingScoreTrend(str, Enum):
# Database Models
# ============================================================================

-class CoachingContext(PowerOnModel):
-    """A coaching context/dossier representing a topic the user is working on."""
+class TrainingModule(PowerOnModel):
+    """A training module representing a topic the user is working on."""
    id: str = Field(default_factory=lambda: str(uuid.uuid4()))
    userId: str = Field(description="Owner user ID (strict ownership)")
    mandateId: str = Field(description="Mandate ID")
    instanceId: str = Field(description="Feature instance ID")
-   title: str = Field(description="Context title, e.g. 'Conflict with team lead'")
+   title: str = Field(description="Module title, e.g. 'Conflict with team lead'")
    description: Optional[str] = Field(default=None, description="Short description")
-   category: CoachingContextCategory = Field(default=CoachingContextCategory.CUSTOM)
-   status: CoachingContextStatus = Field(default=CoachingContextStatus.ACTIVE)
-   goals: Optional[str] = Field(default=None, description="JSON array of goals [{id, text, status, createdAt}]")
+   moduleType: TrainingModuleType = Field(default=TrainingModuleType.COACHING)
+   status: TrainingModuleStatus = Field(default=TrainingModuleStatus.ACTIVE)
+   goals: Optional[str] = Field(default=None, description="Free-text goal description")
    insights: Optional[str] = Field(default=None, description="JSON array of AI insights [{id, text, sessionId, createdAt}]")
    metadata: Optional[str] = Field(default=None, description="JSON object with flexible metadata")
+   personaId: Optional[str] = Field(default=None, description="Default persona for sessions")
+   kpiTargets: Optional[str] = Field(default=None, description="JSON object with structured KPI targets")
    sessionCount: int = Field(default=0)
    taskCount: int = Field(default=0)
-   lastSessionAt: Optional[str] = Field(default=None)
+   lastSessionAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
    rollingOverview: Optional[str] = Field(default=None, description="AI summary of older sessions for long context history")
    rollingOverviewUpToSessionCount: Optional[int] = Field(default=None, description="Session count covered by rollingOverview")


class CoachingSession(PowerOnModel):
-    """A single coaching conversation session within a context."""
+    """A single coaching conversation session within a module."""
    id: str = Field(default_factory=lambda: str(uuid.uuid4()))
-   contextId: str = Field(description="FK to CoachingContext")
+   moduleId: str = Field(description="FK to TrainingModule")
    userId: str = Field(description="Owner user ID")
    mandateId: str = Field(description="Mandate ID")
    instanceId: str = Field(description="Feature instance ID")

@@ -113,15 +111,15 @@ class CoachingSession(PowerOnModel):
    messageCount: int = Field(default=0)
    competenceScore: Optional[float] = Field(default=None, ge=0.0, le=100.0)
    emailSent: bool = Field(default=False)
-   startedAt: Optional[str] = Field(default=None)
-   endedAt: Optional[str] = Field(default=None)
+   startedAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})
+   endedAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})


class CoachingMessage(PowerOnModel):
    """A single message in a coaching session."""
    id: str = Field(default_factory=lambda: str(uuid.uuid4()))
    sessionId: str = Field(description="FK to CoachingSession")
-   contextId: str = Field(description="FK to CoachingContext")
+   moduleId: str = Field(description="FK to TrainingModule")
    userId: str = Field(description="Owner user ID")
    role: CoachingMessageRole = Field(description="Message author role")
    content: str = Field(description="Message content (Markdown)")

@@ -131,9 +129,9 @@ class CoachingMessage(PowerOnModel):


class CoachingTask(PowerOnModel):
-    """A task/checklist item assigned within a coaching context."""
+    """A task/checklist item assigned within a training module."""
    id: str = Field(default_factory=lambda: str(uuid.uuid4()))
-   contextId: str = Field(description="FK to CoachingContext")
+   moduleId: str = Field(description="FK to TrainingModule")
    sessionId: Optional[str] = Field(default=None, description="FK to originating session")
    userId: str = Field(description="Owner user ID")
    mandateId: str = Field(description="Mandate ID")

@@ -141,14 +139,14 @@ class CoachingTask(PowerOnModel):
    description: Optional[str] = Field(default=None)
    status: CoachingTaskStatus = Field(default=CoachingTaskStatus.OPEN)
    priority: CoachingTaskPriority = Field(default=CoachingTaskPriority.MEDIUM)
-   dueDate: Optional[str] = Field(default=None)
-   completedAt: Optional[str] = Field(default=None)
+   dueDate: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "date"})
+   completedAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})


class CoachingScore(PowerOnModel):
    """A competence score for a dimension, recorded after a session."""
    id: str = Field(default_factory=lambda: str(uuid.uuid4()))
-   contextId: str = Field(description="FK to CoachingContext")
+   moduleId: str = Field(description="FK to TrainingModule")
    sessionId: str = Field(description="FK to CoachingSession")
    userId: str = Field(description="Owner user ID")
    mandateId: str = Field(description="Mandate ID")
@@ -171,7 +169,7 @@ class CoachingUserProfile(PowerOnModel):
    longestStreak: int = Field(default=0)
    totalSessions: int = Field(default=0)
    totalMinutes: int = Field(default=0)
-   lastSessionAt: Optional[str] = Field(default=None)
+   lastSessionAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})


# ============================================================================
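Several hunks in this file follow the same convention change: timestamps move from ISO strings to epoch floats, with `json_schema_extra` carrying a rendering hint for the frontend. A runnable sketch of that convention (plain `BaseModel` stands in for `PowerOnModel`; the conversion helper is an illustration, and it assumes naive legacy strings were UTC):

    from datetime import datetime, timezone
    from typing import Optional
    from pydantic import BaseModel, Field

    class Profile(BaseModel):
        lastSessionAt: Optional[float] = Field(
            default=None, json_schema_extra={"frontend_type": "timestamp"}
        )

    def fromIso(value: Optional[str]) -> Optional[float]:
        """Convert a legacy ISO-8601 string (assumed UTC if naive) to epoch seconds."""
        if not value:
            return None
        dt = datetime.fromisoformat(value)
        if dt.tzinfo is None:
            dt = dt.replace(tzinfo=timezone.utc)
        return dt.timestamp()

    p = Profile(lastSessionAt=fromIso("2026-03-01T09:30:00"))
    print(p.lastSessionAt)  # epoch float; sorts and diffs numerically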
@@ -193,6 +191,22 @@ class CoachingPersona(PowerOnModel):
    isActive: bool = Field(default=True)


+# ============================================================================
+# Module-Persona Mapping (M:N)
+# ============================================================================
+
+class ModulePersonaMapping(PowerOnModel):
+    """Maps which personas are available for a specific training module."""
+    id: str = Field(default_factory=lambda: str(uuid.uuid4()))
+    moduleId: str = Field(description="FK to TrainingModule")
+    personaId: str = Field(description="FK to CoachingPersona")
+    instanceId: str = Field(description="Feature instance ID")
+
+
+class SetModulePersonasRequest(BaseModel):
+    personaIds: List[str] = Field(description="List of persona IDs to assign to this module")
+
+
# ============================================================================
# Iteration 2: Badges / Gamification
# ============================================================================
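The new mapping table turns module/persona into an M:N relation: resolving the personas available to one module is a join over the mapping rows. An in-memory illustration with dicts standing in for the DB layer (the sample records are hypothetical):

    from typing import Dict, List

    personas: Dict[str, dict] = {
        "p1": {"id": "p1", "name": "Strict examiner"},
        "p2": {"id": "p2", "name": "Supportive coach"},
    }
    mappings: List[dict] = [
        {"moduleId": "m1", "personaId": "p1"},
        {"moduleId": "m1", "personaId": "p2"},
        {"moduleId": "m2", "personaId": "p2"},
    ]

    def personasForModule(moduleId: str) -> List[dict]:
        ids = [m["personaId"] for m in mappings if m["moduleId"] == moduleId]
        return [personas[i] for i in ids if i in personas]

    print([p["name"] for p in personasForModule("m1")])
    # ['Strict examiner', 'Supportive coach']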
@@ -204,25 +218,29 @@ class CoachingBadge(PowerOnModel):
    mandateId: str = Field(description="Mandate ID")
    instanceId: str = Field(description="Feature instance ID")
    badgeKey: str = Field(description="Badge identifier, e.g. 'streak_7'")
-   awardedAt: Optional[str] = Field(default=None)
+   awardedAt: Optional[float] = Field(default=None, json_schema_extra={"frontend_type": "timestamp"})


# ============================================================================
# API Request/Response Models
# ============================================================================

-class CreateContextRequest(BaseModel):
-    title: str = Field(description="Context title")
+class CreateModuleRequest(BaseModel):
+    title: str = Field(description="Module title")
    description: Optional[str] = None
-   category: Optional[CoachingContextCategory] = CoachingContextCategory.CUSTOM
-   goals: Optional[List[str]] = None
+   moduleType: Optional[TrainingModuleType] = TrainingModuleType.COACHING
+   goals: Optional[str] = None
+   personaId: Optional[str] = None
+   kpiTargets: Optional[str] = None


-class UpdateContextRequest(BaseModel):
+class UpdateModuleRequest(BaseModel):
    title: Optional[str] = None
    description: Optional[str] = None
-   category: Optional[CoachingContextCategory] = None
+   moduleType: Optional[TrainingModuleType] = None
    goals: Optional[str] = None
+   personaId: Optional[str] = None
+   kpiTargets: Optional[str] = None


class SendMessageRequest(BaseModel):

@@ -238,14 +256,14 @@ class CreateTaskRequest(BaseModel):
    title: str
    description: Optional[str] = None
    priority: Optional[CoachingTaskPriority] = CoachingTaskPriority.MEDIUM
-   dueDate: Optional[str] = None
+   dueDate: Optional[float] = None


class UpdateTaskRequest(BaseModel):
    title: Optional[str] = None
    description: Optional[str] = None
    priority: Optional[CoachingTaskPriority] = None
-   dueDate: Optional[str] = None
+   dueDate: Optional[float] = None


class UpdateTaskStatusRequest(BaseModel):
@@ -279,8 +297,8 @@ class UpdatePersonaRequest(BaseModel):

class DashboardData(BaseModel):
    """Aggregated dashboard data for the user."""
-   totalContexts: int = 0
-   activeContexts: int = 0
+   totalModules: int = 0
+   activeModules: int = 0
    totalSessions: int = 0
    totalMinutes: int = 0
    streakDays: int = 0

@@ -289,4 +307,4 @@ class DashboardData(BaseModel):
    recentScores: List[Dict[str, Any]] = Field(default_factory=list)
    openTasks: int = 0
    completedTasks: int = 0
-   contexts: List[Dict[str, Any]] = Field(default_factory=list)
+   modules: List[Dict[str, Any]] = Field(default_factory=list)
@@ -12,12 +12,12 @@ from typing import Dict, Any, List, Optional
from modules.datamodels.datamodelUam import User
from modules.connectors.connectorDbPostgre import DatabaseConnector
from modules.shared.dbRegistry import registerDatabase
-from modules.shared.timeUtils import getIsoTimestamp
+from modules.shared.timeUtils import getIsoTimestamp, getUtcTimestamp
from modules.shared.configuration import APP_CONFIG
from modules.shared.i18nRegistry import resolveText, t

from .datamodelCommcoach import (
-    CoachingContext, CoachingContextStatus,
+    TrainingModule, TrainingModuleStatus,
    CoachingSession, CoachingSessionStatus,
    CoachingMessage,
    CoachingTask, CoachingTaskStatus,
@@ -70,74 +70,87 @@ class CommcoachObjects:
        )

    # =========================================================================
-   # Contexts
+   # Modules (formerly Contexts)
    # =========================================================================

-   def getContexts(self, instanceId: str, userId: str, includeArchived: bool = False) -> List[Dict[str, Any]]:
-       """Get all coaching contexts for a user. Strict ownership."""
+   def getModules(self, instanceId: str, userId: str, includeArchived: bool = False) -> List[Dict[str, Any]]:
+       """Get all training modules for a user. Enriches with live sessionCount from sessions table."""
        records = self.db.getRecordset(
-           CoachingContext,
+           TrainingModule,
            recordFilter={"instanceId": instanceId, "userId": userId},
        )
        if not includeArchived:
-           records = [r for r in records if r.get("status") != CoachingContextStatus.ARCHIVED.value]
+           records = [r for r in records if r.get("status") != TrainingModuleStatus.ARCHIVED.value]

+       allSessions = self.db.getRecordset(
+           CoachingSession,
+           recordFilter={"instanceId": instanceId, "userId": userId},
+       )
+       countByModule: Dict[str, int] = {}
+       for s in allSessions:
+           mid = s.get("moduleId")
+           if mid:
+               countByModule[mid] = countByModule.get(mid, 0) + 1
+       for r in records:
+           r["sessionCount"] = countByModule.get(r.get("id", ""), 0)
+
        records.sort(key=lambda r: r.get("updatedAt") or r.get("createdAt") or "", reverse=True)
        return records

-   def getContext(self, contextId: str) -> Optional[Dict[str, Any]]:
-       records = self.db.getRecordset(CoachingContext, recordFilter={"id": contextId})
+   def getModule(self, moduleId: str) -> Optional[Dict[str, Any]]:
+       records = self.db.getRecordset(TrainingModule, recordFilter={"id": moduleId})
        return records[0] if records else None

-   def createContext(self, data: Dict[str, Any]) -> Dict[str, Any]:
+   def createModule(self, data: Dict[str, Any]) -> Dict[str, Any]:
        data["createdAt"] = getIsoTimestamp()
        data["updatedAt"] = getIsoTimestamp()
-       return self.db.recordCreate(CoachingContext, data)
+       return self.db.recordCreate(TrainingModule, data)

-   def updateContext(self, contextId: str, updates: Dict[str, Any]) -> Optional[Dict[str, Any]]:
+   def updateModule(self, moduleId: str, updates: Dict[str, Any]) -> Optional[Dict[str, Any]]:
        updates["updatedAt"] = getIsoTimestamp()
-       return self.db.recordModify(CoachingContext, contextId, updates)
+       return self.db.recordModify(TrainingModule, moduleId, updates)

-   def deleteContext(self, contextId: str) -> bool:
-       self._deleteSessionsByContext(contextId)
-       self._deleteTasksByContext(contextId)
-       self._deleteScoresByContext(contextId)
-       return self.db.recordDelete(CoachingContext, contextId)
+   def deleteModule(self, moduleId: str) -> bool:
+       self._deleteSessionsByModule(moduleId)
+       self._deleteTasksByModule(moduleId)
+       self._deleteScoresByModule(moduleId)
+       return self.db.recordDelete(TrainingModule, moduleId)

    # =========================================================================
    # Sessions
    # =========================================================================

-   def getSessions(self, contextId: str, userId: str) -> List[Dict[str, Any]]:
+   def getSessions(self, moduleId: str, userId: str) -> List[Dict[str, Any]]:
        records = self.db.getRecordset(
            CoachingSession,
-           recordFilter={"contextId": contextId, "userId": userId},
+           recordFilter={"moduleId": moduleId, "userId": userId},
        )
-       records.sort(key=lambda r: r.get("startedAt") or r.get("createdAt") or "", reverse=True)
+       records.sort(key=lambda r: r.get("startedAt") or 0, reverse=True)
        return records

    def getSession(self, sessionId: str) -> Optional[Dict[str, Any]]:
        records = self.db.getRecordset(CoachingSession, recordFilter={"id": sessionId})
        return records[0] if records else None

-   def getActiveSession(self, contextId: str, userId: str) -> Optional[Dict[str, Any]]:
+   def getActiveSession(self, moduleId: str, userId: str) -> Optional[Dict[str, Any]]:
        records = self.db.getRecordset(
            CoachingSession,
-           recordFilter={"contextId": contextId, "userId": userId, "status": CoachingSessionStatus.ACTIVE.value},
+           recordFilter={"moduleId": moduleId, "userId": userId, "status": CoachingSessionStatus.ACTIVE.value},
        )
        return records[0] if records else None

    def createSession(self, data: Dict[str, Any]) -> Dict[str, Any]:
        data["createdAt"] = getIsoTimestamp()
        data["updatedAt"] = getIsoTimestamp()
-       data["startedAt"] = getIsoTimestamp()
+       data["startedAt"] = getUtcTimestamp()
        return self.db.recordCreate(CoachingSession, data)

    def updateSession(self, sessionId: str, updates: Dict[str, Any]) -> Optional[Dict[str, Any]]:
        updates["updatedAt"] = getIsoTimestamp()
        return self.db.recordModify(CoachingSession, sessionId, updates)

-   def _deleteSessionsByContext(self, contextId: str) -> int:
-       records = self.db.getRecordset(CoachingSession, recordFilter={"contextId": contextId})
+   def _deleteSessionsByModule(self, moduleId: str) -> int:
+       records = self.db.getRecordset(CoachingSession, recordFilter={"moduleId": moduleId})
        count = 0
        for record in records:
            self._deleteMessagesBySession(record.get("id"))
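The new `getModules()` derives `sessionCount` with a single pass over all sessions instead of one query per module. The same O(n) grouping can be expressed with `collections.Counter`; records here are plain dicts, as in the interface layer, and the sample data is hypothetical:

    from collections import Counter

    sessions = [
        {"id": "s1", "moduleId": "m1"},
        {"id": "s2", "moduleId": "m1"},
        {"id": "s3", "moduleId": "m2"},
        {"id": "s4", "moduleId": None},  # orphaned rows are simply skipped
    ]

    countByModule = Counter(s["moduleId"] for s in sessions if s.get("moduleId"))

    modules = [{"id": "m1"}, {"id": "m2"}, {"id": "m3"}]
    for m in modules:
        m["sessionCount"] = countByModule.get(m["id"], 0)

    print(modules)  # m1 -> 2, m2 -> 1, m3 -> 0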
@@ -174,10 +187,10 @@ class CommcoachObjects:
    # Tasks
    # =========================================================================

-   def getTasks(self, contextId: str, userId: str) -> List[Dict[str, Any]]:
+   def getTasks(self, moduleId: str, userId: str) -> List[Dict[str, Any]]:
        records = self.db.getRecordset(
            CoachingTask,
-           recordFilter={"contextId": contextId, "userId": userId},
+           recordFilter={"moduleId": moduleId, "userId": userId},
        )
        records.sort(key=lambda r: r.get("createdAt") or "", reverse=True)
        return records

@@ -198,8 +211,8 @@ class CommcoachObjects:
    def deleteTask(self, taskId: str) -> bool:
        return self.db.recordDelete(CoachingTask, taskId)

-   def _deleteTasksByContext(self, contextId: str) -> int:
-       records = self.db.getRecordset(CoachingTask, recordFilter={"contextId": contextId})
+   def _deleteTasksByModule(self, moduleId: str) -> int:
+       records = self.db.getRecordset(CoachingTask, recordFilter={"moduleId": moduleId})
        count = 0
        for record in records:
            self.db.recordDelete(CoachingTask, record.get("id"))

@@ -218,10 +231,10 @@ class CommcoachObjects:
    # Scores
    # =========================================================================

-   def getScores(self, contextId: str, userId: str) -> List[Dict[str, Any]]:
+   def getScores(self, moduleId: str, userId: str) -> List[Dict[str, Any]]:
        records = self.db.getRecordset(
            CoachingScore,
-           recordFilter={"contextId": contextId, "userId": userId},
+           recordFilter={"moduleId": moduleId, "userId": userId},
        )
        records.sort(key=lambda r: r.get("createdAt") or "")
        return records

@@ -235,8 +248,8 @@ class CommcoachObjects:
        data["createdAt"] = getIsoTimestamp()
        return self.db.recordCreate(CoachingScore, data)

-   def _deleteScoresByContext(self, contextId: str) -> int:
-       records = self.db.getRecordset(CoachingScore, recordFilter={"contextId": contextId})
+   def _deleteScoresByModule(self, moduleId: str) -> int:
+       records = self.db.getRecordset(CoachingScore, recordFilter={"moduleId": moduleId})
        count = 0
        for record in records:
            self.db.recordDelete(CoachingScore, record.get("id"))

@@ -274,6 +287,39 @@ class CommcoachObjects:
        from .datamodelCommcoach import CoachingPersona
        return self.db.recordDelete(CoachingPersona, personaId)

+   def getAllPersonas(self, instanceId: str) -> List[Dict[str, Any]]:
+       """All personas (builtin + custom for this instance), including inactive."""
+       from .datamodelCommcoach import CoachingPersona
+       builtins = self.db.getRecordset(CoachingPersona, recordFilter={"userId": "system"})
+       custom = self.db.getRecordset(CoachingPersona, recordFilter={"instanceId": instanceId})
+       custom = [p for p in custom if p.get("userId") != "system"]
+       return builtins + custom
+
+   # =========================================================================
+   # Module-Persona Mapping
+   # =========================================================================
+
+   def getModulePersonas(self, moduleId: str) -> List[Dict[str, Any]]:
+       from .datamodelCommcoach import ModulePersonaMapping
+       return self.db.getRecordset(ModulePersonaMapping, recordFilter={"moduleId": moduleId})
+
+   def setModulePersonas(self, moduleId: str, personaIds: List[str], instanceId: str) -> List[Dict[str, Any]]:
+       from .datamodelCommcoach import ModulePersonaMapping
+       existing = self.db.getRecordset(ModulePersonaMapping, recordFilter={"moduleId": moduleId})
+       for rec in existing:
+           self.db.recordDelete(ModulePersonaMapping, rec["id"])
+       created = []
+       for pId in personaIds:
+           data = ModulePersonaMapping(
+               moduleId=moduleId,
+               personaId=pId,
+               instanceId=instanceId,
+           ).model_dump()
+           data["createdAt"] = getIsoTimestamp()
+           data["updatedAt"] = getIsoTimestamp()
+           created.append(self.db.recordCreate(ModulePersonaMapping, data))
+       return created
+
    # =========================================================================
    # Badges
    # =========================================================================

@@ -281,7 +327,7 @@ class CommcoachObjects:
    def getBadges(self, userId: str, instanceId: str) -> List[Dict[str, Any]]:
        from .datamodelCommcoach import CoachingBadge
        records = self.db.getRecordset(CoachingBadge, recordFilter={"userId": userId, "instanceId": instanceId})
-       records.sort(key=lambda r: r.get("awardedAt") or "", reverse=True)
+       records.sort(key=lambda r: r.get("awardedAt") or 0, reverse=True)
        return records

    def hasBadge(self, userId: str, instanceId: str, badgeKey: str) -> bool:

@@ -291,7 +337,7 @@ class CommcoachObjects:

    def awardBadge(self, data: Dict[str, Any]) -> Dict[str, Any]:
        from .datamodelCommcoach import CoachingBadge
-       data["awardedAt"] = getIsoTimestamp()
+       data["awardedAt"] = getUtcTimestamp()
        data["createdAt"] = getIsoTimestamp()
        return self.db.recordCreate(CoachingBadge, data)

@@ -299,8 +345,8 @@ class CommcoachObjects:
    # Score History
    # =========================================================================

-   def getScoreHistory(self, contextId: str, userId: str) -> Dict[str, List[Dict[str, Any]]]:
-       scores = self.getScores(contextId, userId)
+   def getScoreHistory(self, moduleId: str, userId: str) -> Dict[str, List[Dict[str, Any]]]:
+       scores = self.getScores(moduleId, userId)
        history: Dict[str, List[Dict[str, Any]]] = {}
        for s in scores:
            dim = s.get("dimension", "unknown")

@@ -344,16 +390,15 @@ class CommcoachObjects:
    # =========================================================================

    def getDashboardData(self, userId: str, instanceId: str) -> Dict[str, Any]:
-       contexts = self.db.getRecordset(CoachingContext, recordFilter={"userId": userId, "instanceId": instanceId})
+       modules = self.db.getRecordset(TrainingModule, recordFilter={"userId": userId, "instanceId": instanceId})
        sessions = self.db.getRecordset(CoachingSession, recordFilter={"userId": userId, "instanceId": instanceId})
        profile = self.getProfile(userId, instanceId)

-       activeContexts = [c for c in contexts if c.get("status") == CoachingContextStatus.ACTIVE.value]
-       completedSessions = [s for s in sessions if s.get("status") == CoachingSessionStatus.COMPLETED.value]
+       activeModules = [m for m in modules if m.get("status") == TrainingModuleStatus.ACTIVE.value]

-       totalMinutes = sum(s.get("durationSeconds", 0) for s in completedSessions) // 60
+       totalMinutes = sum(s.get("durationSeconds", 0) for s in sessions) // 60
        scores = []
-       for s in completedSessions:
+       for s in sessions:
            raw = s.get("competenceScore")
            if raw is not None:
                try:

@@ -364,29 +409,27 @@ class CommcoachObjects:

        recentScores = self.getRecentScores(userId, limit=10)

-       contextSummaries = []
-       for ctx in activeContexts:
-           goalProgress = _calcGoalProgress(ctx.get("goals"))
-           contextSummaries.append({
-               "id": ctx.get("id"),
-               "title": ctx.get("title"),
-               "category": ctx.get("category"),
-               "sessionCount": ctx.get("sessionCount", 0),
-               "lastSessionAt": ctx.get("lastSessionAt"),
-               "goalProgress": goalProgress,
+       countByModule: Dict[str, int] = {}
+       for s in sessions:
+           mid = s.get("moduleId")
+           if mid:
+               countByModule[mid] = countByModule.get(mid, 0) + 1
+
+       moduleSummaries = []
+       for mod in activeModules:
+           modId = mod.get("id", "")
+           moduleSummaries.append({
+               "id": modId,
+               "title": mod.get("title"),
+               "moduleType": mod.get("moduleType"),
+               "sessionCount": countByModule.get(modId, 0),
+               "lastSessionAt": mod.get("lastSessionAt"),
            })

-       allGoalProgress = []
-       for ctx in activeContexts:
-           gp = _calcGoalProgress(ctx.get("goals"))
-           if gp is not None:
-               allGoalProgress.append(gp)
-       overallGoalProgress = round(sum(allGoalProgress) / len(allGoalProgress)) if allGoalProgress else None
-
        return {
-           "totalContexts": len(contexts),
-           "activeContexts": len(activeContexts),
-           "totalSessions": len(completedSessions),
+           "totalModules": len(modules),
+           "activeModules": len(activeModules),
+           "totalSessions": len(sessions),
            "totalMinutes": totalMinutes,
            "streakDays": profile.get("streakDays", 0) if profile else 0,
            "longestStreak": profile.get("longestStreak", 0) if profile else 0,

@@ -394,29 +437,12 @@ class CommcoachObjects:
            "recentScores": recentScores,
            "openTasks": self.getOpenTaskCount(userId, instanceId),
            "completedTasks": self.getCompletedTaskCount(userId, instanceId),
-           "contexts": contextSummaries,
-           "goalProgress": overallGoalProgress,
+           "modules": moduleSummaries,
            "badges": self.getBadges(userId, instanceId),
-           "level": _calcLevel(profile.get("totalSessions", 0) if profile else 0),
+           "level": _calcLevel(len(sessions)),
        }


-def _calcGoalProgress(goalsRaw) -> Optional[int]:
-    """Calculate goal completion percentage from a context's goals JSON field."""
-    if not goalsRaw:
-        return None
-    goals = goalsRaw
-    if isinstance(goalsRaw, str):
-        try:
-            goals = json.loads(goalsRaw)
-        except (json.JSONDecodeError, TypeError):
-            return None
-    if not isinstance(goals, list) or len(goals) == 0:
-        return None
-    done = sum(1 for g in goals if isinstance(g, dict) and g.get("status") in ("done", "completed"))
-    return round(done / len(goals) * 100)
-
-
_LEVELS = [
    (50, 5, "master", "Meister"),
    (25, 4, "expert", "Experte"),
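`_calcLevel` itself is not shown in this diff; a plausible reading of the descending `_LEVELS` table is a first-match threshold scan, sketched below. The lower tiers and the floor entry are assumptions, as is the return shape:

    _LEVELS = [
        (50, 5, "master", "Meister"),
        (25, 4, "expert", "Experte"),
        # ... lower tiers elided in the diff
        (0, 1, "beginner", "Anfänger"),  # assumed floor entry
    ]

    def calcLevel(sessionCount: int) -> dict:
        """Return the first tier whose threshold the session count meets."""
        for minSessions, level, key, label in _LEVELS:
            if sessionCount >= minSessions:
                return {"level": level, "key": key, "label": label}
        return {"level": 1, "key": "beginner", "label": "Anfänger"}

    print(calcLevel(30))  # -> level 4, 'expert'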
@@ -23,9 +23,19 @@ UI_OBJECTS = [
        "meta": {"area": "dashboard"}
    },
    {
-       "objectKey": "ui.feature.commcoach.coaching",
-       "label": t("Arbeitsthemen", context="UI"),
-       "meta": {"area": "coaching"}
+       "objectKey": "ui.feature.commcoach.assistant",
+       "label": t("Assistent", context="UI"),
+       "meta": {"area": "assistant"}
    },
+   {
+       "objectKey": "ui.feature.commcoach.modules",
+       "label": t("Module", context="UI"),
+       "meta": {"area": "modules"}
+   },
+   {
+       "objectKey": "ui.feature.commcoach.session",
+       "label": t("Session", context="UI"),
+       "meta": {"area": "session"}
+   },
    {
        "objectKey": "ui.feature.commcoach.settings",

@@ -35,15 +45,15 @@ UI_OBJECTS = [
]

DATA_OBJECTS = [
-   # ── Record-Hierarchie: Context → Session → Message/Score, Context → Task ──
+   # ── Record-Hierarchie: TrainingModule → Session → Message/Score, TrainingModule → Task ──
    {
-       "objectKey": "data.feature.commcoach.CoachingContext",
-       "label": t("Coaching-Kontext", context="UI"),
+       "objectKey": "data.feature.commcoach.TrainingModule",
+       "label": t("Trainings-Modul", context="UI"),
        "meta": {
-           "table": "CoachingContext",
-           "fields": ["id", "title", "category", "status", "lastSessionAt"],
+           "table": "TrainingModule",
+           "fields": ["id", "title", "moduleType", "status", "lastSessionAt"],
            "isParent": True,
-           "displayFields": ["title", "category", "status"],
+           "displayFields": ["title", "moduleType", "status"],
        }
    },
    {

@@ -51,10 +61,10 @@ DATA_OBJECTS = [
        "label": t("Coaching-Session", context="UI"),
        "meta": {
            "table": "CoachingSession",
-           "fields": ["id", "contextId", "status", "summary", "startedAt", "endedAt", "competenceScore"],
+           "fields": ["id", "moduleId", "status", "summary", "startedAt", "endedAt", "competenceScore"],
            "isParent": True,
-           "parentTable": "CoachingContext",
-           "parentKey": "contextId",
+           "parentTable": "TrainingModule",
+           "parentKey": "moduleId",
            "displayFields": ["startedAt", "status"],
        }
    },

@@ -63,7 +73,7 @@ DATA_OBJECTS = [
        "label": t("Coaching-Nachricht", context="UI"),
        "meta": {
            "table": "CoachingMessage",
-           "fields": ["id", "sessionId", "contextId", "role", "content", "contentType"],
+           "fields": ["id", "sessionId", "moduleId", "role", "content", "contentType"],
            "parentTable": "CoachingSession",
            "parentKey": "sessionId",
        }

@@ -73,7 +83,7 @@ DATA_OBJECTS = [
        "label": t("Coaching-Score", context="UI"),
        "meta": {
            "table": "CoachingScore",
-           "fields": ["id", "sessionId", "contextId", "dimension", "score", "trend"],
+           "fields": ["id", "sessionId", "moduleId", "dimension", "score", "trend"],
            "parentTable": "CoachingSession",
            "parentKey": "sessionId",
        }

@@ -83,9 +93,9 @@ DATA_OBJECTS = [
        "label": t("Coaching-Aufgabe", context="UI"),
        "meta": {
            "table": "CoachingTask",
-           "fields": ["id", "contextId", "title", "status", "priority", "dueDate"],
-           "parentTable": "CoachingContext",
-           "parentKey": "contextId",
+           "fields": ["id", "moduleId", "title", "status", "priority", "dueDate"],
+           "parentTable": "TrainingModule",
+           "parentKey": "moduleId",
        }
    },
    # ── Stammdaten (sessionübergreifend, scoped per userId) ──────────────────

@@ -112,6 +122,15 @@ DATA_OBJECTS = [
            "fields": ["id", "key", "label", "gender", "category"],
        }
    },
+   {
+       "objectKey": "data.feature.commcoach.ModulePersonaMapping",
+       "label": t("Modul-Persona-Zuordnung", context="UI"),
+       "meta": {
+           "table": "ModulePersonaMapping",
+           "group": "data.feature.commcoach.userData",
+           "fields": ["id", "moduleId", "personaId", "instanceId"],
+       }
+   },
    {
        "objectKey": "data.feature.commcoach.CoachingBadge",
        "label": t("Coaching-Auszeichnung", context="UI"),

@@ -130,19 +149,19 @@ DATA_OBJECTS = [

RESOURCE_OBJECTS = [
    {
-       "objectKey": "resource.feature.commcoach.context.create",
-       "label": t("Kontext erstellen", context="UI"),
-       "meta": {"endpoint": "/api/commcoach/{instanceId}/contexts", "method": "POST"}
+       "objectKey": "resource.feature.commcoach.module.create",
+       "label": t("Modul erstellen", context="UI"),
+       "meta": {"endpoint": "/api/commcoach/{instanceId}/modules", "method": "POST"}
    },
    {
-       "objectKey": "resource.feature.commcoach.context.archive",
-       "label": t("Kontext archivieren", context="UI"),
-       "meta": {"endpoint": "/api/commcoach/{instanceId}/contexts/{contextId}/archive", "method": "POST"}
+       "objectKey": "resource.feature.commcoach.module.archive",
+       "label": t("Modul archivieren", context="UI"),
+       "meta": {"endpoint": "/api/commcoach/{instanceId}/modules/{moduleId}/archive", "method": "POST"}
    },
    {
        "objectKey": "resource.feature.commcoach.session.start",
        "label": t("Session starten", context="UI"),
-       "meta": {"endpoint": "/api/commcoach/{instanceId}/contexts/{contextId}/sessions/start", "method": "POST"}
+       "meta": {"endpoint": "/api/commcoach/{instanceId}/modules/{moduleId}/sessions/start", "method": "POST"}
    },
    {
        "objectKey": "resource.feature.commcoach.session.complete",

@@ -152,7 +171,17 @@ RESOURCE_OBJECTS = [
    {
        "objectKey": "resource.feature.commcoach.task.manage",
        "label": t("Aufgaben verwalten", context="UI"),
-       "meta": {"endpoint": "/api/commcoach/{instanceId}/contexts/{contextId}/tasks", "method": "POST"}
+       "meta": {"endpoint": "/api/commcoach/{instanceId}/modules/{moduleId}/tasks", "method": "POST"}
    },
+   {
+       "objectKey": "resource.feature.commcoach.persona.manage",
+       "label": t("Persona verwalten", context="UI"),
+       "meta": {"endpoint": "/api/commcoach/{instanceId}/personas", "method": "POST"}
+   },
+   {
+       "objectKey": "resource.feature.commcoach.modulePersonas.manage",
+       "label": t("Modul-Persona-Zuordnung verwalten", context="UI"),
+       "meta": {"endpoint": "/api/commcoach/{instanceId}/modules/{moduleId}/personas", "method": "PUT"}
+   },
]

@@ -162,28 +191,31 @@ TEMPLATE_ROLES = [
        "description": "Kommunikations-Coach Betrachter - Coaching-Daten ansehen (nur lesen)",
        "accessRules": [
            {"context": "UI", "item": "ui.feature.commcoach.dashboard", "view": True},
-           {"context": "UI", "item": "ui.feature.commcoach.coaching", "view": True},
+           {"context": "UI", "item": "ui.feature.commcoach.assistant", "view": True},
+           {"context": "UI", "item": "ui.feature.commcoach.modules", "view": True},
            {"context": "UI", "item": "ui.feature.commcoach.session", "view": True},
            {"context": "UI", "item": "ui.feature.commcoach.settings", "view": True},
            {"context": "DATA", "item": None, "view": True, "read": "m", "create": "n", "update": "n", "delete": "n"},
            # Viewer: keine RESOURCE-Endpunkte (Mutationen); Regel explizit fuer konsistente Kontext-Matrix
            {"context": "RESOURCE", "item": None, "view": False},
        ],
    },
    {
        "roleLabel": "commcoach-user",
-       "description": "Kommunikations-Coach Benutzer - Kann eigene Coaching-Kontexte und Sessions verwalten",
+       "description": "Kommunikations-Coach Benutzer - Kann eigene Coaching-Module und Sessions verwalten",
        "accessRules": [
            {"context": "UI", "item": "ui.feature.commcoach.dashboard", "view": True},
-           {"context": "UI", "item": "ui.feature.commcoach.coaching", "view": True},
+           {"context": "UI", "item": "ui.feature.commcoach.assistant", "view": True},
+           {"context": "UI", "item": "ui.feature.commcoach.modules", "view": True},
            {"context": "UI", "item": "ui.feature.commcoach.session", "view": True},
            {"context": "UI", "item": "ui.feature.commcoach.settings", "view": True},
-           {"context": "DATA", "item": "data.feature.commcoach.CoachingContext", "view": True, "read": "m", "create": "m", "update": "m", "delete": "m"},
+           {"context": "DATA", "item": "data.feature.commcoach.TrainingModule", "view": True, "read": "m", "create": "m", "update": "m", "delete": "m"},
            {"context": "DATA", "item": "data.feature.commcoach.CoachingSession", "view": True, "read": "m", "create": "m", "update": "m", "delete": "n"},
            {"context": "DATA", "item": "data.feature.commcoach.CoachingMessage", "view": True, "read": "m", "create": "m", "update": "n", "delete": "n"},
            {"context": "DATA", "item": "data.feature.commcoach.CoachingTask", "view": True, "read": "m", "create": "m", "update": "m", "delete": "m"},
            {"context": "DATA", "item": "data.feature.commcoach.CoachingScore", "view": True, "read": "m", "create": "n", "update": "n", "delete": "n"},
            {"context": "DATA", "item": "data.feature.commcoach.CoachingUserProfile", "view": True, "read": "m", "create": "m", "update": "m", "delete": "n"},
-           {"context": "RESOURCE", "item": "resource.feature.commcoach.context.create", "view": True},
-           {"context": "RESOURCE", "item": "resource.feature.commcoach.context.archive", "view": True},
+           {"context": "RESOURCE", "item": "resource.feature.commcoach.module.create", "view": True},
+           {"context": "RESOURCE", "item": "resource.feature.commcoach.module.archive", "view": True},
            {"context": "RESOURCE", "item": "resource.feature.commcoach.session.start", "view": True},
            {"context": "RESOURCE", "item": "resource.feature.commcoach.session.complete", "view": True},
            {"context": "RESOURCE", "item": "resource.feature.commcoach.task.manage", "view": True},

@@ -252,6 +284,7 @@ def registerFeature(catalogService) -> bool:
            meta=dataObj.get("meta")
        )

+   _runMigrations()
    _syncTemplateRolesToDb()
    _seedBuiltinPersonas()
    _registerScheduler()
@@ -264,6 +297,135 @@ def registerFeature(catalogService) -> bool:
    return False


+def _runMigrations():
+    """Idempotent DB migrations for CommCoach feature.
+    Runs on every bootstrap; each step checks preconditions before executing.
+    """
+    try:
+        from .interfaceFeatureCommcoach import commcoachDatabase
+        from modules.shared.configuration import APP_CONFIG
+        import psycopg2
+        from psycopg2.extras import RealDictCursor
+
+        conn = psycopg2.connect(
+            host=APP_CONFIG.get("DB_HOST", "localhost"),
+            database=commcoachDatabase,
+            user=APP_CONFIG.get("DB_USER"),
+            password=APP_CONFIG.get("DB_PASSWORD_SECRET"),
+            port=int(APP_CONFIG.get("DB_PORT", 5432)),
+            cursor_factory=RealDictCursor,
+        )
+        conn.autocommit = False
+        cur = conn.cursor()
+
+        def _tableExists(name):
+            cur.execute(
+                "SELECT 1 FROM information_schema.tables WHERE LOWER(table_name) = LOWER(%s) AND table_schema = 'public'",
+                (name,),
+            )
+            return cur.fetchone() is not None
+
+        def _columnExists(table, column):
+            cur.execute(
+                "SELECT 1 FROM information_schema.columns WHERE LOWER(table_name) = LOWER(%s) AND LOWER(column_name) = LOWER(%s) AND table_schema = 'public'",
+                (table, column),
+            )
+            return cur.fetchone() is not None
+
+        migrated = False
+
+        # M1: Rename table CoachingContext -> TrainingModule
+        if _tableExists("CoachingContext") and not _tableExists("TrainingModule"):
+            cur.execute('ALTER TABLE "CoachingContext" RENAME TO "TrainingModule"')
+            logger.info("Migration M1: Renamed table CoachingContext -> TrainingModule")
+            migrated = True
+
+        # M2: Rename contextId -> moduleId on child tables
+        for childTable in ["CoachingSession", "CoachingMessage", "CoachingTask", "CoachingScore"]:
+            if _tableExists(childTable) and _columnExists(childTable, "contextId") and not _columnExists(childTable, "moduleId"):
+                cur.execute(f'ALTER TABLE "{childTable}" RENAME COLUMN "contextId" TO "moduleId"')
+                logger.info(f"Migration M2: Renamed contextId -> moduleId on {childTable}")
+                migrated = True
+
+        # M3: Add moduleType column with default 'coaching'
+        if _tableExists("TrainingModule") and not _columnExists("TrainingModule", "moduleType"):
+            cur.execute('ALTER TABLE "TrainingModule" ADD COLUMN "moduleType" TEXT DEFAULT \'coaching\'')
+            cur.execute('UPDATE "TrainingModule" SET "moduleType" = \'coaching\' WHERE "moduleType" IS NULL')
+            logger.info("Migration M3: Added moduleType column to TrainingModule")
+            migrated = True
+
+        # M4: Add personaId column
+        if _tableExists("TrainingModule") and not _columnExists("TrainingModule", "personaId"):
+            cur.execute('ALTER TABLE "TrainingModule" ADD COLUMN "personaId" TEXT')
+            logger.info("Migration M4: Added personaId column to TrainingModule")
+            migrated = True
+
+        # M5: Add kpiTargets column
+        if _tableExists("TrainingModule") and not _columnExists("TrainingModule", "kpiTargets"):
+            cur.execute('ALTER TABLE "TrainingModule" ADD COLUMN "kpiTargets" TEXT')
+            logger.info("Migration M5: Added kpiTargets column to TrainingModule")
+            migrated = True
+
+        # M6: Drop category column (replaced by moduleType)
+        if _tableExists("TrainingModule") and _columnExists("TrainingModule", "category"):
+            cur.execute('ALTER TABLE "TrainingModule" DROP COLUMN "category"')
+            logger.info("Migration M6: Dropped category column from TrainingModule")
+            migrated = True
+
+        # M7: Convert goals from JSON array to plain text
+        if _tableExists("TrainingModule") and _columnExists("TrainingModule", "goals"):
+            cur.execute("""
+                UPDATE "TrainingModule"
+                SET "goals" = subq.plainText
+                FROM (
+                    SELECT id,
+                           string_agg(elem->>'text', E'\\n') AS plainText
+                    FROM "TrainingModule",
+                         LATERAL jsonb_array_elements("goals"::jsonb) AS elem
+                    WHERE "goals" IS NOT NULL
+                      AND "goals" LIKE '[%'
+                    GROUP BY id
+                ) subq
+                WHERE "TrainingModule".id = subq.id
+            """)
+            rowCount = cur.rowcount
+            if rowCount > 0:
+                logger.info(f"Migration M7: Converted {rowCount} goals fields from JSON to plain text")
+                migrated = True
+
+        # M8: Create ModulePersonaMapping table
+        if not _tableExists("ModulePersonaMapping"):
+            cur.execute("""
+                CREATE TABLE "ModulePersonaMapping" (
+                    id TEXT PRIMARY KEY,
+                    "moduleId" TEXT NOT NULL,
+                    "personaId" TEXT NOT NULL,
+                    "instanceId" TEXT NOT NULL,
+                    "createdAt" TEXT,
+                    "updatedAt" TEXT,
+                    UNIQUE("moduleId", "personaId")
+                )
+            """)
+            cur.execute('CREATE INDEX IF NOT EXISTS idx_mpm_module ON "ModulePersonaMapping" ("moduleId")')
+            cur.execute('CREATE INDEX IF NOT EXISTS idx_mpm_persona ON "ModulePersonaMapping" ("personaId")')
+            logger.info("Migration M8: Created ModulePersonaMapping table")
+            migrated = True
+
+        if migrated:
+            conn.commit()
+            logger.info("CommCoach DB migrations committed")
+        else:
+            conn.rollback()
+
+        cur.close()
+        conn.close()
+
+    except ImportError:
+        logger.debug("psycopg2 not available, skipping CommCoach DB migrations")
+    except Exception as e:
+        logger.warning(f"CommCoach DB migration failed (non-fatal): {e}")
+
+
def _seedBuiltinPersonas():
    """Seed builtin roleplay personas into the database."""
    try:
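Every step M1-M8 above follows the same shape: probe `information_schema`, mutate only when the precondition holds, and commit one transaction at the end. A generic helper for that guard could look like the sketch below; the function name and parameters are illustrative, not part of this diff:

    def guarded_step(cur, precondition_sql, params, ddl, log, label):
        """Run `ddl` only if `precondition_sql` returns no row; report whether it ran."""
        cur.execute(precondition_sql, params)
        if cur.fetchone() is not None:
            return False  # already migrated; keeps re-runs on every bootstrap cheap
        cur.execute(ddl)
        log.info("Migration %s applied", label)
        return True

Because each step is guarded, running the whole function a second time is a no-op, which is what lets it run unconditionally on every bootstrap.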
@@ -2,7 +2,7 @@
# All rights reserved.
"""
CommCoach routes for the backend API.
-Implements coaching context management, session streaming, tasks, and dashboard.
+Implements training module management, session streaming, tasks, and dashboard.
"""

import logging

@@ -23,14 +23,14 @@ from modules.interfaces.interfaceFeatures import getFeatureInterface

from . import interfaceFeatureCommcoach as interfaceDb
from .datamodelCommcoach import (
-    CoachingContext, CoachingContextStatus, CoachingSession, CoachingSessionStatus,
+    TrainingModule, TrainingModuleStatus, CoachingSession, CoachingSessionStatus,
    CoachingMessage, CoachingMessageRole, CoachingMessageContentType,
    CoachingTask, CoachingTaskStatus,
-    CoachingPersona, CoachingBadge,
-    CreateContextRequest, UpdateContextRequest,
+    CoachingPersona, CoachingBadge, ModulePersonaMapping,
+    CreateModuleRequest, UpdateModuleRequest,
    SendMessageRequest, CreateTaskRequest, UpdateTaskRequest, UpdateTaskStatusRequest,
    UpdateProfileRequest,
-    StartSessionRequest, CreatePersonaRequest, UpdatePersonaRequest,
+    StartSessionRequest, CreatePersonaRequest, UpdatePersonaRequest, SetModulePersonasRequest,
)
from .serviceCommcoach import CommcoachService, emitSessionEvent, getSessionEventQueue, cleanupSessionEvents
from modules.shared.i18nRegistry import apiRouteContext

@@ -91,204 +91,200 @@ def _validateOwnership(record: dict, context: RequestContext, fieldName: str = "


# =========================================================================
-# Context Endpoints
+# Module Endpoints (formerly Context)
# =========================================================================

-@router.get("/{instanceId}/contexts")
+@router.get("/{instanceId}/modules")
@limiter.limit("60/minute")
-async def listContexts(
+async def listModules(
    request: Request,
    instanceId: str,
    includeArchived: bool = False,
    context: RequestContext = Depends(getRequestContext),
):
-    """List all coaching contexts for the current user."""
+    """List all training modules for the current user."""
    mandateId = _validateInstanceAccess(instanceId, context)
    interface = _getInterface(context, instanceId)
    userId = str(context.user.id)
-   contexts = interface.getContexts(instanceId, userId, includeArchived=includeArchived)
-   return {"contexts": contexts}
+   modules = interface.getModules(instanceId, userId, includeArchived=includeArchived)
+   return {"modules": modules}


-@router.post("/{instanceId}/contexts")
+@router.post("/{instanceId}/modules")
@limiter.limit("20/minute")
-async def createContext(
+async def createModule(
    request: Request,
    instanceId: str,
-   body: CreateContextRequest,
+   body: CreateModuleRequest,
    context: RequestContext = Depends(getRequestContext),
):
-    """Create a new coaching context/dossier."""
+    """Create a new training module."""
    mandateId = _validateInstanceAccess(instanceId, context)
    interface = _getInterface(context, instanceId)
    userId = str(context.user.id)

-   goalsJson = None
-   if body.goals:
-       import uuid as _uuid
-       goalsList = [{"id": str(_uuid.uuid4()), "text": g, "status": "open", "createdAt": ""} for g in body.goals]
-       goalsJson = json.dumps(goalsList)
-
-   contextData = CoachingContext(
+   moduleData = TrainingModule(
        userId=userId,
        mandateId=mandateId,
        instanceId=instanceId,
        title=body.title,
        description=body.description,
-       category=body.category,
-       goals=goalsJson,
+       moduleType=body.moduleType,
+       goals=body.goals,
+       personaId=body.personaId,
+       kpiTargets=body.kpiTargets,
    ).model_dump()

-   created = interface.createContext(contextData)
-   logger.info(f"CommCoach context created: {created.get('id')} for user {userId}")
-   _audit(context, "commcoach.context.created", "CoachingContext", created.get("id"), f"Title: {body.title}")
-   return {"context": created}
+   created = interface.createModule(moduleData)
+   logger.info(f"CommCoach module created: {created.get('id')} for user {userId}")
+   _audit(context, "commcoach.module.created", "TrainingModule", created.get("id"), f"Title: {body.title}")
+   return {"module": created}


-@router.get("/{instanceId}/contexts/{contextId}")
+@router.get("/{instanceId}/modules/{moduleId}")
@limiter.limit("60/minute")
-async def getContext(
+async def getModuleDetail(
    request: Request,
    instanceId: str,
-   contextId: str,
+   moduleId: str,
    context: RequestContext = Depends(getRequestContext),
):
-    """Get a coaching context with tasks and score summary."""
+    """Get a training module with tasks and score summary."""
    _validateInstanceAccess(instanceId, context)
    interface = _getInterface(context, instanceId)
    userId = str(context.user.id)

-   ctx = interface.getContext(contextId)
-   if not ctx:
-       raise HTTPException(status_code=404, detail=routeApiMsg("Context not found"))
-   _validateOwnership(ctx, context)
+   mod = interface.getModule(moduleId)
+   if not mod:
+       raise HTTPException(status_code=404, detail=routeApiMsg("Module not found"))
+   _validateOwnership(mod, context)

-   tasks = interface.getTasks(contextId, userId)
-   scores = interface.getScores(contextId, userId)
-   sessions = interface.getSessions(contextId, userId)
+   tasks = interface.getTasks(moduleId, userId)
+   scores = interface.getScores(moduleId, userId)
+   sessions = interface.getSessions(moduleId, userId)

    return {
-       "context": ctx,
+       "module": mod,
        "tasks": tasks,
        "scores": scores,
        "sessions": sessions,
    }


-@router.put("/{instanceId}/contexts/{contextId}")
+@router.put("/{instanceId}/modules/{moduleId}")
@limiter.limit("30/minute")
-async def updateContext(
+async def updateModuleFields(
    request: Request,
    instanceId: str,
-   contextId: str,
-   body: UpdateContextRequest,
+   moduleId: str,
+   body: UpdateModuleRequest,
    context: RequestContext = Depends(getRequestContext),
):
    _validateInstanceAccess(instanceId, context)
    interface = _getInterface(context, instanceId)

-   ctx = interface.getContext(contextId)
-   if not ctx:
-       raise HTTPException(status_code=404, detail=routeApiMsg("Context not found"))
-   _validateOwnership(ctx, context)
+   mod = interface.getModule(moduleId)
+   if not mod:
+       raise HTTPException(status_code=404, detail=routeApiMsg("Module not found"))
+   _validateOwnership(mod, context)

    updates = body.model_dump(exclude_none=True)
-   updated = interface.updateContext(contextId, updates)
-   return {"context": updated}
+   updated = interface.updateModule(moduleId, updates)
+   return {"module": updated}


-@router.delete("/{instanceId}/contexts/{contextId}")
+@router.delete("/{instanceId}/modules/{moduleId}")
@limiter.limit("10/minute")
-async def deleteContext(
+async def deleteModuleAndData(
    request: Request,
    instanceId: str,
-   contextId: str,
+   moduleId: str,
    context: RequestContext = Depends(getRequestContext),
):
    _validateInstanceAccess(instanceId, context)
    interface = _getInterface(context, instanceId)

-   ctx = interface.getContext(contextId)
-   if not ctx:
-       raise HTTPException(status_code=404, detail=routeApiMsg("Context not found"))
-   _validateOwnership(ctx, context)
+   mod = interface.getModule(moduleId)
+   if not mod:
+       raise HTTPException(status_code=404, detail=routeApiMsg("Module not found"))
+   _validateOwnership(mod, context)

-   interface.deleteContext(contextId)
+   interface.deleteModule(moduleId)
    return {"deleted": True}


-@router.post("/{instanceId}/contexts/{contextId}/archive")
+@router.post("/{instanceId}/modules/{moduleId}/archive")
@limiter.limit("10/minute")
-async def archiveContext(
+async def archiveModule(
    request: Request,
    instanceId: str,
-   contextId: str,
+   moduleId: str,
    context: RequestContext = Depends(getRequestContext),
):
    _validateInstanceAccess(instanceId, context)
    interface = _getInterface(context, instanceId)

-   ctx = interface.getContext(contextId)
-   if not ctx:
-       raise HTTPException(status_code=404, detail=routeApiMsg("Context not found"))
-   _validateOwnership(ctx, context)
+   mod = interface.getModule(moduleId)
+   if not mod:
+       raise HTTPException(status_code=404, detail=routeApiMsg("Module not found"))
+   _validateOwnership(mod, context)

-   updated = interface.updateContext(contextId, {"status": CoachingContextStatus.ARCHIVED.value})
-   _audit(context, "commcoach.context.archived", "CoachingContext", contextId)
-   return {"context": updated}
+   updated = interface.updateModule(moduleId, {"status": TrainingModuleStatus.ARCHIVED.value})
+   _audit(context, "commcoach.module.archived", "TrainingModule", moduleId)
+   return {"module": updated}


-@router.post("/{instanceId}/contexts/{contextId}/activate")
+@router.post("/{instanceId}/modules/{moduleId}/activate")
@limiter.limit("10/minute")
-async def activateContext(
+async def activateModule(
    request: Request,
    instanceId: str,
-   contextId: str,
+   moduleId: str,
    context: RequestContext = Depends(getRequestContext),
):
    _validateInstanceAccess(instanceId, context)
    interface = _getInterface(context, instanceId)

-   ctx = interface.getContext(contextId)
-   if not ctx:
-       raise HTTPException(status_code=404, detail=routeApiMsg("Context not found"))
-   _validateOwnership(ctx, context)
+   mod = interface.getModule(moduleId)
+   if not mod:
+       raise HTTPException(status_code=404, detail=routeApiMsg("Module not found"))
+   _validateOwnership(mod, context)

-   updated = interface.updateContext(contextId, {"status": CoachingContextStatus.ACTIVE.value})
-   return {"context": updated}
+   updated = interface.updateModule(moduleId, {"status": TrainingModuleStatus.ACTIVE.value})
+   return {"module": updated}


# =========================================================================
# Session Endpoints
# =========================================================================

-@router.get("/{instanceId}/contexts/{contextId}/sessions")
+@router.get("/{instanceId}/modules/{moduleId}/sessions")
@limiter.limit("60/minute")
async def listSessions(
    request: Request,
    instanceId: str,
-   contextId: str,
+   moduleId: str,
    context: RequestContext = Depends(getRequestContext),
):
    _validateInstanceAccess(instanceId, context)
    interface = _getInterface(context, instanceId)
    userId = str(context.user.id)

-   ctx = interface.getContext(contextId)
-   if not ctx:
-       raise HTTPException(status_code=404, detail=routeApiMsg("Context not found"))
-   _validateOwnership(ctx, context)
+   mod = interface.getModule(moduleId)
+   if not mod:
+       raise HTTPException(status_code=404, detail=routeApiMsg("Module not found"))
+   _validateOwnership(mod, context)

-   sessions = interface.getSessions(contextId, userId)
+   sessions = interface.getSessions(moduleId, userId)
    return {"sessions": sessions}


-@router.post("/{instanceId}/contexts/{contextId}/sessions/start")
+@router.post("/{instanceId}/modules/{moduleId}/sessions/start")
@limiter.limit("10/minute")
async def startSession(
    request: Request,
    instanceId: str,
-   contextId: str,
+   moduleId: str,
    personaId: Optional[str] = None,
    context: RequestContext = Depends(getRequestContext),
):
@@ -297,22 +293,22 @@ async def startSession(
     interface = _getInterface(context, instanceId)
     userId = str(context.user.id)
 
-    ctx = interface.getContext(contextId)
-    if not ctx:
-        raise HTTPException(status_code=404, detail=routeApiMsg("Context not found"))
-    _validateOwnership(ctx, context)
+    mod = interface.getModule(moduleId)
+    if not mod:
+        raise HTTPException(status_code=404, detail=routeApiMsg("Module not found"))
+    _validateOwnership(mod, context)
 
-    activeSession = interface.getActiveSession(contextId, userId)
+    activeSession = interface.getActiveSession(moduleId, userId)
     if activeSession:
         sessionId = activeSession.get("id")
         messages = interface.getMessages(sessionId)
 
         async def _resumedEventGenerator():
             service = CommcoachService(context.user, mandateId, instanceId)
-            greetingText = await service.generateResumeGreeting(sessionId, contextId, messages, interface)
+            greetingText = await service.generateResumeGreeting(sessionId, moduleId, messages, interface)
             assistantMsg = CoachingMessage(
                 sessionId=sessionId,
-                contextId=contextId,
+                moduleId=moduleId,
                 userId=userId,
                 role=CoachingMessageRole.ASSISTANT,
                 content=greetingText,
@@ -323,7 +319,7 @@ async def startSession(
             greetingForFrontend = {
                 "id": createdGreeting.get("id"),
                 "sessionId": sessionId,
-                "contextId": contextId,
+                "moduleId": moduleId,
                 "role": "assistant",
                 "content": greetingText,
                 "contentType": "text",
@@ -336,10 +332,10 @@ async def startSession(
             try:
                 from modules.interfaces.interfaceVoiceObjects import getVoiceInterface
                 voiceInterface = getVoiceInterface(context.user, mandateId)
-                from .serviceCommcoach import _getUserVoicePrefs, _stripMarkdownForTts, _buildTtsConfigErrorMessage
-                language, voiceName = _getUserVoicePrefs(userId, mandateId)
+                from .serviceCommcoach import getUserVoicePrefs, stripMarkdownForTts, buildTtsConfigErrorMessage
+                language, voiceName = getUserVoicePrefs(userId, mandateId)
                 ttsResult = await voiceInterface.textToSpeech(
-                    text=_stripMarkdownForTts(greetingText),
+                    text=stripMarkdownForTts(greetingText),
                     languageCode=language,
                     voiceName=voiceName,
                 )
@@ -365,7 +361,7 @@ async def startSession(
     )
 
     sessionData = CoachingSession(
-        contextId=contextId,
+        moduleId=moduleId,
         userId=userId,
         mandateId=mandateId,
        instanceId=instanceId,
@@ -378,7 +374,7 @@ async def startSession(
     await emitSessionEvent(sessionId, "sessionState", {"session": created, "resumed": False})
 
     service = CommcoachService(context.user, mandateId, instanceId)
-    asyncio.create_task(service.processSessionOpening(sessionId, contextId, interface))
+    asyncio.create_task(service.processSessionOpening(sessionId, moduleId, interface))
 
     async def _newSessionEventGenerator():
         from modules.shared.timeUtils import getIsoTimestamp
@@ -399,8 +395,8 @@ async def startSession(
     except asyncio.CancelledError:
         pass
 
-    logger.info(f"CommCoach session started (streaming): {sessionId} for context {contextId}")
-    _audit(context, "commcoach.session.started", "CoachingSession", sessionId, f"Context: {contextId}")
+    logger.info(f"CommCoach session started (streaming): {sessionId} for module {moduleId}")
+    _audit(context, "commcoach.session.started", "CoachingSession", sessionId, f"Module: {moduleId}")
     return StreamingResponse(
         _newSessionEventGenerator(),
         media_type="text/event-stream",
@@ -471,10 +467,10 @@ async def cancelSession(
         raise HTTPException(status_code=404, detail=routeApiMsg("Session not found"))
     _validateOwnership(session, context)
 
-    from modules.shared.timeUtils import getIsoTimestamp
+    from modules.shared.timeUtils import getUtcTimestamp
     interface.updateSession(sessionId, {
         "status": CoachingSessionStatus.CANCELLED.value,
-        "endedAt": getIsoTimestamp(),
+        "endedAt": getUtcTimestamp(),
     })
     return {"cancelled": True}
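A side note on the `getIsoTimestamp()` to `getUtcTimestamp()` swap that starts in this hunk and recurs through the rest of the change set: it only stays consistent because every consumer of `startedAt`/`endedAt` (duration math, streaks, date formatting) is migrated in the same branch. Both helper names exist in `modules.shared.timeUtils`; their exact return shapes are not shown in the diff, so the sketch below is an inference from the call sites (`datetime.fromtimestamp(startedAt, ...)` implies an epoch number):

# Hypothetical sketch of the two timeUtils helpers, inferred from call sites.
from datetime import datetime, timezone

def getIsoTimestamp() -> str:
    # ISO-8601 string, e.g. "2025-01-31T14:05:00+00:00"
    return datetime.now(timezone.utc).isoformat()

def getUtcTimestamp() -> float:
    # Epoch seconds as a float, e.g. 1738332300.0
    return datetime.now(timezone.utc).timestamp()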
@@ -504,7 +500,7 @@ async def sendMessageStream(
     if session.get("status") != CoachingSessionStatus.ACTIVE.value:
         raise HTTPException(status_code=400, detail=routeApiMsg("Session is not active"))
 
-    contextId = session.get("contextId")
+    moduleId = session.get("moduleId")
     service = CommcoachService(context.user, mandateId, instanceId)
 
     existingTask = _activeProcessTasks.get(sessionId)
@@ -517,7 +513,7 @@ async def sendMessageStream(
 
     task = asyncio.create_task(
         service.processMessage(
-            sessionId, contextId, body.content, interface,
+            sessionId, moduleId, body.content, interface,
             fileIds=body.fileIds,
             dataSourceIds=body.dataSourceIds,
             featureDataSourceIds=body.featureDataSourceIds,
@@ -584,14 +580,14 @@ async def sendAudioStream(
     if not audioBody:
         raise HTTPException(status_code=400, detail=routeApiMsg("No audio data received"))
 
-    from .serviceCommcoach import _getUserVoicePrefs
-    language, _ = _getUserVoicePrefs(str(context.user.id), mandateId)
+    from .serviceCommcoach import getUserVoicePrefs
+    language, _ = getUserVoicePrefs(str(context.user.id), mandateId)
 
-    contextId = session.get("contextId")
+    moduleId = session.get("moduleId")
     service = CommcoachService(context.user, mandateId, instanceId)
 
     asyncio.create_task(
-        service.processAudioMessage(sessionId, contextId, audioBody, language, interface)
+        service.processAudioMessage(sessionId, moduleId, audioBody, language, interface)
     )
 
     async def _eventGenerator():
@@ -680,27 +676,27 @@ async def streamSession(
 # Task Endpoints
 # =========================================================================
 
-@router.get("/{instanceId}/contexts/{contextId}/tasks")
+@router.get("/{instanceId}/modules/{moduleId}/tasks")
 @limiter.limit("60/minute")
 async def listTasks(
     request: Request,
     instanceId: str,
-    contextId: str,
+    moduleId: str,
     context: RequestContext = Depends(getRequestContext),
 ):
     _validateInstanceAccess(instanceId, context)
     interface = _getInterface(context, instanceId)
     userId = str(context.user.id)
-    tasks = interface.getTasks(contextId, userId)
+    tasks = interface.getTasks(moduleId, userId)
     return {"tasks": tasks}
 
 
-@router.post("/{instanceId}/contexts/{contextId}/tasks")
+@router.post("/{instanceId}/modules/{moduleId}/tasks")
 @limiter.limit("30/minute")
 async def createTask(
     request: Request,
     instanceId: str,
-    contextId: str,
+    moduleId: str,
     body: CreateTaskRequest,
     context: RequestContext = Depends(getRequestContext),
 ):
@@ -708,13 +704,13 @@ async def createTask(
     interface = _getInterface(context, instanceId)
     userId = str(context.user.id)
 
-    ctx = interface.getContext(contextId)
-    if not ctx:
-        raise HTTPException(status_code=404, detail=routeApiMsg("Context not found"))
-    _validateOwnership(ctx, context)
+    mod = interface.getModule(moduleId)
+    if not mod:
+        raise HTTPException(status_code=404, detail=routeApiMsg("Module not found"))
+    _validateOwnership(mod, context)
 
     taskData = CoachingTask(
-        contextId=contextId,
+        moduleId=moduleId,
         userId=userId,
         mandateId=mandateId,
         title=body.title,
@@ -768,8 +764,8 @@ async def updateTaskStatus(
 
     updates = {"status": body.status.value}
     if body.status == CoachingTaskStatus.DONE:
-        from modules.shared.timeUtils import getIsoTimestamp
-        updates["completedAt"] = getIsoTimestamp()
+        from modules.shared.timeUtils import getUtcTimestamp
+        updates["completedAt"] = getUtcTimestamp()
 
     updated = interface.updateTask(taskId, updates)
     return {"task": updated}
@@ -853,12 +849,12 @@ async def updateProfile(
 # Export Endpoints (Iteration 2)
 # =========================================================================
 
-@router.get("/{instanceId}/contexts/{contextId}/export")
+@router.get("/{instanceId}/modules/{moduleId}/export")
 @limiter.limit("10/minute")
 async def exportDossier(
     request: Request,
     instanceId: str,
-    contextId: str,
+    moduleId: str,
     format: str = "md",
     context: RequestContext = Depends(getRequestContext),
 ):
@@ -867,26 +863,26 @@ async def exportDossier(
     interface = _getInterface(context, instanceId)
     userId = str(context.user.id)
 
-    ctx = interface.getContext(contextId)
-    if not ctx:
-        raise HTTPException(status_code=404, detail=routeApiMsg("Context not found"))
-    _validateOwnership(ctx, context)
+    mod = interface.getModule(moduleId)
+    if not mod:
+        raise HTTPException(status_code=404, detail=routeApiMsg("Module not found"))
+    _validateOwnership(mod, context)
 
-    tasks = interface.getTasks(contextId, userId)
-    scores = interface.getScores(contextId, userId)
-    sessions = interface.getSessions(contextId, userId)
+    tasks = interface.getTasks(moduleId, userId)
+    scores = interface.getScores(moduleId, userId)
+    sessions = interface.getSessions(moduleId, userId)
 
     from .serviceCommcoachExport import buildDossierMarkdown, renderDossierPdf
-    _audit(context, "commcoach.export.requested", "CoachingContext", contextId, f"format={format}")
+    _audit(context, "commcoach.export.requested", "TrainingModule", moduleId, f"format={format}")
 
     if format == "pdf":
-        pdfBytes = await renderDossierPdf(ctx, sessions, tasks, scores)
+        pdfBytes = await renderDossierPdf(mod, sessions, tasks, scores)
         return Response(content=pdfBytes, media_type="application/pdf",
-                        headers={"Content-Disposition": f'attachment; filename="dossier_{contextId[:8]}.pdf"'})
+                        headers={"Content-Disposition": f'attachment; filename="dossier_{moduleId[:8]}.pdf"'})
 
-    md = buildDossierMarkdown(ctx, sessions, tasks, scores)
+    md = buildDossierMarkdown(mod, sessions, tasks, scores)
     return Response(content=md, media_type="text/markdown",
-                    headers={"Content-Disposition": f'attachment; filename="dossier_{contextId[:8]}.md"'})
+                    headers={"Content-Disposition": f'attachment; filename="dossier_{moduleId[:8]}.md"'})
 
 
 @router.get("/{instanceId}/sessions/{sessionId}/export")
@@ -907,11 +903,11 @@ async def exportSession(
         raise HTTPException(status_code=404, detail=routeApiMsg("Session not found"))
     _validateOwnership(session, context)
 
-    contextId = session.get("contextId")
+    moduleId = session.get("moduleId")
     userId = str(context.user.id)
     messages = interface.getMessages(sessionId)
-    tasks = interface.getTasks(contextId, userId) if contextId else []
-    scores = interface.getScores(contextId, userId) if contextId else []
+    tasks = interface.getTasks(moduleId, userId) if moduleId else []
+    scores = interface.getScores(moduleId, userId) if moduleId else []
 
     from .serviceCommcoachExport import buildSessionMarkdown, renderSessionPdf
     _audit(context, "commcoach.export.requested", "CoachingSession", sessionId, f"format={format}")
@@ -935,13 +931,47 @@ async def exportSession(
 async def listPersonas(
     request: Request,
     instanceId: str,
+    pagination: Optional[str] = Query(None),
+    mode: Optional[str] = Query(None, description="'filterValues' or 'ids'"),
+    column: Optional[str] = Query(None, description="Column key for mode=filterValues"),
     context: RequestContext = Depends(getRequestContext),
 ):
     _validateInstanceAccess(instanceId, context)
     interface = _getInterface(context, instanceId)
-    userId = str(context.user.id)
-    personas = interface.getPersonas(userId, instanceId)
-    return {"personas": personas}
+    allPersonas = interface.getAllPersonas(instanceId)
+
+    if mode == "filterValues":
+        from modules.routes.routeHelpers import handleFilterValuesInMemory
+        if not column:
+            raise HTTPException(status_code=400, detail=routeApiMsg("column parameter required"))
+        return handleFilterValuesInMemory(allPersonas, column, pagination)
+    if mode == "ids":
+        from modules.routes.routeHelpers import handleIdsInMemory
+        return handleIdsInMemory(allPersonas, pagination)
+
+    if pagination:
+        import json as _json
+        from modules.datamodels.datamodelPagination import PaginationParams, PaginationMetadata, normalize_pagination_dict
+        from modules.routes.routeHelpers import applyFiltersAndSort, paginateInMemory
+        paginationDict = _json.loads(pagination)
+        paginationDict = normalize_pagination_dict(paginationDict)
+        paginationParams = PaginationParams(**paginationDict)
+        filtered = applyFiltersAndSort(allPersonas, paginationParams)
+        pageItems, totalItems = paginateInMemory(filtered, paginationParams)
+        import math
+        return {
+            "items": pageItems,
+            "pagination": PaginationMetadata(
+                currentPage=paginationParams.page,
+                pageSize=paginationParams.pageSize,
+                totalItems=totalItems,
+                totalPages=math.ceil(totalItems / paginationParams.pageSize) if totalItems > 0 else 0,
+                sort=[s.model_dump() for s in paginationParams.sort] if paginationParams.sort else [],
+                filters=paginationParams.filters,
+            ).model_dump(),
+        }
+
+    return {"items": allPersonas, "pagination": None}
 
 
 @router.post("/{instanceId}/personas")
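For reference, the new `pagination` query parameter carries a JSON-encoded object. A hypothetical client call is sketched below; the host, auth token, and the exact sort/filter schema are assumptions, while the top-level keys (`page`, `pageSize`, `sort`, `filters`) mirror the `PaginationParams` usage in the hunk above.

# Hypothetical client call against the paginated persona listing.
import json
import requests

instanceId = "inst-123"  # placeholder
pagination = {
    "page": 1,
    "pageSize": 25,
    "sort": [{"field": "label", "direction": "asc"}],  # assumed sort schema
    "filters": {},
}
resp = requests.get(
    f"https://gateway.example/api/commcoach/{instanceId}/personas",
    params={"pagination": json.dumps(pagination)},
    headers={"Authorization": "Bearer <token>"},
)
print(resp.json()["pagination"]["totalItems"])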
@@ -1017,6 +1047,43 @@ async def deletePersonaRoute(
     return {"deleted": True}
 
 
+# =========================================================================
+# Module-Persona Mapping Endpoints
+# =========================================================================
+
+@router.get("/{instanceId}/modules/{moduleId}/personas")
+@limiter.limit("60/minute")
+async def getModulePersonas(
+    request: Request,
+    instanceId: str,
+    moduleId: str,
+    context: RequestContext = Depends(getRequestContext),
+):
+    _validateInstanceAccess(instanceId, context)
+    interface = _getInterface(context, instanceId)
+    mappings = interface.getModulePersonas(moduleId)
+    personaIds = [m["personaId"] for m in mappings]
+    return {"personaIds": personaIds}
+
+
+@router.put("/{instanceId}/modules/{moduleId}/personas")
+@limiter.limit("20/minute")
+async def setModulePersonas(
+    request: Request,
+    instanceId: str,
+    moduleId: str,
+    body: SetModulePersonasRequest,
+    context: RequestContext = Depends(getRequestContext),
+):
+    _validateInstanceAccess(instanceId, context)
+    interface = _getInterface(context, instanceId)
+    module = interface.getModule(moduleId)
+    if not module:
+        raise HTTPException(status_code=404, detail=routeApiMsg("Module not found"))
+    interface.setModulePersonas(moduleId, body.personaIds, instanceId)
+    return {"personaIds": body.personaIds}
+
+
 # =========================================================================
 # Badge + Score History Endpoints (Iteration 2)
 # =========================================================================
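Before moving on, a usage round-trip for the module-persona mapping endpoints introduced above (paths and payload shape come from the hunk; host, ids, and auth are placeholders):

# Hypothetical usage of the new mapping endpoints.
import requests

url = "https://gateway.example/api/commcoach/inst-123/modules/mod-456/personas"
headers = {"Authorization": "Bearer <token>"}

# PUT replaces the module's persona set wholesale (no partial add/remove).
requests.put(url, json={"personaIds": ["mediator_f", "hr_manager_f"]}, headers=headers)

# GET returns just the mapped ids.
print(requests.get(url, headers=headers).json())  # {"personaIds": ["mediator_f", "hr_manager_f"]}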
@@ -1035,16 +1102,46 @@ async def listBadges(
     return {"badges": badges}
 
 
-@router.get("/{instanceId}/contexts/{contextId}/scores/history")
+@router.get("/{instanceId}/modules/{moduleId}/scores/history")
 @limiter.limit("60/minute")
 async def getScoreHistory(
     request: Request,
     instanceId: str,
-    contextId: str,
+    moduleId: str,
     context: RequestContext = Depends(getRequestContext),
 ):
     _validateInstanceAccess(instanceId, context)
     interface = _getInterface(context, instanceId)
     userId = str(context.user.id)
-    history = interface.getScoreHistory(contextId, userId)
+    history = interface.getScoreHistory(moduleId, userId)
     return {"history": history}
+
+
+# =========================================================================
+# Backward-Compatibility Redirects (old /contexts/ paths → /modules/)
+# =========================================================================
+
+@router.get("/{instanceId}/contexts")
+async def _redirectListContexts(instanceId: str, request: Request):
+    qs = f"?{request.query_params}" if request.query_params else ""
+    return Response(status_code=301, headers={"Location": f"/api/commcoach/{instanceId}/modules{qs}"})
+
+
+@router.post("/{instanceId}/contexts")
+async def _redirectCreateContext(instanceId: str, request: Request):
+    return Response(status_code=301, headers={"Location": f"/api/commcoach/{instanceId}/modules"})
+
+
+@router.get("/{instanceId}/contexts/{contextId}")
+async def _redirectGetContext(instanceId: str, contextId: str, request: Request):
+    return Response(status_code=301, headers={"Location": f"/api/commcoach/{instanceId}/modules/{contextId}"})
+
+
+@router.put("/{instanceId}/contexts/{contextId}")
+async def _redirectUpdateContext(instanceId: str, contextId: str, request: Request):
+    return Response(status_code=301, headers={"Location": f"/api/commcoach/{instanceId}/modules/{contextId}"})
+
+
+@router.delete("/{instanceId}/contexts/{contextId}")
+async def _redirectDeleteContext(instanceId: str, contextId: str, request: Request):
+    return Response(status_code=301, headers={"Location": f"/api/commcoach/{instanceId}/modules/{contextId}"})
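One caveat on the redirect shims above: several HTTP clients replay a 301 received on POST/PUT/DELETE as a GET, so method-changing callers should follow the redirect deliberately; a 308 would preserve the method, while this change uses 301. A quick probe (sketch; host and auth are placeholders):

# Sketch: confirm the legacy /contexts path redirects without following it.
import requests

r = requests.get(
    "https://gateway.example/api/commcoach/inst-123/contexts",
    headers={"Authorization": "Bearer <token>"},
    allow_redirects=False,
)
assert r.status_code == 301
print(r.headers["Location"])  # .../api/commcoach/inst-123/modules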
@@ -14,7 +14,7 @@ from typing import Optional, Dict, Any, List
 
 from modules.datamodels.datamodelUam import User
 from modules.datamodels.datamodelAi import AiCallRequest, AiCallOptions, OperationTypeEnum, PriorityEnum
-from modules.shared.timeUtils import getIsoTimestamp
+from modules.shared.timeUtils import getIsoTimestamp, getUtcTimestamp
 
 from .datamodelCommcoach import (
     CoachingMessage, CoachingMessageRole, CoachingMessageContentType,
@@ -79,7 +79,7 @@ def _selectConfiguredVoice(
     return None
 
 
-def _buildTtsConfigErrorMessage(language: str, voiceName: Optional[str], rawError: str = "") -> str:
+def buildTtsConfigErrorMessage(language: str, voiceName: Optional[str], rawError: str = "") -> str:
     if voiceName:
         return (
             f'Die konfigurierte Stimme "{voiceName}" für {language} ist ungültig oder nicht verfügbar. '
@@ -91,7 +91,7 @@ def _buildTtsConfigErrorMessage(language: str, voiceName: Optional[str], rawErro
     )
 
 
-def _getUserVoicePrefs(userId: str, mandateId: Optional[str] = None) -> tuple:
+def getUserVoicePrefs(userId: str, mandateId: Optional[str] = None) -> tuple:
     """Load voice language and voiceName from central UserVoicePreferences.
     Returns (language, voiceName) tuple."""
     try:
@@ -160,7 +160,7 @@ def _getUserVoicePrefs(userId: str, mandateId: Optional[str] = None) -> tuple:
     return ("de-DE", None)
 
 
-def _stripMarkdownForTts(text: str) -> str:
+def stripMarkdownForTts(text: str) -> str:
     """Strip markdown formatting so TTS reads clean speech text."""
     t = text
     t = re.sub(r'\*\*(.+?)\*\*', r'\1', t)
@@ -346,9 +346,9 @@ async def _generateAndEmitTts(sessionId: str, speechText: str, currentUser, mand
     from modules.interfaces.interfaceVoiceObjects import getVoiceInterface
     import base64
     voiceInterface = getVoiceInterface(currentUser, mandateId)
-    language, voiceName = _getUserVoicePrefs(str(currentUser.id), mandateId)
+    language, voiceName = getUserVoicePrefs(str(currentUser.id), mandateId)
     ttsResult = await voiceInterface.textToSpeech(
-        text=_stripMarkdownForTts(speechText),
+        text=stripMarkdownForTts(speechText),
         languageCode=language,
         voiceName=voiceName,
     )
@@ -362,7 +362,7 @@ async def _generateAndEmitTts(sessionId: str, speechText: str, currentUser, mand
         return
     errorDetail = ttsResult.get("error", "Text-to-Speech failed")
     await emitSessionEvent(sessionId, "error", {
-        "message": _buildTtsConfigErrorMessage(language, voiceName, errorDetail),
+        "message": buildTtsConfigErrorMessage(language, voiceName, errorDetail),
         "detail": errorDetail,
         "ttsLanguage": language,
         "ttsVoice": voiceName,
@@ -370,7 +370,7 @@ async def _generateAndEmitTts(sessionId: str, speechText: str, currentUser, mand
     except Exception as e:
         logger.warning(f"TTS failed for session {sessionId}: {e}")
         await emitSessionEvent(sessionId, "error", {
-            "message": _buildTtsConfigErrorMessage("de-DE", None, str(e)),
+            "message": buildTtsConfigErrorMessage("de-DE", None, str(e)),
             "detail": str(e),
         })
 
@@ -420,7 +420,7 @@ async def _saveOrUpdateDocument(doc: Dict[str, Any], contextId: str, userId: str
         logger.info(f"Document saved as platform FileItem: {fileItem.id} ({title})")
 
     except Exception as e:
-        logger.warning(f"Failed to save document as FileItem: {e}")
+        logger.error(f"Failed to save document as FileItem: {e}", exc_info=True)
 
 
 
@@ -483,12 +483,12 @@ def _loadDocumentContents(docIds: List[str], interface, mandateId: str = None, i
         content = ""
         try:
             from modules.datamodels.datamodelKnowledge import FileContentIndex
-            idxRecords = mgmtIf.db.getRecordset(FileContentIndex, recordFilter={"fileId": fId})
+            idxRecords = mgmtIf.db.getRecordset(FileContentIndex, recordFilter={"id": fId})
             if idxRecords:
                 idx = idxRecords[0] if isinstance(idxRecords[0], dict) else idxRecords[0].model_dump()
                 content = (idx.get("extractedText") or "")[:DOC_CONTENT_MAX_CHARS]
-        except Exception:
-            pass
+        except Exception as e:
+            logger.warning(f"Failed to load FileContentIndex for {fId}: {e}")
         results.append({
             "id": fId,
             "title": f.get("fileName") or f.get("name") or "Dokument",
@@ -557,13 +557,13 @@ def _getDocumentSummaries(contextId: str, userId: str, interface,
         try:
             from modules.datamodels.datamodelKnowledge import FileContentIndex
             idxRecords = mgmtIf.db.getRecordset(
-                FileContentIndex, recordFilter={"fileId": fId}
+                FileContentIndex, recordFilter={"id": fId}
             )
             if idxRecords:
                 idx = idxRecords[0] if isinstance(idxRecords[0], dict) else idxRecords[0].model_dump()
                 snippet = (idx.get("extractedText") or "")[:200]
-        except Exception:
-            pass
+        except Exception as e:
+            logger.warning(f"Failed to load FileContentIndex for {fId}: {e}")
         if snippet:
             summaries.append(f"[{name}] {snippet}...")
         else:
@@ -690,12 +690,12 @@ def _buildConversationHistory(messages: List[Dict[str, Any]]) -> List[Dict[str,
     return history
 
 
-_TTS_WORD_LIMIT = 200
+_TTS_WORD_LIMIT = 80
 
 
 async def _prepareSpeechText(fullText: str, callAiFn) -> str:
     """Prepare text for TTS. Short responses used directly; long ones get summarized."""
-    cleaned = _stripMarkdownForTts(fullText)
+    cleaned = stripMarkdownForTts(fullText)
     wordCount = len(cleaned.split())
     if wordCount <= _TTS_WORD_LIMIT:
         return cleaned
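The word limit dropping from 200 to 80 moves the point at which `_prepareSpeechText` routes a reply through the summarization call. The cutoff in isolation (a minimal sketch, mirroring the check above):

# Sketch: the new TTS cutoff. Counting is plain whitespace word count on the
# markdown-stripped text, not characters or tokens.
_TTS_WORD_LIMIT = 80

def needsTtsSummary(cleanedText: str) -> bool:
    return len(cleanedText.split()) > _TTS_WORD_LIMIT

print(needsTtsSummary("kurze Antwort"))  # False: spoken as-is
print(needsTtsSummary("wort " * 120))    # True: summarized before TTS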
@@ -748,7 +748,7 @@ class CommcoachService:
         # Store user message
         userMsg = CoachingMessage(
             sessionId=sessionId,
-            contextId=contextId,
+            moduleId=contextId,
             userId=self.userId,
             role=CoachingMessageRole.USER,
             content=userContent,
@@ -764,7 +764,7 @@ class CommcoachService:
         })
 
         # Build context
-        context = interface.getContext(contextId)
+        context = interface.getModule(contextId)
         if not context:
             logger.error(f"Context {contextId} not found")
             return createdUserMsg
@@ -857,7 +857,7 @@ class CommcoachService:
 
         assistantMsg = CoachingMessage(
            sessionId=sessionId,
-            contextId=contextId,
+            moduleId=contextId,
             userId=self.userId,
             role=CoachingMessageRole.ASSISTANT,
             content=textContent,
@@ -906,10 +906,14 @@ class CommcoachService:
         )
         agentService = getService("agent", serviceContext)
 
+        from modules.datamodels.datamodelAi import PriorityEnum, OperationTypeEnum
         config = AgentConfig(
             toolSet="commcoach" if useTools else "none",
             maxRounds=3 if useTools else 1,
             temperature=0.4,
+            excludeAllTools=not useTools,
+            priority=PriorityEnum.SPEED if not useTools else None,
+            operationType=OperationTypeEnum.DATA_QUERY if not useTools else None,
         )
 
         buildRagContextFn = _createCommcoachRagFn(
@@ -946,6 +950,8 @@ class CommcoachService:
                 await emitSessionEvent(sessionId, "toolResult", event.data or {})
             elif event.type == AgentEventTypeEnum.AGENT_PROGRESS:
                 await emitSessionEvent(sessionId, "agentProgress", event.data or {})
+            elif event.type == AgentEventTypeEnum.FILE_CREATED:
+                await emitSessionEvent(sessionId, "documentCreated", event.data or {})
             elif event.type == AgentEventTypeEnum.ERROR:
                 await emitSessionEvent(sessionId, "error", {"message": event.content or "Agent error"})
 
@@ -958,7 +964,7 @@ class CommcoachService:
         """
         await emitSessionEvent(sessionId, "status", {"label": "Coach bereitet sich vor..."})
 
-        context = interface.getContext(contextId)
+        context = interface.getModule(contextId)
         if not context:
             logger.error(f"Context {contextId} not found")
             await emitSessionEvent(sessionId, "error", {"message": "Context not found"})
@@ -987,10 +993,14 @@ class CommcoachService:
         )
 
         isFirstSession = not previousSessionSummaries or len(previousSessionSummaries) == 0
+        logger.info(f"Session opening {sessionId}: isFirstSession={isFirstSession}, previousSessions={len(previousSessionSummaries) if previousSessionSummaries else 0}, persona={persona.get('key') if persona else None}")
+
         if persona and persona.get("key") != "coach":
             personaLabel = persona.get("label", "Gesprächspartner")
-            openingUserPrompt = f"Beginne das Gespräch in deiner Rolle als {personaLabel}. Stelle dich kurz vor und eröffne die Situation gemäss deiner Rollenbeschreibung."
+            if isFirstSession:
+                openingUserPrompt = f"Beginne das Gespräch in deiner Rolle als {personaLabel}. Stelle dich kurz vor und eröffne die Situation gemäss deiner Rollenbeschreibung."
+            else:
+                openingUserPrompt = f"Du bist weiterhin in deiner Rolle als {personaLabel}. Der Benutzer kehrt zu einem Folgegespräch zurück. Begrüsse ihn kurz zurück, beziehe dich auf das letzte Gespräch (siehe bisherige Sessions) und knüpfe dort an. Stelle dich NICHT erneut vor."
         elif isFirstSession:
             openingUserPrompt = "Dies ist die ERSTE Session zu diesem Thema. Begrüsse den Benutzer, stelle das Thema kurz vor und stelle eine offene Einstiegsfrage. Erfinde KEINE vorherigen Gespräche oder Zusammenfassungen."
         else:
@@ -1024,7 +1034,7 @@ class CommcoachService:
 
         assistantMsg = CoachingMessage(
             sessionId=sessionId,
-            contextId=contextId,
+            moduleId=contextId,
             userId=self.userId,
             role=CoachingMessageRole.ASSISTANT,
             content=textContent,
@@ -1046,7 +1056,7 @@ class CommcoachService:
 
     async def generateResumeGreeting(self, sessionId: str, contextId: str, messages: list, interface) -> str:
         """Generate a follow-up greeting when user returns to an active session."""
-        context = interface.getContext(contextId)
+        context = interface.getModule(contextId)
         if not context:
             raise ValueError(f"Context {contextId} not found for resume greeting")
         contextTitle = context.get("title", "Coaching")
@@ -1100,14 +1110,16 @@ class CommcoachService:
         if not session:
             return {}
 
-        contextId = session.get("contextId")
-        context = interface.getContext(contextId) if contextId else None
+        contextId = session.get("moduleId")
+        if not contextId:
+            logger.error(f"completeSession: session {sessionId} has no moduleId")
+        context = interface.getModule(contextId) if contextId else None
         messages = interface.getMessages(sessionId)
 
         if len(messages) < 2:
             interface.updateSession(sessionId, {
                 "status": CoachingSessionStatus.COMPLETED.value,
-                "endedAt": getIsoTimestamp(),
+                "endedAt": getUtcTimestamp(),
                 "compressedHistorySummary": None,
                 "compressedHistoryUpToMessageCount": None,
             })
@@ -1156,7 +1168,7 @@ class CommcoachService:
             for taskData in extractedTasks[:3]:
                 if isinstance(taskData, dict) and taskData.get("title"):
                     newTask = CoachingTask(
-                        contextId=contextId,
+                        moduleId=contextId,
                         sessionId=sessionId,
                         userId=self.userId,
                         mandateId=self.mandateId,
@@ -1181,7 +1193,7 @@ class CommcoachService:
             for scoreData in scores:
                 if isinstance(scoreData, dict) and "dimension" in scoreData and "score" in scoreData:
                     newScore = CoachingScore(
-                        contextId=contextId,
+                        moduleId=contextId,
                         sessionId=sessionId,
                         userId=self.userId,
                         mandateId=self.mandateId,
@@ -1213,7 +1225,7 @@ class CommcoachService:
                 existingInsights.append({"text": insightText, "sessionId": sessionId, "createdAt": getIsoTimestamp()})
                 await emitSessionEvent(sessionId, "insightGenerated", {"text": insightText, "sessionId": sessionId})
                 if contextId and existingInsights:
-                    interface.updateContext(contextId, {"insights": json.dumps(existingInsights[-10:])})
+                    interface.updateModule(contextId, {"insights": json.dumps(existingInsights[-10:])})
             except Exception as e:
                 logger.warning(f"Insight generation failed: {e}")
 
@@ -1252,21 +1264,18 @@ class CommcoachService:
             logger.warning(f"Coaching session indexing failed (non-blocking): {e}")
 
         # Calculate duration
-        startedAt = session.get("startedAt", "")
+        startedAt = session.get("startedAt")
         durationSeconds = 0
         if startedAt:
-            try:
-                from datetime import datetime
-                start = datetime.fromisoformat(startedAt.replace("Z", "+00:00"))
-                end = datetime.now(start.tzinfo) if start.tzinfo else datetime.now()
-                durationSeconds = int((end - start).total_seconds())
-            except Exception:
-                pass
+            from datetime import datetime, timezone
+            start = datetime.fromtimestamp(startedAt, tz=timezone.utc)
+            end = datetime.now(timezone.utc)
+            durationSeconds = int((end - start).total_seconds())
 
         # Update session - clear compressed history so it never leaks into new sessions
         sessionUpdates = {
             "status": CoachingSessionStatus.COMPLETED.value,
-            "endedAt": getIsoTimestamp(),
+            "endedAt": getUtcTimestamp(),
             "summary": summary,
             "durationSeconds": durationSeconds,
             "messageCount": len(messages),
@@ -1283,9 +1292,9 @@ class CommcoachService:
         if contextId:
             allSessions = interface.getSessions(contextId, self.userId)
             completedCount = len([s for s in allSessions if s.get("status") == CoachingSessionStatus.COMPLETED.value])
-            interface.updateContext(contextId, {
+            interface.updateModule(contextId, {
                 "sessionCount": completedCount,
-                "lastSessionAt": getIsoTimestamp(),
+                "lastSessionAt": getUtcTimestamp(),
             })
 
         # Update user profile streak
@@ -1324,26 +1333,23 @@ class CommcoachService:
         if not profile:
             profile = interface.getOrCreateProfile(self.userId, self.mandateId, self.instanceId)
 
-        from datetime import datetime, timedelta
+        from datetime import datetime, timezone
 
         lastSessionAt = profile.get("lastSessionAt")
         currentStreak = profile.get("streakDays", 0)
         longestStreak = profile.get("longestStreak", 0)
         totalSessions = profile.get("totalSessions", 0)
 
-        today = datetime.now().date()
+        today = datetime.now(timezone.utc).date()
         isConsecutive = False
 
         if lastSessionAt:
-            try:
-                lastDate = datetime.fromisoformat(lastSessionAt.replace("Z", "+00:00")).date()
-                diff = (today - lastDate).days
-                if diff == 1:
-                    isConsecutive = True
-                elif diff == 0:
-                    isConsecutive = True  # Same day, maintain streak
-            except Exception:
-                pass
+            lastDate = datetime.fromtimestamp(lastSessionAt, tz=timezone.utc).date()
+            diff = (today - lastDate).days
+            if diff == 1:
+                isConsecutive = True
+            elif diff == 0:
+                isConsecutive = True
 
         newStreak = (currentStreak + 1) if isConsecutive else 1
         newLongest = max(longestStreak, newStreak)
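Stated compactly, the streak rule after this rewrite: a gap of 0 or 1 UTC days since the last session increments the streak, anything larger resets it to 1. A minimal sketch, assuming `lastSessionAt` is now epoch seconds per the migration above:

# Sketch of the streak transition implemented above.
from datetime import datetime, timezone

def nextStreak(currentStreak: int, lastSessionAt: float | None) -> int:
    if not lastSessionAt:
        return 1
    lastDate = datetime.fromtimestamp(lastSessionAt, tz=timezone.utc).date()
    gap = (datetime.now(timezone.utc).date() - lastDate).days
    return currentStreak + 1 if gap in (0, 1) else 1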
@@ -1352,7 +1358,7 @@ class CommcoachService:
             "streakDays": newStreak,
             "longestStreak": newLongest,
             "totalSessions": totalSessions + 1,
-            "lastSessionAt": getIsoTimestamp(),
+            "lastSessionAt": getUtcTimestamp(),
         })
     except Exception as e:
         logger.warning(f"Failed to update streak: {e}")
@@ -1373,7 +1379,7 @@ class CommcoachService:
 
         from modules.interfaces.interfaceMessaging import getInterface as getMessagingInterface
         from modules.interfaces.interfaceDbApp import getRootInterface
-        from modules.shared.notifyMandateAdmins import _renderHtmlEmail, _resolveMandateName
+        from modules.shared.notifyMandateAdmins import renderHtmlEmail, resolveMandateName
 
         rootInterface = getRootInterface()
         user = rootInterface.getUser(self.userId)
@@ -1382,9 +1388,9 @@ class CommcoachService:
 
         messaging = getMessagingInterface()
         subject = f"Coaching-Session Zusammenfassung: {contextTitle}"
-        mandateName = _resolveMandateName(self.mandateId)
+        mandateName = resolveMandateName(self.mandateId)
         contentHtml = _buildSummaryEmailBlock(emailData, summary, contextTitle)
-        htmlMessage = _renderHtmlEmail(
+        htmlMessage = renderHtmlEmail(
             "Coaching-Session Zusammenfassung",
             [
                 f'Thema: {contextTitle}',
@@ -1418,14 +1424,13 @@ class CommcoachService:
         completedSessions = [s for s in allSessions if s.get("status") == CoachingSessionStatus.COMPLETED.value]
 
         for s in completedSessions:
-            startedAt = s.get("startedAt") or s.get("createdAt") or ""
+            startedAt = s.get("startedAt")
             if startedAt:
-                try:
-                    from datetime import datetime
-                    dt = datetime.fromisoformat(str(startedAt).replace("Z", "+00:00"))
-                    s["date"] = dt.strftime("%d.%m.%Y")
-                except Exception:
-                    s["date"] = ""
+                from datetime import datetime, timezone
+                dt = datetime.fromtimestamp(startedAt, tz=timezone.utc)
+                s["date"] = dt.strftime("%d.%m.%Y")
+            else:
+                s["date"] = ""
 
         result = {
             "intent": intent,
@@ -1436,7 +1441,7 @@ class CommcoachService:
             "sessionSummaries": [],
         }
 
-        ctx = interface.getContext(contextId)
+        ctx = interface.getModule(contextId)
         rollingOverview = ctx.get("rollingOverview") if ctx else None
         rollingUpTo = ctx.get("rollingOverviewUpToSessionCount") if ctx else None
 
@@ -1513,7 +1518,7 @@ class CommcoachService:
         )
         if overviewResponse and overviewResponse.errorCount == 0 and overviewResponse.content:
             newOverview = overviewResponse.content.strip()
-            interface.updateContext(contextId, {
+            interface.updateModule(contextId, {
                 "rollingOverview": newOverview,
                 "rollingOverviewUpToSessionCount": len(completedSessions),
             })
@@ -206,14 +206,11 @@ Tool-Nutzung:
 
     if retrievedSession:
         dateStr = ""
-        startedAt = retrievedSession.get("startedAt") or retrievedSession.get("createdAt")
+        startedAt = retrievedSession.get("startedAt")
         if startedAt:
-            try:
-                from datetime import datetime
-                dt = datetime.fromisoformat(str(startedAt).replace("Z", "+00:00"))
-                dateStr = dt.strftime("%d.%m.%Y")
-            except Exception:
-                pass
+            from datetime import datetime, timezone
+            dt = datetime.fromtimestamp(startedAt, tz=timezone.utc)
+            dateStr = dt.strftime("%d.%m.%Y")
         prompt += f"\n\nVom Benutzer angefragte Session ({dateStr}):"
         prompt += f"\n{retrievedSession.get('summary', '')[:500]}"
@@ -7,7 +7,7 @@ Intent detection, retrieval strategies, and context assembly for intelligent ses
 
 import re
 import logging
-from datetime import datetime
+from datetime import datetime, timezone
 from typing import Optional, Dict, Any, List, Tuple
 from enum import Enum
 
@@ -106,18 +106,15 @@ def findSessionByDate(
     for s in sessions:
         if s.get("status") != "completed":
             continue
-        startedAt = s.get("startedAt") or s.get("endedAt") or s.get("createdAt")
+        startedAt = s.get("startedAt") or s.get("endedAt")
         if not startedAt:
             continue
-        try:
-            dt = datetime.fromisoformat(startedAt.replace("Z", "+00:00"))
-            sessionDate = dt.date()
-            diff = abs((sessionDate - targetDateOnly).days)
-            if bestDiff is None or diff < bestDiff:
-                bestDiff = diff
-                bestMatch = s
-        except Exception:
-            continue
+        dt = datetime.fromtimestamp(startedAt, tz=timezone.utc)
+        sessionDate = dt.date()
+        diff = abs((sessionDate - targetDateOnly).days)
+        if bestDiff is None or diff < bestDiff:
+            bestDiff = diff
+            bestMatch = s
 
     return bestMatch
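With epoch timestamps, the nearest-date lookup in `findSessionByDate` reduces to plain date arithmetic; a compressed restatement of the same selection rule (sketch only):

# Sketch: pick the completed session whose start date is closest to the target.
from datetime import date, datetime, timezone

def nearestSession(sessions: list, target: date):
    def dayGap(s):
        ts = s.get("startedAt") or s.get("endedAt")
        return abs((datetime.fromtimestamp(ts, tz=timezone.utc).date() - target).days)
    candidates = [
        s for s in sessions
        if s.get("status") == "completed" and (s.get("startedAt") or s.get("endedAt"))
    ]
    return min(candidates, key=dayGap, default=None)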
@@ -231,17 +228,14 @@ def buildSessionSummariesForPrompt(
         and s.get("summary")
         and s.get("id") != excludeSessionId
     ]
-    completed.sort(key=lambda x: x.get("startedAt") or x.get("createdAt") or "", reverse=True)
+    completed.sort(key=lambda x: x.get("startedAt") or 0, reverse=True)
     result = []
     for s in completed[:limit]:
-        startedAt = s.get("startedAt") or s.get("createdAt") or ""
+        startedAt = s.get("startedAt")
         dateStr = ""
         if startedAt:
-            try:
-                dt = datetime.fromisoformat(startedAt.replace("Z", "+00:00"))
-                dateStr = dt.strftime("%d.%m.%Y")
-            except Exception:
-                pass
+            dt = datetime.fromtimestamp(startedAt, tz=timezone.utc)
+            dateStr = dt.strftime("%d.%m.%Y")
         result.append({
             "summary": s.get("summary", ""),
             "date": dateStr,
@@ -8,7 +8,7 @@ Generates Markdown and PDF exports for dossiers and sessions.
 import logging
 import json
 from typing import Dict, Any, List, Optional
-from datetime import datetime
+from datetime import datetime, timezone
 
 logger = logging.getLogger(__name__)
 
@@ -49,7 +49,7 @@ def buildDossierMarkdown(context: Dict[str, Any], sessions: List[Dict[str, Any]]
         lines.append(f"- {text}")
 
     completedSessions = [s for s in sessions if s.get("status") == "completed"]
-    completedSessions.sort(key=lambda s: s.get("startedAt") or s.get("createdAt") or "")
+    completedSessions.sort(key=lambda s: s.get("startedAt") or 0)
     if completedSessions:
         lines += ["", "## Sessions", ""]
         for i, s in enumerate(completedSessions, 1):
@@ -227,14 +227,14 @@ def _mdToXml(text: str) -> str:
 
 
-def _formatDate(isoStr: Optional[str]) -> str:
-    if not isoStr:
-        return datetime.now().strftime("%d.%m.%Y")
-    try:
-        dt = datetime.fromisoformat(str(isoStr).replace("Z", "+00:00"))
-        return dt.strftime("%d.%m.%Y")
-    except Exception:
-        return isoStr
+def _formatDate(val) -> str:
+    if not val:
+        return datetime.now(timezone.utc).strftime("%d.%m.%Y")
+    if isinstance(val, (int, float)):
+        dt = datetime.fromtimestamp(float(val), tz=timezone.utc)
+        return dt.strftime("%d.%m.%Y")
+    dt = datetime.fromisoformat(str(val).replace("Z", "+00:00"))
+    return dt.strftime("%d.%m.%Y")
 
 
 def _parseJson(value, fallback):
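Because old rows still carry ISO strings while new rows carry epoch floats, `_formatDate` now has to accept both; a quick check of the two paths (sketch, mirroring the function in the hunk above):

# Sketch: _formatDate handles both representations during the migration window.
from datetime import datetime, timezone

def _formatDate(val) -> str:
    if not val:
        return datetime.now(timezone.utc).strftime("%d.%m.%Y")
    if isinstance(val, (int, float)):  # new rows: epoch seconds
        return datetime.fromtimestamp(float(val), tz=timezone.utc).strftime("%d.%m.%Y")
    # legacy rows: ISO-8601 strings, possibly with a trailing "Z"
    return datetime.fromisoformat(str(val).replace("Z", "+00:00")).strftime("%d.%m.%Y")

print(_formatDate(1738332300.0))             # 31.01.2025
print(_formatDate("2025-01-31T14:05:00Z"))   # 31.01.2025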
@@ -143,7 +143,7 @@ async def checkAndAwardBadges(interface, userId: str, mandateId: str, instanceId
         badgesToCheck.append(("roleplay_first", True))
 
     try:
-        from .datamodelCommcoach import CoachingContextStatus
+        from .datamodelCommcoach import TrainingModuleStatus
         allContexts = interface.db.getRecordset(
             interface.db.getRecordset.__self__.__class__.__mro__[0]  # avoid import issues
         ) if False else []
@@ -174,14 +174,26 @@ async def indexSessionData(
             for c in chunks
         ]
 
-        await knowledgeService.indexFile(
-            fileId=syntheticFileId,
-            fileName=f"coaching-session-{sessionId[:8]}",
-            mimeType="application/x-coaching-session",
-            userId=userId,
-            featureInstanceId=featureInstanceId,
-            mandateId=mandateId,
-            contentObjects=contentObjects,
+        from modules.serviceCenter.services.serviceKnowledge.mainServiceKnowledge import IngestionJob
+
+        await knowledgeService.requestIngestion(
+            IngestionJob(
+                sourceKind="coaching_session",
+                sourceId=syntheticFileId,
+                fileName=f"coaching-session-{sessionId[:8]}",
+                mimeType="application/x-coaching-session",
+                userId=userId,
+                featureInstanceId=featureInstanceId,
+                mandateId=mandateId,
+                contentObjects=contentObjects,
+                provenance={
+                    "lane": "feature",
+                    "feature": "commcoach",
+                    "sessionId": sessionId,
+                    "contextId": contextId,
+                    "messageCount": len(messages or []),
+                },
+            )
         )
         logger.info(f"Successfully indexed coaching session {sessionId} ({len(chunks)} chunks)")
     except Exception as e:
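The switch from a direct `indexFile(...)` call to `requestIngestion(IngestionJob(...))` turns indexing into a provenance-tagged job request. A second, hypothetical producer would follow the same shape; the field names come from the hunk above, but the "note" source kind and the chunk payload layout are invented for illustration:

# Hypothetical second producer using the same job shape as the hunk above.
from modules.serviceCenter.services.serviceKnowledge.mainServiceKnowledge import IngestionJob

async def indexNote(knowledgeService, noteId, text, userId, featureInstanceId, mandateId):
    await knowledgeService.requestIngestion(
        IngestionJob(
            sourceKind="note",                # invented example kind
            sourceId=f"note-{noteId}",
            fileName=f"note-{noteId[:8]}",
            mimeType="text/plain",
            userId=userId,
            featureInstanceId=featureInstanceId,
            mandateId=mandateId,
            contentObjects=[{"text": text}],  # assumed chunk shape
            provenance={"lane": "feature", "feature": "notes", "noteId": noteId},
        )
    )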
@@ -146,6 +146,57 @@ BUILTIN_PERSONAS: List[Dict[str, Any]] = [
         "gender": "m",
         "category": "builtin",
     },
+    # --- Fachpersonen / Therapeutische & rechtliche Gesprächspartner ---
+    {
+        "key": "couples_therapist_f",
+        "label": "Paartherapeutin",
+        "description": "Dr. Eva Roth, erfahrene Paartherapeutin. Empathisch, strukturiert, stellt gezielte Fragen zu "
+                       "Beziehungsdynamiken. Spiegelt Gefühle und Muster, ohne Partei zu ergreifen. Arbeitet mit der "
+                       "Gewaltfreien Kommunikation und systemischen Methoden. Fragt nach Bedürfnissen hinter Vorwürfen "
+                       "und lenkt das Gespräch auf konkrete Verhaltensänderungen statt Schuldzuweisungen.",
+        "gender": "f",
+        "category": "builtin",
+    },
+    {
+        "key": "psychologist_m",
+        "label": "Psychologe",
+        "description": "Dr. Markus Frei, klinischer Psychologe mit Schwerpunkt Stressbewältigung und Burnout-Prävention. "
+                       "Ruhig, geduldig, stellt offene Fragen zur Selbstreflexion. Erkennt Denkmuster und benennt sie "
+                       "behutsam. Arbeitet lösungsorientiert und hilft bei der Identifikation von Stressoren, Ressourcen "
+                       "und Bewältigungsstrategien. Drängt nicht, lässt Raum für Stille und Nachdenken.",
+        "gender": "m",
+        "category": "builtin",
+    },
+    {
+        "key": "lawyer_m",
+        "label": "Rechtsanwalt",
+        "description": "lic. iur. Daniel Brandt, Wirtschaftsanwalt mit Fokus auf Vertragsrecht und Arbeitsrecht. Sachlich, "
+                       "analytisch, prüft jede Aussage auf juristische Stichhaltigkeit. Fragt nach Fakten, Fristen und "
+                       "Beweislage. Weist auf Risiken und Haftungsfragen hin. Formuliert präzise und erwartet dasselbe "
+                       "vom Gegenüber. Kann unangenehme rechtliche Realitäten nüchtern kommunizieren.",
+        "gender": "m",
+        "category": "builtin",
+    },
+    {
+        "key": "mediator_f",
+        "label": "Mediatorin",
+        "description": "Sabine Lang, zertifizierte Wirtschaftsmediatorin. Strikt neutral, strukturiert den Dialog zwischen "
+                       "Konfliktparteien. Stellt sicher, dass beide Seiten gehört werden. Arbeitet mit Ich-Botschaften und "
+                       "Interessenklärung statt Positionsverhandlung. Unterbricht respektvoll bei Eskalation und lenkt "
+                       "zurück auf Sachebene. Ziel ist immer eine tragfähige Vereinbarung, nicht Recht oder Unrecht.",
+        "gender": "f",
+        "category": "builtin",
+    },
+    {
+        "key": "hr_manager_f",
+        "label": "HR-Managerin",
+        "description": "Kathrin Vogt, Head of HR in einem Konzern. Kennt Arbeitsrecht, Feedbackkultur und Change-Prozesse. "
+                       "Spricht diplomatisch aber klar. Achtet auf Compliance und Gleichbehandlung. Erwartet strukturierte "
+                       "Argumentation bei Personalentscheiden. Reagiert sensibel auf Diskriminierungs- oder Mobbingthemen. "
+                       "Kann sowohl Arbeitgeber- als auch Arbeitnehmerperspektive einnehmen.",
+        "gender": "f",
+        "category": "builtin",
+    },
 ]
@@ -62,9 +62,9 @@ async def _runDailyReminders():
     try:
         from modules.shared.configuration import APP_CONFIG
         from modules.connectors.connectorDbPostgre import DatabaseConnector
-        from .datamodelCommcoach import CoachingUserProfile, CoachingContextStatus
+        from .datamodelCommcoach import CoachingUserProfile, TrainingModuleStatus
         from modules.interfaces.interfaceMessaging import getInterface as getMessagingInterface
-        from modules.shared.notifyMandateAdmins import _renderHtmlEmail, _resolveMandateName
+        from modules.shared.notifyMandateAdmins import renderHtmlEmail, resolveMandateName
 
         dbHost = APP_CONFIG.get("DB_HOST", "_no_config_default_data")
         db = DatabaseConnector(
@@ -94,10 +94,10 @@ async def _runDailyReminders():
                 continue
 
             # Check if user has active contexts
-            from .datamodelCommcoach import CoachingContext
-            contexts = db.getRecordset(CoachingContext, recordFilter={
+            from .datamodelCommcoach import TrainingModule
+            contexts = db.getRecordset(TrainingModule, recordFilter={
                 "userId": userId,
-                "status": CoachingContextStatus.ACTIVE.value,
+                "status": TrainingModuleStatus.ACTIVE.value,
             })
             if not contexts:
                 continue
@@ -106,8 +106,8 @@ async def _runDailyReminders():
             contextList = ", ".join(contextTitles)
 
             subject = "Dein tägliches Coaching wartet"
-            mandateName = _resolveMandateName(profile.get("mandateId"))
-            htmlMessage = _renderHtmlEmail(
+            mandateName = resolveMandateName(profile.get("mandateId"))
+            htmlMessage = renderHtmlEmail(
                 "Zeit für dein tägliches Coaching",
                 [
                     f"Du hast aktuell {len(contexts)} aktive Coaching-Themen.",
205 modules/features/graphicalEditor/adapterValidator.py (Normal file)
@ -0,0 +1,205 @@
|
|||
# Copyright (c) 2025 Patrick Motsch
|
||||
# All rights reserved.
|
||||
"""
|
||||
Adapter Validator — enforces 5 drift rules between Schicht-3 NodeAdapters
|
||||
and the Schicht-2 Actions they bind to.
|
||||
|
||||
This is the CI-safety net described in the typed-action-architecture plan:
|
||||
any drift between an Editor-Node Adapter and the underlying Action signature
|
||||
must be caught at build time, never silently in production.
|
||||
|
||||
Rules
|
||||
-----
|
||||
1. Every `userParams[].actionArg` exists as a parameter in the bound Action.
|
||||
2. Every required Action parameter is covered by either `userParams` or
|
||||
`contextParams` (i.e. no required arg is silently unset).
|
||||
3. Every Action parameter type exists in PORT_TYPE_CATALOG (or is a primitive).
|
||||
4. The Action `outputType` exists in PORT_TYPE_CATALOG (or is a primitive).
|
||||
5. Every method-bound STATIC node has an Adapter (no orphan node ids).
|
||||
|
||||
Rules 3+4 are already enforced by `_actionSignatureValidator` in Phase 2 —
|
||||
this module composes with it so the report covers both layers.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass, field
|
||||
from typing import Any, Dict, List, Mapping
|
||||
|
||||
from modules.features.graphicalEditor.nodeAdapter import (
|
||||
NodeAdapter,
|
||||
_adapterFromLegacyNode,
|
||||
_isMethodBoundNode,
|
||||
)
|
||||
from modules.workflows.methods._actionSignatureValidator import _validateTypeRef
|
||||
|
||||
|
||||
@dataclass
|
||||
class AdapterValidationReport:
|
||||
"""Aggregated drift report across all adapters."""
|
||||
|
||||
errors: List[str] = field(default_factory=list)
|
||||
warnings: List[str] = field(default_factory=list)
|
||||
|
||||
@property
|
||||
def isHealthy(self) -> bool:
|
||||
return not self.errors
|
||||
|
||||
def merge(self, other: "AdapterValidationReport") -> None:
|
||||
self.errors.extend(other.errors)
|
||||
self.warnings.extend(other.warnings)
|
||||
|
||||
|
||||
def _validateAdapterAgainstAction(
|
||||
adapter: NodeAdapter,
|
||||
actionDef: Any,
|
||||
) -> AdapterValidationReport:
|
||||
"""Apply rules 1-4 to a single Adapter / Action pair.
|
||||
|
||||
`actionDef` is duck-typed so tests can pass dataclasses; production passes
|
||||
a `WorkflowActionDefinition` Pydantic model.
|
||||
"""
|
||||
report = AdapterValidationReport()
|
||||
actionParams: Mapping[str, Any] = getattr(actionDef, "parameters", {}) or {}
|
||||
outputType: str = getattr(actionDef, "outputType", "ActionResult") or "ActionResult"
|
||||
|
||||
# Rule 1: every userParam.actionArg exists in the Action
|
||||
declaredArgs = {up.actionArg for up in adapter.userParams}
|
||||
for arg in declaredArgs:
|
||||
if arg not in actionParams:
|
||||
report.errors.append(
|
||||
f"adapter '{adapter.nodeId}' bindsAction '{adapter.bindsAction}': "
|
||||
f"userParams.actionArg '{arg}' does not exist in action parameters "
|
||||
f"(known: {sorted(actionParams.keys())})"
|
||||
)
|
||||
|
||||
# Rule 2: every required Action arg is covered (userParams OR contextParams)
|
||||
coveredArgs = declaredArgs | set(adapter.contextParams.keys())
|
||||
for paramName, paramDef in actionParams.items():
|
||||
isRequired = bool(getattr(paramDef, "required", False))
|
||||
if isRequired and paramName not in coveredArgs:
|
||||
report.errors.append(
|
||||
f"adapter '{adapter.nodeId}' bindsAction '{adapter.bindsAction}': "
|
||||
f"required action arg '{paramName}' is neither in userParams nor contextParams"
|
||||
)
|
||||
|
||||
# Rule 3: every Action parameter type exists in catalog (re-runs Phase-2 rule)
|
||||
for paramName, paramDef in actionParams.items():
|
||||
typeRef = getattr(paramDef, "type", None)
|
||||
if not typeRef:
|
            report.errors.append(
                f"action '{adapter.bindsAction}.{paramName}': missing 'type' on parameter"
            )
            continue
        for err in _validateTypeRef(typeRef):
            report.errors.append(
                f"action '{adapter.bindsAction}.{paramName}': {err}"
            )

    # Rule 4: Action outputType exists in catalog (or is a generic fire-and-forget type)
    if outputType not in {"ActionResult", "Transit"}:
        for err in _validateTypeRef(outputType):
            report.errors.append(
                f"action '{adapter.bindsAction}'.outputType: {err}"
            )

    return report


def _validateAllAdapters(
    staticNodes: List[Mapping[str, Any]],
    actionsRegistry: Mapping[str, Mapping[str, Any]],
) -> AdapterValidationReport:
    """Run rules 1-5 across all method-bound static node definitions.

    Args:
        staticNodes: list of legacy node-dicts (`STATIC_NODE_TYPES`).
        actionsRegistry: mapping of method-shortname -> {actionName: WorkflowActionDefinition}.
            Built from live `methods` registry or test-stubbed methods.

    Returns:
        Aggregated drift report. `isHealthy` is True only if every method-bound
        node has a matching Action and all 5 rules pass.
    """
    report = AdapterValidationReport()
    seenAdapterIds: set[str] = set()

    for node in staticNodes:
        if not _isMethodBoundNode(node):
            continue

        adapter = _adapterFromLegacyNode(node)
        if adapter is None:
            report.errors.append(
                f"node '{node.get('id')}' is method-bound but adapter projection failed"
            )
            continue
        seenAdapterIds.add(adapter.nodeId)

        methodName = str(node.get("_method") or "")
        actionName = str(node.get("_action") or "")
        methodActions = actionsRegistry.get(methodName) or {}
        actionDef = methodActions.get(actionName)
        if actionDef is None:
            report.errors.append(
                f"adapter '{adapter.nodeId}' bindsAction '{adapter.bindsAction}': "
                f"action not found in registry (method '{methodName}' has actions: "
                f"{sorted(methodActions.keys())})"
            )
            continue

        report.merge(_validateAdapterAgainstAction(adapter, actionDef))

    # Rule 5: every Action with dynamicMode=False MUST have an Editor Adapter.
    # dynamicMode=True actions are agent-only and may legitimately lack one.
    boundActions: set[str] = set()
    for node in staticNodes:
        if not _isMethodBoundNode(node):
            continue
        boundActions.add(f"{node.get('_method')}.{node.get('_action')}")

    for methodName, actions in actionsRegistry.items():
        for actionName, actionDef in actions.items():
            if bool(getattr(actionDef, "dynamicMode", False)):
                continue
            fqn = f"{methodName}.{actionName}"
            if fqn not in boundActions:
                report.warnings.append(
                    f"action '{fqn}' has no Editor adapter "
                    f"(set dynamicMode=True if intended as agent-only)"
                )

    return report


def _formatAdapterReport(report: AdapterValidationReport) -> str:
    """Format a report for human-readable logging."""
    lines: List[str] = []
    if report.isHealthy and not report.warnings:
        lines.append("Adapter validator: all healthy.")
        return "\n".join(lines)

    if report.errors:
        lines.append(f"Adapter validator: {len(report.errors)} ERROR(s)")
        for e in report.errors:
            lines.append(f"  ERROR: {e}")
    if report.warnings:
        lines.append(f"Adapter validator: {len(report.warnings)} WARNING(s)")
        for w in report.warnings:
            lines.append(f"  WARN: {w}")
    return "\n".join(lines)


def _buildActionsRegistryFromMethods(
    methodInstances: Mapping[str, Any],
) -> Dict[str, Dict[str, Any]]:
    """Convenience: turn `{shortName: methodInstance}` into the registry shape.

    `methodInstance._actions` is a dict of action-name -> WorkflowActionDefinition.
    """
    registry: Dict[str, Dict[str, Any]] = {}
    for shortName, instance in methodInstances.items():
        actions = getattr(instance, "_actions", None)
        if isinstance(actions, dict):
            registry[shortName] = dict(actions)
    return registry
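
# Usage sketch (not part of this change): one way the drift check could be
# wired at startup. Only _buildActionsRegistryFromMethods, _validateAllAdapters,
# _formatAdapterReport and the STATIC_NODE_TYPES list are real; the function
# name and logger wiring below are assumptions.
import logging

def runAdapterDriftCheck(methodInstances, staticNodes) -> bool:
    """Return True when all five adapter/action rules pass."""
    registry = _buildActionsRegistryFromMethods(methodInstances)
    report = _validateAllAdapters(staticNodes, registry)
    logging.getLogger(__name__).info(_formatAdapterReport(report))
    return report.isHealthy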
@@ -68,23 +68,28 @@ class AutoWorkflow(PowerOnModel):
            "frontend_readonly": True,
            "frontend_required": False,
            "label": "Mandanten-ID",
            "frontend_fk_source": "/api/mandates/",
            "frontend_fk_display_field": "label",
            "fk_model": "Mandate",
            "fk_target": {"db": "poweron_app", "table": "Mandate"},
            "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
        },
    )
    featureInstanceId: str = Field(
        description="Feature instance ID",
        description="Feature instance ID (GE owner instance / RBAC scope)",
        json_schema_extra={
            "frontend_type": "text",
            "frontend_readonly": True,
            "frontend_required": False,
            "label": "Feature-Instanz-ID",
            "frontend_fk_source": "/api/features/instances",
            "frontend_fk_display_field": "label",
            "fk_model": "FeatureInstance",
            "fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
            "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
        },
    )
    targetFeatureInstanceId: Optional[str] = Field(
        default=None,
        description="Target feature instance for execution data scope. NULL for templates, mandatory for non-templates.",
        json_schema_extra={
            "frontend_type": "select",
            "frontend_readonly": False,
            "frontend_required": False,
            "label": "Ziel-Instanz",
            "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
        },
    )
    label: str = Field(

@@ -104,7 +109,12 @@ class AutoWorkflow(PowerOnModel):
    isTemplate: bool = Field(
        default=False,
        description="Whether this workflow is a template",
        json_schema_extra={"frontend_type": "checkbox", "frontend_required": False, "label": "Ist Vorlage"},
        json_schema_extra={
            "frontend_type": "checkbox",
            "frontend_required": False,
            "label": "Ist Vorlage",
            "frontend_format_labels": ["Ja", "-", "Nein"],
        },
    )
    templateSourceId: Optional[str] = Field(
        default=None,
@@ -114,18 +124,43 @@ class AutoWorkflow(PowerOnModel):
            "frontend_readonly": True,
            "frontend_required": False,
            "label": "Vorlagen-Quelle",
            "fk_target": {"db": "poweron_graphicaleditor", "table": "AutoWorkflow"},
            # Soft FK: holds either a real AutoWorkflow.id (UUID, when copied
            # from a stored template) OR an in-code sentinel like
            # "trustee-receipt-import" (when bootstrapped from
            # featureModule.getTemplateWorkflows()). Sentinel values do not
            # exist as DB rows by design — orphan cleanup MUST skip this column.
            "fk_target": {
                "db": "poweron_graphicaleditor",
                "table": "AutoWorkflow",
                "labelField": "label",
                "softFk": True,
            },
        },
    )
    templateScope: Optional[str] = Field(
        default=None,
        description="Template scope: user, instance, mandate, system (AutoTemplateScope)",
        json_schema_extra={"frontend_type": "select", "frontend_required": False, "label": "Vorlagen-Bereich"},
        json_schema_extra={
            "frontend_type": "select",
            "frontend_required": False,
            "label": "Vorlagen-Bereich",
            "frontend_options": [
                {"value": "user", "label": "Meine"},
                {"value": "instance", "label": "Instanz"},
                {"value": "mandate", "label": "Mandant"},
                {"value": "system", "label": "System"},
            ],
        },
    )
    sharedReadOnly: bool = Field(
        default=False,
        description="If true, shared template is read-only for non-owners",
        json_schema_extra={"frontend_type": "checkbox", "frontend_required": False, "label": "Freigabe nur-lesen"},
        json_schema_extra={
            "frontend_type": "checkbox",
            "frontend_required": False,
            "label": "Freigabe nur-lesen",
            "frontend_format_labels": ["Ja", "-", "Nein"],
        },
    )
    currentVersionId: Optional[str] = Field(
        default=None,
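
# Illustration only: what the "softFk": True flag above protects against. A
# hypothetical orphan-cleanup pass should only follow hard FK columns, so
# sentinel values such as "trustee-receipt-import" in templateSourceId are
# never flagged as dangling. The helper below is a sketch, not shipped code.
def collectHardFkColumns(schemaExtras: dict) -> list:
    """Return (fieldName, fk_target) pairs that are safe to orphan-check."""
    hard = []
    for fieldName, extra in schemaExtras.items():
        fkTarget = (extra or {}).get("fk_target")
        if isinstance(fkTarget, dict) and not fkTarget.get("softFk"):
            hard.append((fieldName, fkTarget))
    return hard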
@@ -135,13 +170,18 @@ class AutoWorkflow(PowerOnModel):
            "frontend_readonly": True,
            "frontend_required": False,
            "label": "Aktuelle Version",
            "fk_target": {"db": "poweron_graphicaleditor", "table": "AutoVersion"},
            "fk_target": {"db": "poweron_graphicaleditor", "table": "AutoVersion", "labelField": "versionNumber"},
        },
    )
    active: bool = Field(
        default=True,
        description="Whether workflow is active",
        json_schema_extra={"frontend_type": "checkbox", "frontend_required": False, "label": "Aktiv"},
        json_schema_extra={
            "frontend_type": "checkbox",
            "frontend_required": False,
            "label": "Aktiv",
            "frontend_format_labels": ["Ja", "-", "Nein"],
        },
    )
    eventId: Optional[str] = Field(
        default=None,

@@ -151,7 +191,12 @@ class AutoWorkflow(PowerOnModel):
    notifyOnFailure: bool = Field(
        default=True,
        description="Send notification (in-app + email) when a run fails",
        json_schema_extra={"frontend_type": "checkbox", "frontend_required": False, "label": "Bei Fehler benachrichtigen"},
        json_schema_extra={
            "frontend_type": "checkbox",
            "frontend_required": False,
            "label": "Bei Fehler benachrichtigen",
            "frontend_format_labels": ["Ja", "-", "Nein"],
        },
    )
    # Legacy fields kept for backward compatibility during transition
    graph: Dict[str, Any] = Field(

@@ -184,7 +229,7 @@ class AutoVersion(PowerOnModel):
            "frontend_readonly": True,
            "frontend_required": True,
            "label": "Workflow-ID",
            "fk_target": {"db": "poweron_graphicaleditor", "table": "AutoWorkflow"},
            "fk_target": {"db": "poweron_graphicaleditor", "table": "AutoWorkflow", "labelField": "label"},
        },
    )
    versionNumber: int = Field(

@@ -195,7 +240,16 @@ class AutoVersion(PowerOnModel):
    status: str = Field(
        default=AutoWorkflowStatus.DRAFT.value,
        description="Version status: draft, published, archived",
        json_schema_extra={"frontend_type": "select", "frontend_required": False, "label": "Status"},
        json_schema_extra={
            "frontend_type": "select",
            "frontend_required": False,
            "label": "Status",
            "frontend_options": [
                {"value": "draft", "label": "Entwurf"},
                {"value": "published", "label": "Veröffentlicht"},
                {"value": "archived", "label": "Archiviert"},
            ],
        },
    )
    graph: Dict[str, Any] = Field(
        default_factory=dict,

@@ -210,7 +264,7 @@ class AutoVersion(PowerOnModel):
    publishedAt: Optional[float] = Field(
        default=None,
        description="Timestamp when version was published",
        json_schema_extra={"frontend_type": "datetime", "frontend_readonly": True, "frontend_required": False, "label": "Veröffentlicht am"},
        json_schema_extra={"frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False, "label": "Veröffentlicht am"},
    )
    publishedBy: Optional[str] = Field(
        default=None,

@@ -220,7 +274,7 @@ class AutoVersion(PowerOnModel):
            "frontend_readonly": True,
            "frontend_required": False,
            "label": "Veröffentlicht von",
            "fk_target": {"db": "poweron_app", "table": "User"},
            "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
        },
    )


@@ -243,7 +297,7 @@ class AutoRun(PowerOnModel):
            "frontend_readonly": True,
            "frontend_required": True,
            "label": "Workflow-ID",
            "fk_target": {"db": "poweron_graphicaleditor", "table": "AutoWorkflow"},
            "fk_target": {"db": "poweron_graphicaleditor", "table": "AutoWorkflow", "labelField": "label"},
        },
    )
    label: Optional[str] = Field(

@@ -259,10 +313,7 @@ class AutoRun(PowerOnModel):
            "frontend_readonly": True,
            "frontend_required": False,
            "label": "Mandanten-ID",
            "frontend_fk_source": "/api/mandates/",
            "frontend_fk_display_field": "label",
            "fk_model": "Mandate",
            "fk_target": {"db": "poweron_app", "table": "Mandate"},
            "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
        },
    )
    ownerId: Optional[str] = Field(

@@ -273,7 +324,7 @@ class AutoRun(PowerOnModel):
            "frontend_readonly": True,
            "frontend_required": False,
            "label": "Auslöser",
            "fk_target": {"db": "poweron_app", "table": "User"},
            "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
        },
    )
    versionId: Optional[str] = Field(

@@ -284,13 +335,24 @@ class AutoRun(PowerOnModel):
            "frontend_readonly": True,
            "frontend_required": False,
            "label": "Versions-ID",
            "fk_target": {"db": "poweron_graphicaleditor", "table": "AutoVersion"},
            "fk_target": {"db": "poweron_graphicaleditor", "table": "AutoVersion", "labelField": "versionNumber"},
        },
    )
    status: str = Field(
        default=AutoRunStatus.RUNNING.value,
        description="Status: running, paused, completed, failed, cancelled",
        json_schema_extra={"frontend_type": "text", "frontend_required": False, "label": "Status"},
        json_schema_extra={
            "frontend_type": "select",
            "frontend_required": False,
            "label": "Status",
            "frontend_options": [
                {"value": "running", "label": "Läuft"},
                {"value": "paused", "label": "Pausiert"},
                {"value": "completed", "label": "Abgeschlossen"},
                {"value": "failed", "label": "Fehlgeschlagen"},
                {"value": "cancelled", "label": "Abgebrochen"},
            ],
        },
    )
    trigger: Dict[str, Any] = Field(
        default_factory=dict,

@@ -300,12 +362,12 @@ class AutoRun(PowerOnModel):
    startedAt: Optional[float] = Field(
        default=None,
        description="Run start timestamp",
        json_schema_extra={"frontend_type": "datetime", "frontend_readonly": True, "frontend_required": False, "label": "Gestartet am"},
        json_schema_extra={"frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False, "label": "Gestartet am"},
    )
    completedAt: Optional[float] = Field(
        default=None,
        description="Run completion timestamp",
        json_schema_extra={"frontend_type": "datetime", "frontend_readonly": True, "frontend_required": False, "label": "Abgeschlossen am"},
        json_schema_extra={"frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False, "label": "Abgeschlossen am"},
    )
    nodeOutputs: Dict[str, Any] = Field(
        default_factory=dict,

@@ -357,7 +419,7 @@ class AutoStepLog(PowerOnModel):
            "frontend_readonly": True,
            "frontend_required": True,
            "label": "Lauf-ID",
            "fk_target": {"db": "poweron_graphicaleditor", "table": "AutoRun"},
            "fk_target": {"db": "poweron_graphicaleditor", "table": "AutoRun", "labelField": "label"},
        },
    )
    nodeId: str = Field(

@@ -371,7 +433,18 @@ class AutoStepLog(PowerOnModel):
    status: str = Field(
        default=AutoStepStatus.PENDING.value,
        description="Step status: pending, running, completed, failed, skipped",
        json_schema_extra={"frontend_type": "text", "frontend_required": False, "label": "Status"},
        json_schema_extra={
            "frontend_type": "select",
            "frontend_required": False,
            "label": "Status",
            "frontend_options": [
                {"value": "pending", "label": "Wartend"},
                {"value": "running", "label": "Läuft"},
                {"value": "completed", "label": "Abgeschlossen"},
                {"value": "failed", "label": "Fehlgeschlagen"},
                {"value": "skipped", "label": "Übersprungen"},
            ],
        },
    )
    inputSnapshot: Dict[str, Any] = Field(
        default_factory=dict,

@@ -391,12 +464,12 @@ class AutoStepLog(PowerOnModel):
    startedAt: Optional[float] = Field(
        default=None,
        description="Step start timestamp",
        json_schema_extra={"frontend_type": "datetime", "frontend_readonly": True, "frontend_required": False, "label": "Gestartet am"},
        json_schema_extra={"frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False, "label": "Gestartet am"},
    )
    completedAt: Optional[float] = Field(
        default=None,
        description="Step completion timestamp",
        json_schema_extra={"frontend_type": "datetime", "frontend_readonly": True, "frontend_required": False, "label": "Abgeschlossen am"},
        json_schema_extra={"frontend_type": "timestamp", "frontend_readonly": True, "frontend_required": False, "label": "Abgeschlossen am"},
    )
    durationMs: Optional[int] = Field(
        default=None,

@@ -433,7 +506,7 @@ class AutoTask(PowerOnModel):
            "frontend_readonly": True,
            "frontend_required": True,
            "label": "Lauf-ID",
            "fk_target": {"db": "poweron_graphicaleditor", "table": "AutoRun"},
            "fk_target": {"db": "poweron_graphicaleditor", "table": "AutoRun", "labelField": "label"},
        },
    )
    workflowId: str = Field(

@@ -443,7 +516,7 @@ class AutoTask(PowerOnModel):
            "frontend_readonly": True,
            "frontend_required": True,
            "label": "Workflow-ID",
            "fk_target": {"db": "poweron_graphicaleditor", "table": "AutoWorkflow"},
            "fk_target": {"db": "poweron_graphicaleditor", "table": "AutoWorkflow", "labelField": "label"},
        },
    )
    nodeId: str = Field(

@@ -467,13 +540,23 @@ class AutoTask(PowerOnModel):
            "frontend_readonly": False,
            "frontend_required": False,
            "label": "Zugewiesen an",
            "fk_target": {"db": "poweron_app", "table": "User"},
            "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
        },
    )
    status: str = Field(
        default=AutoTaskStatus.PENDING.value,
        description="Status: pending, completed, cancelled, expired",
        json_schema_extra={"frontend_type": "text", "frontend_required": False, "label": "Status"},
        json_schema_extra={
            "frontend_type": "select",
            "frontend_required": False,
            "label": "Status",
            "frontend_options": [
                {"value": "pending", "label": "Wartend"},
                {"value": "completed", "label": "Abgeschlossen"},
                {"value": "cancelled", "label": "Abgebrochen"},
                {"value": "expired", "label": "Abgelaufen"},
            ],
        },
    )
    result: Optional[Dict[str, Any]] = Field(
        default=None,

@@ -483,7 +566,7 @@ class AutoTask(PowerOnModel):
    expiresAt: Optional[float] = Field(
        default=None,
        description="Expiration timestamp for the task",
        json_schema_extra={"frontend_type": "datetime", "frontend_required": False, "label": "Läuft ab am"},
        json_schema_extra={"frontend_type": "timestamp", "frontend_required": False, "label": "Läuft ab am"},
    )
@@ -7,21 +7,35 @@ Uses PostgreSQL poweron_graphicaleditor database (Greenfield).

import base64
import logging
import time
import uuid
from typing import Dict, Any, List, Optional


def _make_json_serializable(obj: Any) -> Any:
_INTERNAL_SKIP_KEYS = frozenset({"_context", "_orderedNodes"})


def _make_json_serializable(obj: Any, _depth: int = 0) -> Any:
    """
    Recursively convert bytes to base64 strings so structures can be JSON-serialized
    for storage in JSONB columns.

    Internal runtime keys (_context, _orderedNodes) are skipped — they hold live
    Python objects (including back-references to nodeOutputs) and must never be
    stored. A depth guard prevents runaway recursion on unexpected circular refs.
    """
    if _depth > 50:
        return None
    if isinstance(obj, bytes):
        return base64.b64encode(obj).decode("ascii")
    if isinstance(obj, dict):
        return {k: _make_json_serializable(v) for k, v in obj.items()}
        return {
            k: _make_json_serializable(v, _depth + 1)
            for k, v in obj.items()
            if k not in _INTERNAL_SKIP_KEYS
        }
    if isinstance(obj, list):
        return [_make_json_serializable(v) for v in obj]
        return [_make_json_serializable(v, _depth + 1) for v in obj]
    return obj

from modules.datamodels.datamodelUam import User

@@ -216,6 +230,8 @@ class GraphicalEditorObjects:
        data["id"] = str(uuid.uuid4())
        data["mandateId"] = self.mandateId
        data["featureInstanceId"] = self.featureInstanceId
        if not data.get("targetFeatureInstanceId") and not data.get("isTemplate"):
            data["targetFeatureInstanceId"] = self.featureInstanceId
        if "active" not in data or data.get("active") is None:
            data["active"] = True
        data["invocations"] = normalize_invocations_list(data.get("invocations"))

@@ -278,6 +294,7 @@ class GraphicalEditorObjects:
            "workflowId": workflowId,
            "label": label,
            "status": "running",
            "startedAt": time.time(),
            "nodeOutputs": _make_json_serializable(nodeOutputs or {}),
            "currentNodeId": None,
            "context": ctx,

@@ -314,6 +331,8 @@ class GraphicalEditorObjects:
        updates = {}
        if status is not None:
            updates["status"] = status
            if status in ("completed", "failed", "stopped", "cancelled") and not run.get("completedAt"):
                updates["completedAt"] = time.time()
        if nodeOutputs is not None:
            updates["nodeOutputs"] = _make_json_serializable(nodeOutputs)
        if currentNodeId is not None:
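
# Usage sketch for _make_json_serializable (illustration only; the sample
# payload is invented). Bytes become base64 text, runtime-only keys are
# dropped, and the depth guard caps pathological nesting:
#
#     sample = {
#         "pdfBytes": b"%PDF-1.7 ...",
#         "_context": object(),   # live runtime object, must never be stored
#         "steps": [{"ok": True}],
#     }
#     _make_json_serializable(sample)
#     # -> {"pdfBytes": "JVBERi0xLjcgLi4u", "steps": [{"ok": True}]}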
modules/features/graphicalEditor/nodeAdapter.py (new file, 172 lines)
@@ -0,0 +1,172 @@
# Copyright (c) 2025 Patrick Motsch
# All rights reserved.
"""
Schicht-3 Adapter Layer — projects Schicht-2 Actions into Editor-Node form.

Architecture (see wiki/c-work/1-plan/2026-04-typed-action-architecture.md):
- Schicht 1: Types Catalog (portTypes.PORT_TYPE_CATALOG)
- Schicht 2: Methods/Actions (modules/workflows/methods/method*) - source of truth
  for Backend capabilities (parameter types, output types).
- Schicht 3: Adapters (this module) - Editor-Node + AI-Agent-Tool wrappers around
  Actions. References Action signature, never duplicates types.
- Schicht 4: Workflow-Bindings + Agent-Tool-Calls (instance-level wiring).

This module defines the in-code Adapter representation (NodeAdapter,
UserParamMapping) and the projection helpers that convert between the
legacy node-dict wire format and the typed Adapter view.

Wire-format compatibility: the legacy dicts in nodeDefinitions/*.py remain
the wire format consumed by the frontend until Phase 4. This module exposes
an Adapter VIEW over those dicts so the validator and AI-tool generator can
operate on a clean, typed structure without breaking consumers.
"""

from __future__ import annotations

from dataclasses import dataclass, field
from typing import Any, Dict, List, Mapping, Optional


@dataclass(frozen=True)
class UserParamMapping:
    """Maps an Action argument into a Node's user-facing parameter.

    The Action signature is the source of truth for type/required/description.
    This mapping carries Editor-specific overrides (label, UI hints, conditional
    visibility) but never re-declares the type.
    """

    actionArg: str
    label: Optional[Any] = None
    description: Optional[Any] = None
    uiHint: Optional[str] = None
    frontendOptions: Optional[Any] = None
    visibleWhen: Optional[Dict[str, Any]] = None
    defaultValue: Any = None


@dataclass(frozen=True)
class NodeAdapter:
    """Schicht-3 Editor-Node adapter — binds to a Schicht-2 Action.

    All type information for `userParams` is inherited from the bound Action.
    The adapter only carries Editor-specific concerns (UI labels, port topology,
    icon/color metadata).
    """

    nodeId: str
    bindsAction: str
    category: str
    label: Any
    description: Any
    userParams: List[UserParamMapping] = field(default_factory=list)
    contextParams: Dict[str, str] = field(default_factory=dict)
    inputs: int = 1
    outputs: int = 1
    inputAccepts: List[List[str]] = field(default_factory=list)
    outputLabels: Optional[List[Any]] = None
    meta: Dict[str, Any] = field(default_factory=dict)


def _isMethodBoundNode(node: Mapping[str, Any]) -> bool:
    """True if a legacy node dict is bound to a Schicht-2 Action."""
    return bool(node.get("_method") and node.get("_action"))


def bindsActionFromLegacy(node: Mapping[str, Any]) -> Optional[str]:
    """Build the canonical 'method.action' identifier from a legacy node dict.

    Returns None for framework-primitive nodes (trigger/flow/input/data).
    """
    method = node.get("_method")
    action = node.get("_action")
    if not method or not action:
        return None
    return f"{method}.{action}"


def _userParamFromLegacyParam(legacyParam: Mapping[str, Any]) -> UserParamMapping:
    """Project a legacy parameter dict into a UserParamMapping view.

    The view carries only Editor-overrides; type/required come from the Action.
    """
    return UserParamMapping(
        actionArg=str(legacyParam.get("name", "")),
        label=legacyParam.get("label"),
        description=legacyParam.get("description"),
        uiHint=legacyParam.get("frontendType"),
        frontendOptions=legacyParam.get("frontendOptions"),
        visibleWhen=_extractVisibleWhen(legacyParam.get("frontendOptions")),
        defaultValue=legacyParam.get("default"),
    )


def _extractVisibleWhen(frontendOptions: Any) -> Optional[Dict[str, Any]]:
    """Extract conditional-visibility hint from legacy frontendOptions.showWhen."""
    if not isinstance(frontendOptions, dict):
        return None
    dependsOn = frontendOptions.get("dependsOn")
    showWhen = frontendOptions.get("showWhen")
    if not dependsOn or not showWhen:
        return None
    return {"actionArg": str(dependsOn), "in": list(showWhen) if isinstance(showWhen, (list, tuple)) else [showWhen]}


def _adapterFromLegacyNode(node: Mapping[str, Any]) -> Optional[NodeAdapter]:
    """Build a NodeAdapter view from a legacy node dict.

    Returns None for framework-primitive nodes (no _method/_action binding).
    Pure projection — no validation, no Action-signature lookup.
    """
    if not _isMethodBoundNode(node):
        return None

    bindsAction = bindsActionFromLegacy(node)
    if not bindsAction:
        return None

    inputAccepts = _projectInputAccepts(node)

    return NodeAdapter(
        nodeId=str(node.get("id", "")),
        bindsAction=bindsAction,
        category=str(node.get("category", "")),
        label=node.get("label", ""),
        description=node.get("description", ""),
        userParams=[_userParamFromLegacyParam(p) for p in (node.get("parameters") or [])],
        contextParams={},
        inputs=int(node.get("inputs", 1)),
        outputs=int(node.get("outputs", 1)),
        inputAccepts=inputAccepts,
        outputLabels=node.get("outputLabels"),
        meta=dict(node.get("meta") or {}),
    )


def _projectInputAccepts(node: Mapping[str, Any]) -> List[List[str]]:
    """Convert legacy `inputPorts` dict-of-dicts into a per-port `accepts` list."""
    inputPorts = node.get("inputPorts") or {}
    if not isinstance(inputPorts, dict):
        return []
    inputs = int(node.get("inputs", 0) or 0)
    if inputs <= 0:
        return []
    out: List[List[str]] = []
    for portIdx in range(inputs):
        portCfg = inputPorts.get(portIdx) or inputPorts.get(str(portIdx)) or {}
        accepts = portCfg.get("accepts") if isinstance(portCfg, dict) else None
        out.append(list(accepts) if isinstance(accepts, (list, tuple)) else [])
    return out


def _projectAllAdapters(staticNodes: List[Mapping[str, Any]]) -> Dict[str, NodeAdapter]:
    """Project a list of legacy node dicts into a {nodeId: NodeAdapter} map.

    Framework-primitive nodes (no Action binding) are silently skipped.
    """
    out: Dict[str, NodeAdapter] = {}
    for node in staticNodes:
        adapter = _adapterFromLegacyNode(node)
        if adapter is not None:
            out[adapter.nodeId] = adapter
    return out
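

def _projectionExample() -> None:
    """Sketch (illustration only): project an invented legacy node dict into
    its NodeAdapter view. The node below, including the hypothetical
    'lookupMode' dependency, is made up; real definitions live in
    nodeDefinitions/*.py."""
    node = {
        "id": "clickup.getTask",
        "category": "clickup",
        "label": "Aufgabe abrufen",
        "description": "Eine Aufgabe abrufen",
        "parameters": [
            {"name": "taskId", "frontendType": "text"},
            {"name": "pathQuery", "frontendType": "text",
             "frontendOptions": {"dependsOn": "lookupMode", "showWhen": ["path"]}},
        ],
        "inputs": 1,
        "outputs": 1,
        "inputPorts": {0: {"accepts": ["Transit"]}},
        "_method": "clickup",
        "_action": "getTask",
    }
    adapter = _adapterFromLegacyNode(node)
    assert adapter is not None and adapter.bindsAction == "clickup.getTask"
    assert adapter.inputAccepts == [["Transit"]]
    # showWhen/dependsOn become a typed visibility hint:
    assert adapter.userParams[1].visibleWhen == {"actionArg": "lookupMode", "in": ["path"]}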
@@ -3,6 +3,136 @@

from modules.shared.i18nRegistry import t

from modules.features.graphicalEditor.nodeDefinitions.contextPickerHelp import (
    CONTEXT_BUILDER_PARAM_DESCRIPTION,
)

# Shared authoritative DataPicker paths (same handover idea as ``context.extractContent`` outputPorts).
ACTION_RESULT_DATA_PICK_OPTIONS = [
    {
        "path": ["documents", 0, "documentData"],
        "pickerLabel": t("Gesamter Inhalt"),
        "detail": t(
            "Strukturiertes Handover als JSON inklusive aller Textteile "
            "und Verweisen auf ausgelagerte Bilder."
        ),
        "recommended": True,
        "type": "Any",
    },
    {
        "path": ["response"],
        "pickerLabel": t("Nur Text"),
        "detail": t("Verketteter Klartext aus allen erkannten Textteilen."),
        "recommended": True,
        "type": "str",
    },
    {
        "path": ["imageDocumentsOnly"],
        "pickerLabel": t("Nur Bilder"),
        "detail": t("Nur die extrahierten Bilddokumente als Liste, ohne JSON-Handover."),
        "recommended": False,
        "type": "List[ActionDocument]",
    },
    {
        "path": ["documents"],
        "pickerLabel": t("Alle Dateitypen"),
        "detail": t("Alle Ausgabedokumente nacheinander: JSON-Handover und Bilder."),
        "recommended": False,
        "type": "List[ActionDocument]",
    },
]

AI_RESULT_DATA_PICK_OPTIONS = [
    {
        "path": ["documents", 0, "documentData"],
        "pickerLabel": t("Gesamter Inhalt"),
        "detail": t(
            "Hauptausgabedatei oder strukturierter Inhalt von ``documents[0]`` "
            "(z. B. erzeugtes Dokument, JSON-Handover)."
        ),
        "recommended": True,
        "type": "Any",
    },
    {
        "path": ["response"],
        "pickerLabel": t("Nur Text"),
        "detail": t("Modell-Antwort als reiner Fließtext (ohne eingebettete Bildbytes)."),
        "recommended": True,
        "type": "str",
    },
    {
        "path": ["imageDocumentsOnly"],
        "pickerLabel": t("Nur Bilder"),
        "detail": t("Nur Bild-Dokumente aus ``documents`` (ohne erstes Nicht-Bild-Artefakt, falls gesetzt)."),
        "recommended": False,
        "type": "List[ActionDocument]",
    },
    {
        "path": ["documents"],
        "pickerLabel": t("Alle Ausgabedateien"),
        "detail": t("Alle Dokumente der KI-Antwort: erzeugte Dateien, Bilder, Anhänge."),
        "recommended": False,
        "type": "List[Document]",
    },
]

DOCUMENT_LIST_DATA_PICK_OPTIONS = [
    {
        "path": ["documents"],
        "pickerLabel": t("Alle Dokumente"),
        "detail": t("Die vollständige Dokumentenliste."),
        "recommended": True,
        "type": "List[Document]",
    },
    {
        "path": ["documents", 0],
        "pickerLabel": t("Erstes Dokument"),
        "detail": t("Metadaten und Pfade des ersten Listeneintrags."),
        "recommended": False,
        "type": "Document",
    },
    {
        "path": ["count"],
        "pickerLabel": t("Anzahl"),
        "detail": t("Anzahl der Dokumente."),
        "recommended": False,
        "type": "int",
    },
]

CONSOLIDATE_RESULT_DATA_PICK_OPTIONS = [
    {
        "path": ["result"],
        "pickerLabel": t("Konsolidiertes Ergebnis"),
        "detail": t("Text oder Struktur nach Konsolidierung."),
        "recommended": True,
        "type": "Any",
    },
    {
        "path": ["mode"],
        "pickerLabel": t("Modus"),
        "detail": t("Verwendeter Konsolidierungsmodus."),
        "recommended": False,
        "type": "str",
    },
    {
        "path": ["count"],
        "pickerLabel": t("Anzahl"),
        "detail": t("Anzahl zusammengeführter Elemente."),
        "recommended": False,
        "type": "int",
    },
]

_AI_COMMON_PARAMS = [
    {"name": "requireNeutralization", "type": "bool", "required": False,
     "frontendType": "checkbox", "default": False,
     "description": t("Eingaben fuer diesen Call neutralisieren")},
    {"name": "allowedModels", "type": "array", "required": False,
     "frontendType": "modelMultiSelect", "default": [],
     "description": t("Erlaubte LLM-Modelle (leer = alle erlaubten)")},
]

AI_NODES = [
    {
        "id": "ai.prompt",
@@ -10,22 +140,27 @@ AI_NODES = [
        "label": t("Prompt"),
        "description": t("Prompt eingeben und KI führt aus"),
        "parameters": [
            {"name": "aiPrompt", "type": "string", "required": True, "frontendType": "textarea",
            {"name": "aiPrompt", "type": "str", "required": True, "frontendType": "templateTextarea",
             "description": t("KI-Prompt")},
            {"name": "outputFormat", "type": "string", "required": False, "frontendType": "select",
             "frontendOptions": {"options": ["text", "json", "emailDraft"]},
             "description": t("Ausgabeformat"), "default": "text"},
            {"name": "documentList", "type": "string", "required": False, "frontendType": "hidden",
             "description": t("Dokumentenliste (via Wire oder DataRef)"), "default": ""},
            {"name": "context", "type": "string", "required": False, "frontendType": "hidden",
             "description": t("Kontext-Daten (via Wire oder DataRef)"), "default": ""},
            {"name": "simpleMode", "type": "boolean", "required": False, "frontendType": "checkbox",
            {"name": "resultType", "type": "str", "required": False, "frontendType": "select",
             "frontendOptions": {"options": ["txt", "json", "md", "csv", "xml", "html", "pdf", "docx", "xlsx", "pptx", "png", "jpg"]},
             "description": t("Ausgabeformat"), "default": "txt"},
            {"name": "documentList", "type": "DocumentList", "required": False, "frontendType": "hidden",
             "description": t("Dokumente aus vorherigen Schritten"), "default": "",
             "graphInherit": {"port": 0, "kind": "documentListWire"}},
            {"name": "context", "type": "Any", "required": False, "frontendType": "contextBuilder",
             "description": CONTEXT_BUILDER_PARAM_DESCRIPTION, "default": "",
             "graphInherit": {"port": 0, "kind": "primaryTextRef"}},
            {"name": "simpleMode", "type": "bool", "required": False, "frontendType": "checkbox",
             "description": t("Einfacher Modus"), "default": True},
        ],
        ] + _AI_COMMON_PARAMS,
        "inputs": 1,
        "outputs": 1,
        "inputPorts": {0: {"accepts": ["DocumentList", "AiResult", "TextResult", "Transit"]}},
        "outputPorts": {0: {"schema": "AiResult"}},
        "inputPorts": {0: {"accepts": [
            "FormPayload", "DocumentList", "AiResult", "TextResult", "Transit", "LoopItem", "ActionResult",
        ]}},
        "outputPorts": {0: {"schema": "AiResult", "dataPickOptions": AI_RESULT_DATA_PICK_OPTIONS}},
        "paramMappers": ["aiPromptLegacyAlias"],
        "meta": {"icon": "mdi-robot", "color": "#9C27B0", "usesAi": True},
        "_method": "ai",
        "_action": "process",
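
# Sketch: one plausible reading of the "graphInherit" declarations above
# (illustration only; the real resolution happens in the workflow engine,
# not in this file). Hidden params such as documentList and context are
# filled from the payload arriving on the declared input port:
def _inheritParamsFromPort(nodeDef: dict, portPayloads: dict) -> dict:
    inherited = {}
    for param in nodeDef.get("parameters", []):
        rule = param.get("graphInherit")
        if not rule:
            continue
        payload = portPayloads.get(rule.get("port", 0)) or {}
        if rule.get("kind") == "documentListWire":
            # assumed payload shape: upstream DocumentList-like dict
            inherited[param["name"]] = payload.get("documents", [])
        elif rule.get("kind") == "primaryTextRef":
            # assumed payload shape: upstream result with a text field
            inherited[param["name"]] = payload.get("response", "")
    return inherited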
@@ -36,13 +171,21 @@ AI_NODES = [
        "label": t("Web-Recherche"),
        "description": t("Recherche im Web"),
        "parameters": [
            {"name": "prompt", "type": "string", "required": True, "frontendType": "textarea",
            {"name": "prompt", "type": "str", "required": True, "frontendType": "textarea",
             "description": t("Recherche-Anfrage")},
        ],
            {"name": "context", "type": "Any", "required": False, "frontendType": "contextBuilder",
             "description": CONTEXT_BUILDER_PARAM_DESCRIPTION, "default": "",
             "graphInherit": {"port": 0, "kind": "primaryTextRef"}},
            {"name": "documentList", "type": "DocumentList", "required": False, "frontendType": "hidden",
             "description": t("Dokumente aus vorherigen Schritten"), "default": "",
             "graphInherit": {"port": 0, "kind": "documentListWire"}},
        ] + _AI_COMMON_PARAMS,
        "inputs": 1,
        "outputs": 1,
        "inputPorts": {0: {"accepts": ["Transit"]}},
        "outputPorts": {0: {"schema": "AiResult"}},
        "inputPorts": {0: {"accepts": [
            "FormPayload", "Transit", "AiResult", "DocumentList", "ActionResult", "LoopItem", "TextResult",
        ]}},
        "outputPorts": {0: {"schema": "AiResult", "dataPickOptions": AI_RESULT_DATA_PICK_OPTIONS}},
        "meta": {"icon": "mdi-magnify", "color": "#9C27B0", "usesAi": True},
        "_method": "ai",
        "_action": "webResearch",

@@ -53,14 +196,23 @@ AI_NODES = [
        "label": t("Dokument zusammenfassen"),
        "description": t("Dokumentinhalt zusammenfassen"),
        "parameters": [
            {"name": "summaryLength", "type": "string", "required": False, "frontendType": "select",
             "frontendOptions": {"options": ["short", "medium", "long"]},
             "description": t("Kurz, mittel oder lang"), "default": "medium"},
        ],
            {"name": "documentList", "type": "DocumentList", "required": True, "frontendType": "dataRef",
             "description": t("Dokumente aus vorherigen Schritten"),
             "graphInherit": {"port": 0, "kind": "documentListWire"}},
            {"name": "resultType", "type": "str", "required": False, "frontendType": "select",
             "frontendOptions": {"options": ["txt", "json", "md", "csv", "xml", "html", "pdf", "docx", "xlsx", "pptx", "png", "jpg"]},
             "description": t("Ausgabeformat"), "default": "txt"},
            {"name": "summaryLength", "type": "str", "required": False, "frontendType": "select",
             "frontendOptions": {"options": ["brief", "medium", "detailed"]},
             "description": t("Kurz, mittel oder ausführlich"), "default": "medium"},
            {"name": "folderId", "type": "str", "required": False, "frontendType": "userFileFolder",
             "description": t("Zielordner in Meine Dateien"),
             "default": ""},
        ] + _AI_COMMON_PARAMS,
        "inputs": 1,
        "outputs": 1,
        "inputPorts": {0: {"accepts": ["DocumentList", "Transit"]}},
        "outputPorts": {0: {"schema": "AiResult"}},
        "inputPorts": {0: {"accepts": ["DocumentList", "Transit", "LoopItem"]}},
        "outputPorts": {0: {"schema": "AiResult", "dataPickOptions": AI_RESULT_DATA_PICK_OPTIONS}},
        "meta": {"icon": "mdi-file-document-outline", "color": "#9C27B0", "usesAi": True},
        "_method": "ai",
        "_action": "summarizeDocument",

@@ -71,14 +223,22 @@ AI_NODES = [
        "label": t("Dokument übersetzen"),
        "description": t("Dokument in Zielsprache übersetzen"),
        "parameters": [
            {"name": "targetLanguage", "type": "string", "required": True, "frontendType": "select",
             "frontendOptions": {"options": ["en", "de", "fr", "it", "es", "pt", "nl"]},
             "description": t("Zielsprache")},
        ],
            {"name": "documentList", "type": "DocumentList", "required": True, "frontendType": "dataRef",
             "description": t("Dokumente aus vorherigen Schritten"),
             "graphInherit": {"port": 0, "kind": "documentListWire"}},
            {"name": "resultType", "type": "str", "required": False, "frontendType": "select",
             "frontendOptions": {"options": ["txt", "json", "md", "csv", "xml", "html", "pdf", "docx", "xlsx", "pptx", "png", "jpg"]},
             "description": t("Ausgabeformat"), "default": "txt"},
            {"name": "targetLanguage", "type": "str", "required": True, "frontendType": "text",
             "description": t("Zielsprache (z.B. de, en, French)")},
            {"name": "folderId", "type": "str", "required": False, "frontendType": "userFileFolder",
             "description": t("Zielordner in Meine Dateien"),
             "default": ""},
        ] + _AI_COMMON_PARAMS,
        "inputs": 1,
        "outputs": 1,
        "inputPorts": {0: {"accepts": ["DocumentList", "Transit"]}},
        "outputPorts": {0: {"schema": "AiResult"}},
        "inputPorts": {0: {"accepts": ["DocumentList", "Transit", "LoopItem"]}},
        "outputPorts": {0: {"schema": "AiResult", "dataPickOptions": AI_RESULT_DATA_PICK_OPTIONS}},
        "meta": {"icon": "mdi-translate", "color": "#9C27B0", "usesAi": True},
        "_method": "ai",
        "_action": "translateDocument",

@@ -89,14 +249,20 @@ AI_NODES = [
        "label": t("Dokument konvertieren"),
        "description": t("Dokument in anderes Format konvertieren"),
        "parameters": [
            {"name": "targetFormat", "type": "string", "required": True, "frontendType": "select",
             "frontendOptions": {"options": ["pdf", "docx", "txt", "html", "md"]},
            {"name": "documentList", "type": "DocumentList", "required": True, "frontendType": "dataRef",
             "description": t("Dokumente aus vorherigen Schritten"),
             "graphInherit": {"port": 0, "kind": "documentListWire"}},
            {"name": "targetFormat", "type": "str", "required": True, "frontendType": "select",
             "frontendOptions": {"options": ["docx", "pdf", "xlsx", "csv", "txt", "html", "json", "md"]},
             "description": t("Zielformat")},
        ],
            {"name": "folderId", "type": "str", "required": False, "frontendType": "userFileFolder",
             "description": t("Zielordner in Meine Dateien"),
             "default": ""},
        ] + _AI_COMMON_PARAMS,
        "inputs": 1,
        "outputs": 1,
        "inputPorts": {0: {"accepts": ["DocumentList", "Transit"]}},
        "outputPorts": {0: {"schema": "DocumentList"}},
        "inputPorts": {0: {"accepts": ["DocumentList", "Transit", "LoopItem"]}},
        "outputPorts": {0: {"schema": "DocumentList", "dataPickOptions": DOCUMENT_LIST_DATA_PICK_OPTIONS}},
        "meta": {"icon": "mdi-file-convert", "color": "#9C27B0", "usesAi": True},
        "_method": "ai",
        "_action": "convertDocument",

@@ -107,13 +273,32 @@ AI_NODES = [
        "label": t("Dokument generieren"),
        "description": t("Dokument aus Prompt generieren"),
        "parameters": [
            {"name": "prompt", "type": "string", "required": True, "frontendType": "textarea",
            {"name": "prompt", "type": "str", "required": True, "frontendType": "textarea",
             "description": t("Generierungs-Prompt")},
        ],
            {"name": "outputFormat", "type": "str", "required": False, "frontendType": "select",
             "frontendOptions": {"options": ["docx", "pdf", "txt", "html", "md"]},
             "description": t("Ausgabeformat"), "default": "docx"},
            {"name": "title", "type": "str", "required": False, "frontendType": "text",
             "description": t("Dokumenttitel (Metadaten / Dateiname)"), "default": ""},
            {"name": "documentType", "type": "str", "required": False, "frontendType": "select",
             "frontendOptions": {"options": ["letter", "memo", "proposal", "contract", "report", "email"]},
             "description": t("Dokumentart (Inhaltshinweis fuer die KI)"), "default": "proposal"},
            {"name": "folderId", "type": "str", "required": False, "frontendType": "userFileFolder",
             "description": t("Zielordner in Meine Dateien"),
             "default": ""},
            {"name": "context", "type": "Any", "required": False, "frontendType": "contextBuilder",
             "description": CONTEXT_BUILDER_PARAM_DESCRIPTION, "default": "",
             "graphInherit": {"port": 0, "kind": "primaryTextRef"}},
            {"name": "documentList", "type": "DocumentList", "required": False, "frontendType": "hidden",
             "description": t("Dokumente aus vorherigen Schritten"), "default": "",
             "graphInherit": {"port": 0, "kind": "documentListWire"}},
        ] + _AI_COMMON_PARAMS,
        "inputs": 1,
        "outputs": 1,
        "inputPorts": {0: {"accepts": ["Transit"]}},
        "outputPorts": {0: {"schema": "DocumentList"}},
        "inputPorts": {0: {"accepts": [
            "FormPayload", "Transit", "AiResult", "DocumentList", "ActionResult", "LoopItem", "TextResult",
        ]}},
        "outputPorts": {0: {"schema": "DocumentList", "dataPickOptions": DOCUMENT_LIST_DATA_PICK_OPTIONS}},
        "meta": {"icon": "mdi-file-plus", "color": "#9C27B0", "usesAi": True},
        "_method": "ai",
        "_action": "generateDocument",

@@ -124,16 +309,27 @@ AI_NODES = [
        "label": t("Code generieren"),
        "description": t("Code aus Beschreibung generieren"),
        "parameters": [
            {"name": "prompt", "type": "string", "required": True, "frontendType": "textarea",
            {"name": "prompt", "type": "str", "required": True, "frontendType": "textarea",
             "description": t("Code-Generierungs-Prompt")},
            {"name": "language", "type": "string", "required": False, "frontendType": "select",
             "frontendOptions": {"options": ["python", "javascript", "typescript", "java", "csharp", "go"]},
             "description": t("Programmiersprache"), "default": "python"},
        ],
            {"name": "resultType", "type": "str", "required": False, "frontendType": "select",
             "frontendOptions": {"options": ["py", "js", "ts", "html", "java", "cpp", "txt", "json", "csv", "xml"]},
             "description": t("Datei-Endung der erzeugten Code-Datei"), "default": "py"},
            {"name": "folderId", "type": "str", "required": False, "frontendType": "userFileFolder",
             "description": t("Zielordner in Meine Dateien"),
             "default": ""},
            {"name": "context", "type": "Any", "required": False, "frontendType": "contextBuilder",
             "description": CONTEXT_BUILDER_PARAM_DESCRIPTION, "default": "",
             "graphInherit": {"port": 0, "kind": "primaryTextRef"}},
            {"name": "documentList", "type": "DocumentList", "required": False, "frontendType": "hidden",
             "description": t("Dokumente aus vorherigen Schritten"), "default": "",
             "graphInherit": {"port": 0, "kind": "documentListWire"}},
        ] + _AI_COMMON_PARAMS,
        "inputs": 1,
        "outputs": 1,
        "inputPorts": {0: {"accepts": ["Transit"]}},
        "outputPorts": {0: {"schema": "AiResult"}},
        "inputPorts": {0: {"accepts": [
            "FormPayload", "Transit", "AiResult", "DocumentList", "ActionResult", "LoopItem", "TextResult",
        ]}},
        "outputPorts": {0: {"schema": "AiResult", "dataPickOptions": AI_RESULT_DATA_PICK_OPTIONS}},
        "meta": {"icon": "mdi-code-tags", "color": "#9C27B0", "usesAi": True},
        "_method": "ai",
        "_action": "generateCode",

@@ -144,16 +340,16 @@ AI_NODES = [
        "label": t("KI-Konsolidierung"),
        "description": t("Gesammelte Ergebnisse mit KI zusammenfassen, klassifizieren oder semantisch zusammenführen"),
        "parameters": [
            {"name": "mode", "type": "string", "required": False, "frontendType": "select",
            {"name": "mode", "type": "str", "required": False, "frontendType": "select",
             "frontendOptions": {"options": ["summarize", "classify", "semanticMerge"]},
             "description": t("Konsolidierungsmodus"), "default": "summarize"},
            {"name": "prompt", "type": "string", "required": False, "frontendType": "textarea",
            {"name": "prompt", "type": "str", "required": False, "frontendType": "textarea",
             "description": t("Optionaler Prompt für die Konsolidierung"), "default": ""},
        ],
        ] + _AI_COMMON_PARAMS,
        "inputs": 1,
        "outputs": 1,
        "inputPorts": {0: {"accepts": ["AggregateResult", "Transit"]}},
        "outputPorts": {0: {"schema": "ConsolidateResult"}},
        "outputPorts": {0: {"schema": "ConsolidateResult", "dataPickOptions": CONSOLIDATE_RESULT_DATA_PICK_OPTIONS}},
        "meta": {"icon": "mdi-table-merge-cells", "color": "#9C27B0", "usesAi": True},
        "_method": "ai",
        "_action": "consolidate",


@@ -4,6 +4,63 @@

from modules.shared.i18nRegistry import t

from modules.features.graphicalEditor.nodeDefinitions.ai import ACTION_RESULT_DATA_PICK_OPTIONS

TASK_LIST_DATA_PICK_OPTIONS = [
    {
        "path": ["tasks"],
        "pickerLabel": t("Alle Aufgaben"),
        "detail": t("Vollständige Aufgabenliste."),
        "recommended": True,
        "type": "List[TaskItem]",
    },
    {
        "path": ["tasks", 0],
        "pickerLabel": t("Erste Aufgabe"),
        "detail": t("Erstes Listenelement."),
        "recommended": False,
        "type": "TaskItem",
    },
    {
        "path": ["count"],
        "pickerLabel": t("Anzahl"),
        "detail": t("Anzahl der Aufgaben."),
        "recommended": False,
        "type": "int",
    },
    {
        "path": ["listId"],
        "pickerLabel": t("Listen-ID"),
        "detail": t("ClickUp-Listen-Kontext, falls gesetzt."),
        "recommended": False,
        "type": "str",
    },
]

TASK_RESULT_DATA_PICK_OPTIONS = [
    {
        "path": ["success"],
        "pickerLabel": t("Erfolg"),
        "detail": t("Ob der API-Aufruf erfolgreich war."),
        "recommended": True,
        "type": "bool",
    },
    {
        "path": ["taskId"],
        "pickerLabel": t("Aufgaben-ID"),
        "detail": t("ID der betroffenen Aufgabe."),
        "recommended": True,
        "type": "str",
    },
    {
        "path": ["task"],
        "pickerLabel": t("Aufgabendaten"),
        "detail": t("Vollständiges Task-Objekt (Dict)."),
        "recommended": True,
        "type": "Dict",
    },
]

CLICKUP_NODES = [
    {
        "id": "clickup.searchTasks",
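
# Sketch: how a dataPickOptions "path" above could be resolved against a
# node output (illustration only; the shipped resolver is not part of this
# diff). ["tasks", 0] on a TaskList output picks the first task, matching
# the "Erste Aufgabe" option.
def _resolveDataPickPath(output, path):
    current = output
    for step in path:
        if isinstance(step, int) and isinstance(current, (list, tuple)):
            current = current[step] if 0 <= step < len(current) else None
        elif isinstance(current, dict):
            current = current.get(step)
        else:
            return None
    return current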
@@ -11,29 +68,29 @@ CLICKUP_NODES = [
        "label": t("Aufgaben suchen"),
        "description": t("Aufgaben in einem Workspace suchen"),
        "parameters": [
            {"name": "connectionReference", "type": "string", "required": True, "frontendType": "userConnection",
            {"name": "connectionReference", "type": "str", "required": True, "frontendType": "userConnection",
             "frontendOptions": {"authority": "clickup"},
             "description": t("ClickUp-Verbindung")},
            {"name": "teamId", "type": "string", "required": True, "frontendType": "text",
            {"name": "teamId", "type": "str", "required": True, "frontendType": "text",
             "description": t("Team-/Workspace-ID")},
            {"name": "query", "type": "string", "required": True, "frontendType": "text",
            {"name": "query", "type": "str", "required": True, "frontendType": "text",
             "description": t("Suchbegriff")},
            {"name": "page", "type": "number", "required": False, "frontendType": "number",
            {"name": "page", "type": "int", "required": False, "frontendType": "number",
             "description": t("Seite"), "default": 0},
            {"name": "listId", "type": "string", "required": False, "frontendType": "clickupList",
            {"name": "listId", "type": "str", "required": False, "frontendType": "clickupList",
             "frontendOptions": {"dependsOn": "connectionReference"},
             "description": t("In dieser Liste suchen")},
            {"name": "includeClosed", "type": "boolean", "required": False, "frontendType": "checkbox",
            {"name": "includeClosed", "type": "bool", "required": False, "frontendType": "checkbox",
             "description": t("Erledigte einbeziehen"), "default": False},
            {"name": "fullTaskData", "type": "boolean", "required": False, "frontendType": "checkbox",
            {"name": "fullTaskData", "type": "bool", "required": False, "frontendType": "checkbox",
             "description": t("Vollständige Daten"), "default": False},
            {"name": "matchNameOnly", "type": "boolean", "required": False, "frontendType": "checkbox",
            {"name": "matchNameOnly", "type": "bool", "required": False, "frontendType": "checkbox",
             "description": t("Nur Titel"), "default": True},
        ],
        "inputs": 1,
        "outputs": 1,
        "inputPorts": {0: {"accepts": ["Transit"]}},
        "outputPorts": {0: {"schema": "TaskList"}},
        "outputPorts": {0: {"schema": "TaskList", "dataPickOptions": TASK_LIST_DATA_PICK_OPTIONS}},
        "meta": {"icon": "mdi-magnify", "color": "#7B68EE", "usesAi": False},
        "_method": "clickup",
        "_action": "searchTasks",

@@ -44,21 +101,21 @@ CLICKUP_NODES = [
        "label": t("Aufgaben auflisten"),
        "description": t("Aufgaben einer Liste auflisten"),
        "parameters": [
            {"name": "connectionReference", "type": "string", "required": True, "frontendType": "userConnection",
            {"name": "connectionReference", "type": "str", "required": True, "frontendType": "userConnection",
             "frontendOptions": {"authority": "clickup"},
             "description": t("ClickUp-Verbindung")},
            {"name": "pathQuery", "type": "string", "required": True, "frontendType": "clickupList",
            {"name": "pathQuery", "type": "str", "required": True, "frontendType": "clickupList",
             "frontendOptions": {"dependsOn": "connectionReference"},
             "description": t("Pfad zur Liste")},
            {"name": "page", "type": "number", "required": False, "frontendType": "number",
            {"name": "page", "type": "int", "required": False, "frontendType": "number",
             "description": t("Seite"), "default": 0},
            {"name": "includeClosed", "type": "boolean", "required": False, "frontendType": "checkbox",
            {"name": "includeClosed", "type": "bool", "required": False, "frontendType": "checkbox",
             "description": t("Erledigte einbeziehen"), "default": False},
        ],
        "inputs": 1,
        "outputs": 1,
        "inputPorts": {0: {"accepts": ["Transit"]}},
        "outputPorts": {0: {"schema": "TaskList"}},
        "outputPorts": {0: {"schema": "TaskList", "dataPickOptions": TASK_LIST_DATA_PICK_OPTIONS}},
        "meta": {"icon": "mdi-format-list-bulleted", "color": "#7B68EE", "usesAi": False},
        "_method": "clickup",
        "_action": "listTasks",

@@ -69,18 +126,18 @@ CLICKUP_NODES = [
        "label": t("Aufgabe abrufen"),
        "description": t("Eine Aufgabe abrufen"),
        "parameters": [
            {"name": "connectionReference", "type": "string", "required": True, "frontendType": "userConnection",
            {"name": "connectionReference", "type": "str", "required": True, "frontendType": "userConnection",
             "frontendOptions": {"authority": "clickup"},
             "description": t("ClickUp-Verbindung")},
            {"name": "taskId", "type": "string", "required": False, "frontendType": "text",
            {"name": "taskId", "type": "str", "required": False, "frontendType": "text",
             "description": t("Task-ID")},
            {"name": "pathQuery", "type": "string", "required": False, "frontendType": "text",
            {"name": "pathQuery", "type": "str", "required": False, "frontendType": "text",
             "description": t("Oder Pfad")},
        ],
        "inputs": 1,
        "outputs": 1,
        "inputPorts": {0: {"accepts": ["Transit"]}},
        "outputPorts": {0: {"schema": "TaskResult"}},
        "outputPorts": {0: {"schema": "TaskResult", "dataPickOptions": TASK_RESULT_DATA_PICK_OPTIONS}},
        "meta": {"icon": "mdi-file-document-outline", "color": "#7B68EE", "usesAi": False},
        "_method": "clickup",
        "_action": "getTask",

@@ -91,42 +148,40 @@ CLICKUP_NODES = [
        "label": t("Aufgabe erstellen"),
        "description": t("Aufgabe erstellen"),
        "parameters": [
            {"name": "connectionReference", "type": "string", "required": True, "frontendType": "userConnection",
            {"name": "connectionReference", "type": "str", "required": True, "frontendType": "userConnection",
             "frontendOptions": {"authority": "clickup"},
             "description": t("ClickUp-Verbindung")},
            {"name": "teamId", "type": "string", "required": False, "frontendType": "text",
             "description": t("Workspace")},
            {"name": "pathQuery", "type": "string", "required": False, "frontendType": "clickupList",
            {"name": "pathQuery", "type": "str", "required": False, "frontendType": "clickupList",
             "frontendOptions": {"dependsOn": "connectionReference"},
             "description": t("Pfad zur Liste")},
            {"name": "listId", "type": "string", "required": False, "frontendType": "text",
            {"name": "listId", "type": "str", "required": False, "frontendType": "text",
             "description": t("Listen-ID")},
            {"name": "name", "type": "string", "required": True, "frontendType": "text",
            {"name": "name", "type": "str", "required": True, "frontendType": "text",
             "description": t("Name")},
            {"name": "description", "type": "string", "required": False, "frontendType": "textarea",
            {"name": "description", "type": "str", "required": False, "frontendType": "textarea",
             "description": t("Beschreibung")},
            {"name": "taskStatus", "type": "string", "required": False, "frontendType": "text",
            {"name": "taskStatus", "type": "str", "required": False, "frontendType": "text",
             "description": t("Status")},
            {"name": "taskPriority", "type": "string", "required": False, "frontendType": "select",
            {"name": "taskPriority", "type": "str", "required": False, "frontendType": "select",
             "frontendOptions": {"options": ["1", "2", "3", "4"]},
             "description": t("Priorität 1-4")},
            {"name": "taskDueDateMs", "type": "string", "required": False, "frontendType": "text",
            {"name": "taskDueDateMs", "type": "str", "required": False, "frontendType": "text",
             "description": t("Fälligkeit (ms)")},
            {"name": "taskAssigneeIds", "type": "object", "required": False, "frontendType": "json",
             "description": t("Zugewiesene")},
            {"name": "taskTimeEstimateMs", "type": "string", "required": False, "frontendType": "text",
            {"name": "taskTimeEstimateMs", "type": "str", "required": False, "frontendType": "text",
             "description": t("Zeitschätzung (ms)")},
            {"name": "taskTimeEstimateHours", "type": "string", "required": False, "frontendType": "text",
            {"name": "taskTimeEstimateHours", "type": "str", "required": False, "frontendType": "text",
             "description": t("Zeitschätzung (h)")},
            {"name": "customFieldValues", "type": "object", "required": False, "frontendType": "json",
             "description": t("Benutzerdefinierte Felder")},
            {"name": "taskFields", "type": "string", "required": False, "frontendType": "json",
            {"name": "taskFields", "type": "str", "required": False, "frontendType": "json",
             "description": t("Zusätzliches JSON")},
        ],
        "inputs": 1,
        "outputs": 1,
        "inputPorts": {0: {"accepts": ["Transit"]}},
        "outputPorts": {0: {"schema": "TaskResult"}},
        "outputPorts": {0: {"schema": "TaskResult", "dataPickOptions": TASK_RESULT_DATA_PICK_OPTIONS}},
        "meta": {"icon": "mdi-plus-circle-outline", "color": "#7B68EE", "usesAi": False},
        "_method": "clickup",
        "_action": "createTask",

@@ -137,22 +192,21 @@ CLICKUP_NODES = [
        "label": t("Aufgabe aktualisieren"),
        "description": t("Felder der Aufgabe ändern"),
        "parameters": [
            {"name": "connectionReference", "type": "string", "required": True, "frontendType": "userConnection",
            {"name": "connectionReference", "type": "str", "required": True, "frontendType": "userConnection",
             "frontendOptions": {"authority": "clickup"},
             "description": t("ClickUp-Verbindung")},
            {"name": "taskId", "type": "string", "required": False, "frontendType": "text",
            {"name": "taskId", "type": "str", "required": False, "frontendType": "text",
             "description": t("Task-ID")},
            {"name": "path", "type": "string", "required": False, "frontendType": "text",
            {"name": "path", "type": "str", "required": False, "frontendType": "text",
             "description": t("Oder Pfad")},
            {"name": "taskUpdateEntries", "type": "object", "required": False, "frontendType": "keyValueRows",
             "description": t("Zu ändernde Felder")},
            {"name": "taskUpdate", "type": "string", "required": False, "frontendType": "json",
             "description": t("JSON für API")},
            {"name": "taskUpdate", "type": "str", "required": False, "frontendType": "json",
             "description": t("JSON-Body für PUT /task/{id}, z.B. {\"name\":\"...\",\"status\":\"...\"}")},
        ],
        "inputs": 1,
        "outputs": 1,
        "inputPorts": {0: {"accepts": ["TaskResult", "Transit"]}},
        "outputPorts": {0: {"schema": "TaskResult"}},
        "outputPorts": {0: {"schema": "TaskResult", "dataPickOptions": TASK_RESULT_DATA_PICK_OPTIONS}},
        "paramMappers": ["clickupTaskUpdateMerge"],
        "meta": {"icon": "mdi-pencil-outline", "color": "#7B68EE", "usesAi": False},
        "_method": "clickup",
        "_action": "updateTask",
@ -163,20 +217,22 @@ CLICKUP_NODES = [
|
|||
"label": t("Anhang hochladen"),
|
||||
"description": t("Datei an Task anhängen"),
|
||||
"parameters": [
|
||||
{"name": "connectionReference", "type": "string", "required": True, "frontendType": "userConnection",
|
||||
{"name": "connectionReference", "type": "str", "required": True, "frontendType": "userConnection",
|
||||
"frontendOptions": {"authority": "clickup"},
|
||||
"description": t("ClickUp-Verbindung")},
|
||||
{"name": "taskId", "type": "string", "required": False, "frontendType": "text",
|
||||
{"name": "taskId", "type": "str", "required": False, "frontendType": "text",
|
||||
"description": t("Task-ID")},
|
||||
{"name": "path", "type": "string", "required": False, "frontendType": "text",
|
||||
{"name": "path", "type": "str", "required": False, "frontendType": "text",
|
||||
"description": t("Oder Pfad")},
|
||||
{"name": "fileName", "type": "string", "required": False, "frontendType": "text",
|
||||
{"name": "fileName", "type": "str", "required": False, "frontendType": "text",
|
||||
"description": t("Dateiname")},
|
||||
{"name": "content", "type": "str", "required": True, "frontendType": "hidden",
|
||||
"description": t("Datei-Inhalt aus Upstream-Node (via Wire oder DataRef)"), "default": ""},
|
||||
],
|
||||
"inputs": 1,
|
||||
"outputs": 1,
|
||||
"inputPorts": {0: {"accepts": ["DocumentList", "Transit"]}},
|
||||
"outputPorts": {0: {"schema": "ActionResult"}},
|
||||
"outputPorts": {0: {"schema": "ActionResult", "dataPickOptions": ACTION_RESULT_DATA_PICK_OPTIONS}},
|
||||
"meta": {"icon": "mdi-attachment", "color": "#7B68EE", "usesAi": False},
|
||||
"_method": "clickup",
|
||||
"_action": "uploadAttachment",
|
||||
|
|
|
|||
|
|
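The dataPickOptions lists attached to the output ports above are typed paths into a step's result (string keys for dicts, integer indices for lists). A minimal sketch of how such a path could be resolved against a result payload; the helper name `resolve_pick_path` is illustrative and not part of this repository:

```python
from typing import Any

def resolve_pick_path(payload: Any, path: list) -> Any:
    """Walk a dataPickOptions path: str segments index dicts, int segments index lists.

    Illustrative helper, not gateway code: returns None on a missing segment
    instead of raising, mirroring lenient picker behaviour.
    """
    current = payload
    for segment in path:
        if isinstance(segment, int) and isinstance(current, (list, tuple)):
            current = current[segment] if -len(current) <= segment < len(current) else None
        elif isinstance(current, dict):
            current = current.get(segment)
        else:
            return None
        if current is None:
            return None
    return current

# Example against a TaskResult-shaped payload:
result = {"documents": [{"documentData": {"title": "Offer"}}], "response": "ok"}
assert resolve_pick_path(result, ["documents", 0, "documentData"]) == {"title": "Offer"}
assert resolve_pick_path(result, ["missing", "key"]) is None
```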
@ -1,30 +1,376 @@
# Copyright (c) 2025 Patrick Motsch
# Context node definitions — structural extraction without AI.
# Context node definitions — structural extraction without AI plus
# generic key/value, merge, filter and transform helpers.

from modules.shared.i18nRegistry import t

_CONTEXT_INPUT_SCHEMAS = [
"Transit",
"ActionResult",
"AiResult",
"MergeResult",
"FormPayload",
"DocumentList",
"EmailList",
"TaskList",
"FileList",
"LoopItem",
"UdmDocument",
]

_MERGE_RESULT_DATA_PICK_OPTIONS = [
{
"path": ["merged"],
"pickerLabel": t("Zusammengeführt"),
"detail": t("Zusammengeführtes Objekt nach gewählter Strategie."),
"recommended": True,
"type": "Dict",
},
{
"path": ["first"],
"pickerLabel": t("Erster Zweig"),
"detail": t("Daten vom ersten verbundenen Eingang."),
"recommended": False,
"type": "Any",
},
{
"path": ["inputs"],
"pickerLabel": t("Alle Eingänge"),
"detail": t("Dict der Eingabeobjekte nach Port-Index."),
"recommended": False,
"type": "Dict[int,Any]",
},
{
"path": ["conflicts"],
"pickerLabel": t("Konflikte"),
"detail": t("Liste der Schlüssel mit Konflikt (nur bei errorOnConflict)."),
"recommended": False,
"type": "List[str]",
},
]

CONTEXT_NODES = [
{
"id": "context.extractContent",
"category": "context",
"label": t("Inhalt extrahieren"),
"description": t("Dokumentstruktur extrahieren ohne KI (Seiten, Abschnitte, Bilder, Tabellen)"),
"description": t(
"Extrahiert Inhalt ohne KI. Ergebnis einheitlich wie KI-Schritte: `response` "
"(gesammelter Klartext), strukturierte JSON-Unterlage in `documents[0]`, "
"einzelne Bilder als eigene Dokumente `extract_media_*` (nur im Workflow, ohne Eintrag unter „Meine Dateien“) — "
"Auswahl im Daten-Picker wie bei `ai.process`."
),
"parameters": [
{"name": "outputDetail", "type": "string", "required": False, "frontendType": "select",
"frontendOptions": {"options": ["full", "structure", "references"]},
"description": t("Detailgrad: full = alles, structure = Skelett, references = Dateireferenzen"),
"default": "full"},
{"name": "includeImages", "type": "boolean", "required": False, "frontendType": "checkbox",
"description": t("Bilder extrahieren"), "default": True},
{"name": "includeTables", "type": "boolean", "required": False, "frontendType": "checkbox",
"description": t("Tabellen extrahieren"), "default": True},
{"name": "documentList", "type": "str", "required": True, "frontendType": "hidden",
"description": t("Dokumentenliste (via Wire oder DataRef)"), "default": "",
"graphInherit": {"port": 0, "kind": "documentListWire"}},
],
"inputs": 1,
"outputs": 1,
"inputPorts": {0: {"accepts": ["DocumentList", "Transit"]}},
"outputPorts": {0: {"schema": "UdmDocument"}},
"inputPorts": {0: {"accepts": ["DocumentList", "Transit", "LoopItem"]}},
"outputPorts": {
0: {
"schema": "ActionResult",
# Authoritative DataPicker paths (same idea as ``parameters`` for configuration).
# Frontend uses only this list — no schema expansion merge for this port.
"dataPickOptions": [
{
"path": ["documents", 0, "documentData"],
"pickerLabel": t("Gesamter Inhalt"),
"detail": t(
"Strukturiertes Handover als JSON inklusive aller Textteile "
"und Verweisen auf ausgelagerte Bilder."
),
"recommended": True,
"type": "Any",
},
{
"path": ["response"],
"pickerLabel": t("Nur Text"),
"detail": t(
"Verketteter Klartext aus allen erkannten Textteilen."
),
"recommended": True,
"type": "str",
},
{
"path": ["imageDocumentsOnly"],
"pickerLabel": t("Nur Bilder"),
"detail": t(
"Nur die extrahierten Bilddokumente als Liste, ohne JSON-Handover."
),
"recommended": False,
"type": "List[ActionDocument]",
},
{
"path": ["documents"],
"pickerLabel": t("Alle Dateitypen"),
"detail": t(
"Alle Ausgabedokumente nacheinander: JSON-Handover und Bilder."
),
"recommended": False,
"type": "List[ActionDocument]",
},
],
}
},
"meta": {"icon": "mdi-file-tree-outline", "color": "#00897B", "usesAi": False},
"_method": "context",
"_action": "extractContent",
},
{
"id": "context.setContext",
"category": "context",
"label": t("Kontext setzen"),
"description": t(
"Schreibt in den Workflow-Kontext. Pro Zeile: Ziel-Schlüssel, dann entweder einen "
"festen Wert, eine Datenquelle aus dem Graph (Kontext-Picker wie bei anderen Nodes), "
"oder eine Aufgabe für einen Benutzer (Human Task) zum Setzen des Werts."
),
"parameters": [
{
"name": "scope",
"type": "str",
"required": False,
"frontendType": "select",
"frontendOptions": {"options": ["local", "global", "session"]},
"default": "local",
"description": t("Speicherbereich"),
},
{
"name": "assignments",
"type": "list",
"required": True,
"frontendType": "contextAssignments",
"default": [],
"description": t(
"Zuweisungen: Ziel-Schlüssel, Quelle (Picker / fester Wert / Human Task), "
"Modus (set, setIfEmpty, append, increment). Optionaler Experten-Pfad `sourcePath` unter der "
"gewählten Datenquelle (z. B. payload.status)."
),
"graphInherit": {"port": 0, "kind": "primaryTextRef"},
},
],
"inputs": 1,
"outputs": 1,
"inputPorts": {0: {"accepts": _CONTEXT_INPUT_SCHEMAS}},
"outputPorts": {
0: {
"schema": "Transit",
"dynamic": True,
"deriveFrom": "assignments",
"deriveNameField": "contextKey",
}
},
"injectUpstreamPayload": True,
"injectRunContext": True,
"surfaceDataAsTopLevel": True,
"meta": {"icon": "mdi-database-edit-outline", "color": "#5C6BC0", "usesAi": False},
"_method": "context",
"_action": "setContext",
},
{
"id": "context.mergeContext",
"category": "context",
"label": t("Kontext zusammenführen"),
"description": t(
"Wartet auf alle verbundenen eingehenden Branches und führt deren "
"Kontext-Daten zu einem einheitlichen MergeResult zusammen. "
"Strategien: 'shallow' (oberste Ebene), 'deep' (rekursiv), "
"'firstWins' / 'lastWins' bei Konflikten, "
"'errorOnConflict' (bricht ab und listet Konflikte). "
"Der Node blockiert bis alle erwarteten Inputs eingetroffen sind."
),
"parameters": [
{
"name": "strategy",
"type": "str",
"required": False,
"frontendType": "select",
"frontendOptions": {
"options": ["shallow", "deep", "firstWins", "lastWins", "errorOnConflict"]
},
"default": "deep",
"description": t("Strategie bei gleichnamigen Keys aus verschiedenen Branches"),
},
{
"name": "waitFor",
"type": "int",
"required": False,
"frontendType": "number",
"default": 0,
"description": t(
"Anzahl Inputs abwarten (0 = alle verbundenen Branches). "
"Hilfreich für optionale Branches mit Timeout."
),
},
{
"name": "timeoutMs",
"type": "int",
"required": False,
"frontendType": "number",
"default": 30000,
"description": t(
"Maximale Wartezeit in ms — danach wird mit den vorhandenen Inputs fortgesetzt"
),
},
],
"inputs": 5,
"outputs": 1,
"inputPorts": {
0: {"accepts": _CONTEXT_INPUT_SCHEMAS},
1: {"accepts": _CONTEXT_INPUT_SCHEMAS},
2: {"accepts": _CONTEXT_INPUT_SCHEMAS},
3: {"accepts": _CONTEXT_INPUT_SCHEMAS},
4: {"accepts": _CONTEXT_INPUT_SCHEMAS},
},
"outputPorts": {
0: {"schema": "MergeResult", "dataPickOptions": _MERGE_RESULT_DATA_PICK_OPTIONS}
},
"waitsForAllPredecessors": True,
"injectBranchInputs": True,
"meta": {"icon": "mdi-call-merge", "color": "#7B1FA2", "usesAi": False},
"_method": "context",
"_action": "mergeContext",
},
{
"id": "context.filterContext",
"category": "context",
"label": t("Kontext filtern"),
"description": t(
"Gibt nur bestimmte Felder des eingehenden Datenstroms weiter. "
"Modus 'allow': nur diese Keys passieren. "
"Modus 'block': diese Keys werden entfernt, alles andere bleibt. "
"Unterstützt Pfadausdrücke (z.B. 'user.*', '*.id') und tiefe Pfade ('address.city'). "
"Fehlende Keys werden je nach 'missingKeyBehavior' ignoriert, mit null befüllt oder als Fehler behandelt."
),
"parameters": [
{
"name": "mode",
"type": "str",
"required": False,
"frontendType": "select",
"frontendOptions": {"options": ["allow", "block"]},
"default": "allow",
"description": t("Allowlist (nur diese durch) oder Blocklist (diese entfernen)"),
},
{
"name": "keys",
"type": "list",
"required": True,
"frontendType": "stringList",
"default": [],
"description": t(
"Key-Pfade oder Wildcard-Muster. "
"Beispiele: 'response', 'user.*', '*.id', 'address.city'."
),
},
{
"name": "missingKeyBehavior",
"type": "str",
"required": False,
"frontendType": "select",
"frontendOptions": {"options": ["skip", "nullFill", "error"]},
"default": "skip",
"description": t("Verhalten wenn ein erlaubter Key im Input fehlt"),
},
{
"name": "preserveMeta",
"type": "bool",
"required": False,
"frontendType": "checkbox",
"default": True,
"description": t("Interne Meta-Felder (_success, _error, _transit) immer durchlassen"),
},
],
"inputs": 1,
"outputs": 1,
"inputPorts": {0: {"accepts": _CONTEXT_INPUT_SCHEMAS}},
"outputPorts": {
0: {
"schema": "Transit",
"dynamic": True,
"deriveFrom": "keys",
}
},
"injectUpstreamPayload": True,
"surfaceDataAsTopLevel": True,
"meta": {"icon": "mdi-filter-outline", "color": "#00838F", "usesAi": False},
"_method": "context",
"_action": "filterContext",
},
{
"id": "context.transformContext",
"category": "context",
"label": t("Kontext transformieren"),
"description": t(
"Verändert die Struktur des eingehenden Datenstroms. "
"Operationen pro Mapping: 'rename' (Key umbenennen), 'cast' (Typ konvertieren), "
"'nest' (mehrere Felder unter neuem Objekt zusammenfassen), "
"'flatten' (verschachteltes Objekt auf oberste Ebene heben), "
"'compute' (neues Feld aus Template-/{{...}}-Ausdruck berechnen). "
"Jedes Mapping definiert: 'sourceField' (Eingangspfad / Ausdruck), "
"'outputField' (Ausgabe-Key), 'operation' und 'type' (Zieltyp). "
"Das Ergebnis ist ein neues Objekt — der ursprüngliche Datenstrom "
"wird nicht automatisch weitergegeben (ausser 'passthroughUnmapped: true')."
),
"parameters": [
{
"name": "mappings",
"type": "list",
"required": True,
"frontendType": "mappingTable",
"default": [],
"description": t(
"Liste von Mapping-Einträgen. Jeder Eintrag: "
"sourceField (DataRef-Pfad oder Ausdruck), "
"outputField (Ziel-Key im Output), "
"operation (rename | cast | nest | flatten | compute), "
"type (str | int | bool | float | object | list — für cast), "
"expression (für compute: Template oder Ausdruck, z.B. '{{firstName}} {{lastName}}')."
),
},
{
"name": "passthroughUnmapped",
"type": "bool",
"required": False,
"frontendType": "checkbox",
"default": False,
"description": t(
"Alle nicht gemappten Felder des Eingangs zusätzlich in den Output übernehmen."
),
},
{
"name": "flattenDepth",
"type": "int",
"required": False,
"frontendType": "number",
"default": 1,
"description": t("Tiefe für flatten-Operation (1 = eine Ebene, -1 = vollständig)"),
},
],
"inputs": 1,
"outputs": 1,
"inputPorts": {0: {"accepts": _CONTEXT_INPUT_SCHEMAS}},
"outputPorts": {
0: {
"schema": {
"kind": "fromGraph",
"parameter": "mappings",
"nameField": "outputField",
"schemaName": "Transform_dynamic",
},
"dynamic": True,
"deriveFrom": "mappings",
"deriveNameField": "outputField",
}
},
"injectUpstreamPayload": True,
"surfaceDataAsTopLevel": True,
"meta": {"icon": "mdi-swap-horizontal", "color": "#EF6C00", "usesAi": False},
"_method": "context",
"_action": "transformContext",
},
]
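The mergeContext description above names five strategies. A sketch of what a 'deep' merge with conflict collection for 'errorOnConflict' could look like; the function illustrates the documented semantics and is not the node's actual executor:

```python
def deep_merge(a: dict, b: dict, conflicts: list, prefix: str = "") -> dict:
    """Recursively merge b into a copy of a, recording conflicting scalar keys."""
    merged = dict(a)
    for key, value in b.items():
        path = f"{prefix}{key}"
        if key in merged and isinstance(merged[key], dict) and isinstance(value, dict):
            merged[key] = deep_merge(merged[key], value, conflicts, path + ".")
        elif key in merged and merged[key] != value:
            conflicts.append(path)  # both branches set the key to different values
            merged[key] = value     # lastWins semantics as the fallback here
        else:
            merged[key] = value
    return merged

inputs = {0: {"user": {"id": 1}}, 1: {"user": {"name": "Ada"}, "status": "ok"}}
conflicts: list = []
merged: dict = {}
for port in sorted(inputs):
    merged = deep_merge(merged, inputs[port], conflicts)

# With strategy 'errorOnConflict' the executor would abort at this point:
if conflicts:
    raise ValueError(f"merge conflicts: {conflicts}")
print(merged)  # {'user': {'id': 1, 'name': 'Ada'}, 'status': 'ok'}
```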
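filterContext accepts wildcard patterns such as 'user.*' and '*.id'. One plausible way to implement that is to flatten the input to dotted paths and match them with fnmatch; a sketch under that assumption (the real executor may differ):

```python
from fnmatch import fnmatch

def flatten(obj: dict, prefix: str = "") -> dict:
    """Flatten nested dicts to {'user.id': 7, ...} dotted paths."""
    out = {}
    for key, value in obj.items():
        path = f"{prefix}{key}"
        if isinstance(value, dict):
            out.update(flatten(value, path + "."))
        else:
            out[path] = value
    return out

def filter_context(data: dict, keys: list, mode: str = "allow") -> dict:
    flat = flatten(data)
    def matches(path: str) -> bool:
        return any(fnmatch(path, pattern) for pattern in keys)
    kept = {p: v for p, v in flat.items() if matches(p) == (mode == "allow")}
    # Re-nest the dotted paths into a plain dict.
    result: dict = {}
    for path, value in kept.items():
        node = result
        parts = path.split(".")
        for part in parts[:-1]:
            node = node.setdefault(part, {})
        node[parts[-1]] = value
    return result

data = {"user": {"id": 7, "name": "Ada"}, "address": {"city": "Bern"}, "debug": True}
print(filter_context(data, ["user.*", "address.city"]))
# {'user': {'id': 7, 'name': 'Ada'}, 'address': {'city': 'Bern'}}
```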
@ -0,0 +1,22 @@
# Copyright (c) 2025 Patrick Motsch
# Shared parameter copy for ``contextBuilder`` fields (upstream data pick).

from modules.shared.i18nRegistry import t

CONTEXT_BUILDER_PARAM_DESCRIPTION = t(
"Inhalt aus vorherigen Schritten wählen (DataRef / Daten-Picker): z. B. „response“ für Klartext, "
"Handover-Pfade für strukturiertes JSON oder Medienlisten. "
"Die Auflösung erfolgt vollständig serverseitig (`resolveParameterReferences`). "
"Formular-Schritte speichern Antworten unter „payload“ — fehlt ein gewählter Pfad am Root, "
"wird derselbe Pfad automatisch unter „payload“ nachgeschlagen (Kompatibilität mit älteren "
"und neuen Picker-Pfaden). "
"In Freitext-/Template-Feldern werden weiterhin Platzhalter `{{KnotenId.feld.b.z.}}` ersetzt "
"(gleiche Semantik inkl. optionalem Nachschlagen unter „payload“)."
)

# Kurzreferenz für Node-Beschreibungen (optional einbinden): dieselbe Auflösungslogik
# wie bei DataRefs — kein separates Variablen-Subsystem.
REF_AND_TEMPLATE_COMPATIBILITY_SUMMARY = t(
"Verweise: typisierte DataRefs im Parameter; Zeichenketten-Templates mit {{…}}; "
"Formular-Felder unter output.payload."
)
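CONTEXT_BUILDER_PARAM_DESCRIPTION above documents two behaviours: a path missing at the root is retried under "payload", and string templates substitute {{nodeId.field}} placeholders with the same fallback. A compact sketch of that resolution logic; `resolveParameterReferences` is the real entry point named in the text, while the bodies below are assumptions:

```python
import re

def lookup(output: dict, path: str):
    """Resolve a dotted path; if absent at the root, retry under 'payload'."""
    def walk(obj, parts):
        for part in parts:
            if not isinstance(obj, dict) or part not in obj:
                return None
            obj = obj[part]
        return obj
    parts = path.split(".")
    value = walk(output, parts)
    if value is None and parts[0] != "payload":
        value = walk(output.get("payload", {}), parts)  # form steps store answers here
    return value

def render(template: str, outputs: dict) -> str:
    """Replace {{nodeId.field.path}} placeholders from prior step outputs."""
    def sub(match: re.Match) -> str:
        node_id, _, path = match.group(1).partition(".")
        value = lookup(outputs.get(node_id, {}), path)
        return "" if value is None else str(value)
    return re.sub(r"\{\{([^}]+)\}\}", sub, template)

outputs = {"form1": {"payload": {"status": "approved"}}}
print(render("Status: {{form1.status}}", outputs))  # Status: approved
```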
@ -3,6 +3,25 @@

from modules.shared.i18nRegistry import t

from modules.features.graphicalEditor.nodeDefinitions.ai import CONSOLIDATE_RESULT_DATA_PICK_OPTIONS

AGGREGATE_RESULT_DATA_PICK_OPTIONS = [
{
"path": ["items"],
"pickerLabel": t("Gesammelte Elemente"),
"detail": t("Alle aus der Schleife gesammelten Werte."),
"recommended": True,
"type": "List[Any]",
},
{
"path": ["count"],
"pickerLabel": t("Anzahl"),
"detail": t("Anzahl gesammelter Elemente."),
"recommended": False,
"type": "int",
},
]

DATA_NODES = [
{
"id": "data.aggregate",
@ -10,42 +29,26 @@ DATA_NODES = [
"label": t("Sammeln"),
"description": t("Ergebnisse aus Schleifen-Iterationen sammeln"),
"parameters": [
{"name": "mode", "type": "string", "required": False, "frontendType": "select",
{"name": "mode", "type": "str", "required": False, "frontendType": "select",
"frontendOptions": {"options": ["collect", "concat", "sum", "count"]},
"description": t("Aggregationsmodus"), "default": "collect"},
],
"inputs": 1,
"outputs": 1,
"inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "AggregateResult"}},
"inputPorts": {0: {"accepts": ["Transit", "AiResult", "LoopItem"]}},
"outputPorts": {0: {"schema": "AggregateResult", "dataPickOptions": AGGREGATE_RESULT_DATA_PICK_OPTIONS}},
"executor": "data",
"meta": {"icon": "mdi-playlist-plus", "color": "#607D8B", "usesAi": False},
},
{
"id": "data.transform",
"category": "data",
"label": t("Umwandeln"),
"description": t("Daten umstrukturieren"),
"parameters": [
{"name": "mappings", "type": "json", "required": True, "frontendType": "mappingTable",
"description": t("Feld-Zuordnungen"), "default": []},
],
"inputs": 1,
"outputs": 1,
"inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "ActionResult", "dynamic": True, "deriveFrom": "mappings"}},
"executor": "data",
"meta": {"icon": "mdi-swap-horizontal-bold", "color": "#607D8B", "usesAi": False},
},
{
"id": "data.filter",
"category": "data",
"label": t("Filtern"),
"description": t("Elemente nach Bedingung filtern"),
"parameters": [
{"name": "condition", "type": "string", "required": True, "frontendType": "filterExpression",
{"name": "condition", "type": "str", "required": True, "frontendType": "filterExpression",
"description": t("Filterbedingung")},
{"name": "udmContentType", "type": "string", "required": False, "frontendType": "select",
{"name": "udmContentType", "type": "str", "required": False, "frontendType": "select",
"frontendOptions": {"options": ["", "text", "image", "table", "code", "media", "link", "formula"]},
"description": t("UDM-ContentType-Filter (optional, leer = kein UDM-Filter)"), "default": ""},
],
@ -62,16 +65,16 @@ DATA_NODES = [
"label": t("Konsolidieren"),
"description": t("Gesammelte Ergebnisse deterministisch zusammenführen (Tabelle, CSV, Merge)"),
"parameters": [
{"name": "mode", "type": "string", "required": False, "frontendType": "select",
{"name": "mode", "type": "str", "required": False, "frontendType": "select",
"frontendOptions": {"options": ["table", "concat", "merge", "csvJoin"]},
"description": t("Konsolidierungsmodus"), "default": "table"},
{"name": "separator", "type": "string", "required": False, "frontendType": "text",
{"name": "separator", "type": "str", "required": False, "frontendType": "text",
"description": t("Trennzeichen (für concat/csvJoin)"), "default": "\n"},
],
"inputs": 1,
"outputs": 1,
"inputPorts": {0: {"accepts": ["AggregateResult", "Transit"]}},
"outputPorts": {0: {"schema": "ConsolidateResult"}},
"outputPorts": {0: {"schema": "ConsolidateResult", "dataPickOptions": CONSOLIDATE_RESULT_DATA_PICK_OPTIONS}},
"executor": "data",
"meta": {"icon": "mdi-table-merge-cells", "color": "#607D8B", "usesAi": False},
},
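data.consolidate turns the items collected by data.aggregate into a single deterministic artifact. A sketch of the 'concat' and 'csvJoin' modes, assuming the collected items are strings or flat dicts; the real implementation lives behind the 'data' executor and may differ:

```python
import csv
import io

def consolidate(items: list, mode: str = "table", separator: str = "\n") -> str:
    if mode == "concat":
        # Join string representations with the configured separator.
        return separator.join(str(item) for item in items)
    if mode == "csvJoin":
        # Assumes flat dicts; the union of keys becomes the header row.
        fieldnames = sorted({key for item in items for key in item})
        buffer = io.StringIO()
        writer = csv.DictWriter(buffer, fieldnames=fieldnames)
        writer.writeheader()
        writer.writerows(items)
        return buffer.getvalue()
    raise ValueError(f"unsupported mode in this sketch: {mode}")

rows = [{"id": 1, "status": "open"}, {"id": 2, "status": "closed"}]
print(consolidate(rows, mode="csvJoin"))
```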
@ -3,6 +3,35 @@

from modules.shared.i18nRegistry import t

from modules.features.graphicalEditor.nodeDefinitions.contextPickerHelp import (
CONTEXT_BUILDER_PARAM_DESCRIPTION,
)
from modules.features.graphicalEditor.nodeDefinitions.ai import ACTION_RESULT_DATA_PICK_OPTIONS

EMAIL_LIST_DATA_PICK_OPTIONS = [
{
"path": ["emails"],
"pickerLabel": t("Alle E-Mails"),
"detail": t("Die vollständige E-Mail-Liste des Schritts."),
"recommended": True,
"type": "List[EmailItem]",
},
{
"path": ["emails", 0],
"pickerLabel": t("Erste E-Mail"),
"detail": t("Das erste Element der Liste."),
"recommended": False,
"type": "EmailItem",
},
{
"path": ["count"],
"pickerLabel": t("Anzahl"),
"detail": t("Anzahl gefundener E-Mails."),
"recommended": False,
"type": "int",
},
]

EMAIL_NODES = [
{
"id": "email.checkEmail",
@ -10,26 +39,21 @@ EMAIL_NODES = [
"label": t("E-Mail prüfen"),
"description": t("Neue E-Mails prüfen"),
"parameters": [
{"name": "connectionReference", "type": "string", "required": True, "frontendType": "userConnection",
{"name": "connectionReference", "type": "str", "required": True, "frontendType": "userConnection",
"frontendOptions": {"authority": "msft"},
"description": t("E-Mail-Konto Verbindung")},
{"name": "folder", "type": "string", "required": False, "frontendType": "text",
{"name": "folder", "type": "str", "required": False, "frontendType": "text",
"description": t("Ordner"), "default": "Inbox"},
{"name": "limit", "type": "number", "required": False, "frontendType": "number",
{"name": "limit", "type": "int", "required": False, "frontendType": "number",
"description": t("Max E-Mails"), "default": 100},
{"name": "fromAddress", "type": "string", "required": False, "frontendType": "text",
"description": t("Nur von dieser Adresse"), "default": ""},
{"name": "subjectContains", "type": "string", "required": False, "frontendType": "text",
"description": t("Betreff muss enthalten"), "default": ""},
{"name": "hasAttachment", "type": "boolean", "required": False, "frontendType": "checkbox",
"description": t("Nur mit Anhängen"), "default": False},
{"name": "filter", "type": "string", "required": False, "frontendType": "text",
"description": t("Erweitert: Filter-Text"), "default": ""},
{"name": "filter", "type": "str", "required": False, "frontendType": "text",
"description": t("Filter-Ausdruck (z.B. 'from:max@example.com hasAttachment:true betreff')"), "default": ""},
],
"inputs": 1,
"outputs": 1,
"inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "EmailList"}},
"outputPorts": {0: {"schema": "EmailList", "dataPickOptions": EMAIL_LIST_DATA_PICK_OPTIONS}},
"paramMappers": ["emailCheckFilter"],
"meta": {"icon": "mdi-email-check", "color": "#1976D2", "usesAi": False},
"_method": "outlook",
"_action": "readEmails",
@ -40,32 +64,21 @@ EMAIL_NODES = [
"label": t("E-Mail suchen"),
"description": t("E-Mails suchen"),
"parameters": [
{"name": "connectionReference", "type": "string", "required": True, "frontendType": "userConnection",
{"name": "connectionReference", "type": "str", "required": True, "frontendType": "userConnection",
"frontendOptions": {"authority": "msft"},
"description": t("E-Mail-Konto Verbindung")},
{"name": "query", "type": "string", "required": False, "frontendType": "text",
"description": t("Suchbegriff"), "default": ""},
{"name": "folder", "type": "string", "required": False, "frontendType": "text",
"description": t("Ordner"), "default": "Inbox"},
{"name": "limit", "type": "number", "required": False, "frontendType": "number",
{"name": "query", "type": "str", "required": True, "frontendType": "text",
"description": t("Suchausdruck (z.B. 'from:max@example.com hasAttachments:true Rechnung')")},
{"name": "folder", "type": "str", "required": False, "frontendType": "text",
"description": t("Ordner"), "default": "All"},
{"name": "limit", "type": "int", "required": False, "frontendType": "number",
"description": t("Max E-Mails"), "default": 100},
{"name": "fromAddress", "type": "string", "required": False, "frontendType": "text",
"description": t("Von Adresse"), "default": ""},
{"name": "toAddress", "type": "string", "required": False, "frontendType": "text",
"description": t("An Adresse"), "default": ""},
{"name": "subjectContains", "type": "string", "required": False, "frontendType": "text",
"description": t("Betreff enthält"), "default": ""},
{"name": "bodyContains", "type": "string", "required": False, "frontendType": "text",
"description": t("Inhalt enthält"), "default": ""},
{"name": "hasAttachment", "type": "boolean", "required": False, "frontendType": "checkbox",
"description": t("Mit Anhängen"), "default": False},
{"name": "filter", "type": "string", "required": False, "frontendType": "text",
"description": t("Erweitert: KQL-Filter"), "default": ""},
],
"inputs": 1,
"outputs": 1,
"inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "EmailList"}},
"outputPorts": {0: {"schema": "EmailList", "dataPickOptions": EMAIL_LIST_DATA_PICK_OPTIONS}},
"paramMappers": ["emailSearchQuery"],
"meta": {"icon": "mdi-email-search", "color": "#1976D2", "usesAi": False},
"_method": "outlook",
"_action": "searchEmails",
@ -74,27 +87,32 @@ EMAIL_NODES = [
"id": "email.draftEmail",
"category": "email",
"label": t("E-Mail entwerfen"),
"description": t("E-Mail-Entwurf erstellen"),
"description": t(
"AI-gestützt einen E-Mail-Entwurf aus Kontext und optionalen Dokumenten erstellen"),
"parameters": [
{"name": "connectionReference", "type": "string", "required": True, "frontendType": "userConnection",
{"name": "connectionReference", "type": "str", "required": True, "frontendType": "userConnection",
"frontendOptions": {"authority": "msft"},
"description": t("E-Mail-Konto")},
{"name": "subject", "type": "string", "required": True, "frontendType": "text",
"description": t("Betreff")},
{"name": "body", "type": "string", "required": True, "frontendType": "textarea",
"description": t("Inhalt")},
{"name": "to", "type": "string", "required": False, "frontendType": "text",
"description": t("Empfänger"), "default": ""},
{"name": "attachments", "type": "json", "required": False, "frontendType": "attachmentBuilder",
"description": t(
"Anhänge: Liste von { contentRef | csvFromVariable | base64Content, name, mimeType }. "
"Per Wire befüllbar (z.B. CSV aus data.consolidate)."),
"default": []},
{"name": "context", "type": "Any", "required": False, "frontendType": "templateTextarea",
"description": CONTEXT_BUILDER_PARAM_DESCRIPTION, "default": "",
"graphInherit": {"port": 0, "kind": "primaryTextRef"}},
{"name": "to", "type": "str", "required": False, "frontendType": "text",
"description": t("Empfänger (komma-separiert, optional für Entwurf)"), "default": ""},
{"name": "documentList", "type": "str", "required": False, "frontendType": "hidden",
"description": t("Anhang-Dokumente (via Wire oder DataRef)"), "default": "",
"graphInherit": {"port": 0, "kind": "documentListWire"}},
{"name": "emailContent", "type": "str", "required": False, "frontendType": "hidden",
"description": t("Direkt vorbereiteter Inhalt {subject, body, to} (via Wire — überspringt KI)"),
"default": ""},
{"name": "emailStyle", "type": "str", "required": False, "frontendType": "select",
"frontendOptions": {"options": ["formal", "casual", "business"]},
"description": t("Stil"), "default": "business"},
],
"inputs": 1,
"outputs": 1,
"inputPorts": {0: {"accepts": ["EmailDraft", "AiResult", "Transit", "ConsolidateResult", "DocumentList"]}},
"outputPorts": {0: {"schema": "ActionResult"}},
"outputPorts": {0: {"schema": "ActionResult", "dataPickOptions": ACTION_RESULT_DATA_PICK_OPTIONS}},
"paramMappers": ["emailDraftContextFromSubjectBody"],
"meta": {"icon": "mdi-email-edit", "color": "#1976D2", "usesAi": False},
"_method": "outlook",
"_action": "composeAndDraftEmailWithContext",
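Both email nodes now take a single filter/query expression ('from:max@example.com hasAttachments:true Rechnung') that the paramMappers emailCheckFilter / emailSearchQuery translate for the mail API. The mapper names come from the diff; the tokenizer below is only an assumption about how such an expression could be parsed:

```python
def parse_email_filter(expression: str) -> dict:
    """Split 'key:value' tokens from free text, e.g.
    'from:max@example.com hasAttachments:true Rechnung'
    -> {'from': 'max@example.com', 'hasAttachments': True, 'text': 'Rechnung'}."""
    criteria: dict = {}
    free_text: list = []
    for token in expression.split():
        key, sep, value = token.partition(":")
        if sep and key and value:
            if value.lower() in {"true", "false"}:
                criteria[key] = value.lower() == "true"  # boolean flags
            else:
                criteria[key] = value
        else:
            free_text.append(token)  # anything without ':' is a search term
    if free_text:
        criteria["text"] = " ".join(free_text)
    return criteria

print(parse_email_filter("from:max@example.com hasAttachments:true Rechnung"))
```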
@ -3,33 +3,38 @@

from modules.shared.i18nRegistry import t

from modules.features.graphicalEditor.nodeDefinitions.contextPickerHelp import (
CONTEXT_BUILDER_PARAM_DESCRIPTION,
)
from modules.features.graphicalEditor.nodeDefinitions.ai import DOCUMENT_LIST_DATA_PICK_OPTIONS

FILE_NODES = [
{
"id": "file.create",
"category": "file",
"label": t("Datei erstellen"),
"description": t("Erstellt eine Datei aus Kontext (Text/Markdown von KI)."),
"description": t(
"Erstellt eine Datei aus Kontext. Nach „Inhalt extrahieren“: „response“ für reinen Text; "
"„Nur Bilder“ liefert alle extrahierten Bilder — Datei erstellen fasst sie zu einer PDF oder DOCX "
"(Ausgabeformat pdf oder docx wählen)."
),
"parameters": [
{"name": "contentSources", "type": "json", "required": False, "frontendType": "json",
"description": t("Kontext-Quellen"), "default": []},
{"name": "outputFormat", "type": "string", "required": True, "frontendType": "select",
{"name": "outputFormat", "type": "str", "required": True, "frontendType": "select",
"frontendOptions": {"options": ["docx", "pdf", "txt", "html", "md"]},
"description": t("Ausgabeformat"), "default": "docx"},
{"name": "title", "type": "string", "required": False, "frontendType": "text",
{"name": "title", "type": "str", "required": False, "frontendType": "text",
"description": t("Dokumenttitel")},
{"name": "templateName", "type": "string", "required": False, "frontendType": "select",
"frontendOptions": {"options": ["default", "corporate", "minimal"]},
"description": t("Stil-Vorlage")},
{"name": "language", "type": "string", "required": False, "frontendType": "select",
"frontendOptions": {"options": ["de", "en", "fr"]},
"description": t("Sprache"), "default": "de"},
{"name": "context", "type": "string", "required": False, "frontendType": "hidden",
"description": t("Inhalt (via Wire oder DataRef)"), "default": ""},
{"name": "folderId", "type": "str", "required": False, "frontendType": "userFileFolder",
"description": t("Zielordner in Meine Dateien"),
"default": ""},
{"name": "context", "type": "Any", "required": False, "frontendType": "contextBuilder",
"description": CONTEXT_BUILDER_PARAM_DESCRIPTION, "default": "",
"graphInherit": {"port": 0, "kind": "primaryTextRef"}},
],
"inputs": 1,
"outputs": 1,
"inputPorts": {0: {"accepts": ["AiResult", "TextResult", "Transit"]}},
"outputPorts": {0: {"schema": "DocumentList"}},
"inputPorts": {0: {"accepts": ["AiResult", "TextResult", "Transit", "FormPayload", "LoopItem", "ActionResult"]}},
"outputPorts": {0: {"schema": "DocumentList", "dataPickOptions": DOCUMENT_LIST_DATA_PICK_OPTIONS}},
"meta": {"icon": "mdi-file-plus-outline", "color": "#2196F3", "usesAi": False},
"_method": "file",
"_action": "create",
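The new file.create description says the "Nur Bilder" pick from "Inhalt extrahieren" can be bundled into a single PDF. A minimal Pillow sketch of that bundling step, assuming each picked document carries raw image bytes in a 'content' field; names and field layout are illustrative, not the node's implementation:

```python
import io
from PIL import Image  # Pillow

def images_to_pdf(image_documents: list, out_path: str) -> None:
    """Bundle a list of {'content': bytes} image documents into one PDF."""
    pages = [
        Image.open(io.BytesIO(doc["content"])).convert("RGB")
        for doc in image_documents
    ]
    if not pages:
        raise ValueError("no images to bundle")
    # Pillow writes multi-page PDFs via save_all + append_images.
    pages[0].save(out_path, format="PDF", save_all=True, append_images=pages[1:])
```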
@ -3,25 +3,101 @@

from modules.shared.i18nRegistry import t

LOOP_ITEM_DATA_PICK_OPTIONS = [
{
"path": ["currentItem"],
"pickerLabel": t("Aktuelles Element"),
"detail": t("Das aktuelle Iterationselement."),
"recommended": True,
"type": "Any",
},
{
"path": ["currentIndex"],
"pickerLabel": t("Aktueller Index"),
"detail": t("0-basierter Index der aktuellen Iteration."),
"recommended": False,
"type": "int",
},
{
"path": ["items"],
"pickerLabel": t("Alle Elemente"),
"detail": t("Die vollständige Quellliste."),
"recommended": False,
"type": "List[Any]",
},
{
"path": ["count"],
"pickerLabel": t("Gesamtanzahl"),
"detail": t("Anzahl der Elemente in der Schleife."),
"recommended": False,
"type": "int",
},
]

MERGE_RESULT_DATA_PICK_OPTIONS = [
{
"path": ["merged"],
"pickerLabel": t("Zusammengeführt"),
"detail": t("Zusammengeführtes Ergebnis (je nach Modus)."),
"recommended": True,
"type": "Dict",
},
{
"path": ["first"],
"pickerLabel": t("Erster Zweig"),
"detail": t("Daten vom ersten verbundenen Eingang (Modus „first“)."),
"recommended": False,
"type": "Any",
},
{
"path": ["inputs"],
"pickerLabel": t("Alle Eingänge"),
"detail": t("Dict der Eingabeobjekte nach Port-Index."),
"recommended": False,
"type": "Dict[int,Any]",
},
]

# Ports, die typische Schritt-Ausgaben durchreichen (nicht nur leerer Transit).
_FLOW_INPUT_SCHEMAS = [
"Transit",
"FormPayload",
"AiResult",
"TextResult",
"ActionResult",
"DocumentList",
"FileList",
"EmailList",
"TaskList",
"QueryResult",
"MergeResult",
"LoopItem",
"BoolResult",
"UdmDocument",
]

FLOW_NODES = [
{
"id": "flow.ifElse",
"category": "flow",
"label": t("Wenn / Sonst"),
"description": t("Verzweigung nach Bedingung"),
"description": t(
"Verzweigt anhand einer Bedingung auf ein vorheriges Feld oder einen Ausdruck. "
"Die Daten vom Eingangskanal werden an den gewählten Ausgang durchgereicht."
),
"parameters": [
{
"name": "condition",
"type": "string",
"type": "json",
"required": True,
"frontendType": "condition",
"description": t("Bedingung"),
"description": t("Bedingung: Feld aus einem vorherigen Schritt und Vergleich"),
},
],
"inputs": 1,
"outputs": 2,
"outputLabels": [t("Ja"), t("Nein")],
"inputPorts": {0: {"accepts": ["Transit"]}},
"inputPorts": {0: {"accepts": list(_FLOW_INPUT_SCHEMAS)}},
"outputPorts": {0: {"schema": "Transit"}, 1: {"schema": "Transit"}},
"executor": "flow",
"meta": {"icon": "mdi-source-branch", "color": "#FF9800", "usesAi": False},
@ -30,26 +106,29 @@ FLOW_NODES = [
"id": "flow.switch",
"category": "flow",
"label": t("Switch"),
"description": t("Mehrere Zweige nach Wert"),
"description": t(
"Mehrere Zweige nach einem Wert aus einem vorherigen Schritt (Data Picker). "
"Definiere Fälle mit Vergleichsoperator; der Eingang wird an den ersten passenden Zweig durchgereicht."
),
"parameters": [
{
"name": "value",
"type": "string",
"type": "Any",
"required": True,
"frontendType": "text",
"description": t("Zu vergleichender Wert"),
"frontendType": "dataRef",
"description": t("Wert zum Vergleichen (Feld aus einem vorherigen Schritt)"),
},
{
"name": "cases",
"type": "array",
"required": False,
"frontendType": "caseList",
"description": t("Fälle"),
"description": t("Fälle: Operator und Vergleichswert"),
},
],
"inputs": 1,
"outputs": 1,
"inputPorts": {0: {"accepts": ["Transit"]}},
"inputPorts": {0: {"accepts": list(_FLOW_INPUT_SCHEMAS)}},
"outputPorts": {0: {"schema": "Transit"}},
"executor": "flow",
"meta": {"icon": "mdi-swap-horizontal", "color": "#FF9800", "usesAi": False},
@ -57,39 +136,45 @@ FLOW_NODES = [
{
"id": "flow.loop",
"category": "flow",
"label": t("Schleife / Für Jedes"),
"description": t("Über Array-Elemente oder UDM-Strukturebenen iterieren"),
"label": t("Schleife / Für jedes"),
"description": t(
"Iteriert über ein Array aus einem vorherigen Schritt (z. B. Dokumente, Zeilen, Listeneinträge). "
"Optional: UDM-Ebene für strukturierte Dokumente."
),
"parameters": [
{
"name": "items",
"type": "string",
"type": "Any",
"required": True,
"frontendType": "text",
"description": t("Pfad zum Array"),
"frontendType": "dataRef",
"description": t("Liste oder Sammlung zum Durchlaufen (im Data Picker wählen)"),
},
{
"name": "level",
"type": "string",
"type": "str",
"required": False,
"frontendType": "select",
"frontendOptions": {"options": ["auto", "documents", "structuralNodes", "contentBlocks"]},
"description": t("UDM-Iterationsebene"),
"description": t("Nur bei UDM-Daten: welche Strukturebene als Elemente verwendet wird"),
"default": "auto",
},
{
"name": "concurrency",
"type": "number",
"type": "int",
"required": False,
"frontendType": "number",
"frontendOptions": {"min": 1, "max": 20},
"description": t("Parallele Iterationen (1 = sequentiell)"),
"description": t("Parallele Durchläufe (1 = nacheinander)"),
"default": 1,
},
],
"inputs": 1,
"outputs": 1,
"inputPorts": {0: {"accepts": ["Transit", "UdmDocument"]}},
"outputPorts": {0: {"schema": "LoopItem"}},
"inputPorts": {0: {"accepts": [
"Transit", "UdmDocument", "EmailList", "DocumentList", "FileList", "TaskList",
"ActionResult", "AiResult", "QueryResult", "FormPayload",
]}},
"outputPorts": {0: {"schema": "LoopItem", "dataPickOptions": LOOP_ITEM_DATA_PICK_OPTIONS}},
"executor": "flow",
"meta": {"icon": "mdi-repeat", "color": "#FF9800", "usesAi": False},
},
@ -97,31 +182,37 @@ FLOW_NODES = [
"id": "flow.merge",
"category": "flow",
"label": t("Zusammenführen"),
"description": t("Mehrere Zweige zusammenführen (2-5 Eingänge)"),
"description": t(
"Führt 2–5 Zweige zusammen, wenn alle verbunden sind. "
"Modus legt fest, wie die Eingabeobjekte im Ergebnis kombiniert werden."
),
"parameters": [
{
"name": "mode",
"type": "string",
"type": "str",
"required": False,
"frontendType": "select",
"frontendOptions": {"options": ["first", "all", "append"]},
"description": t("Zusammenführungsmodus"),
"description": t("first: erster Zweig; all: Dict-Felder zusammenführen; append: Listen anhängen"),
"default": "first",
},
{
"name": "inputCount",
"type": "number",
"type": "int",
"required": False,
"frontendType": "number",
"frontendOptions": {"min": 2, "max": 5},
"description": t("Anzahl Eingänge"),
"description": t("Anzahl Eingänge dieses Nodes (2–5)"),
"default": 2,
},
],
"inputs": 2,
"outputs": 1,
"inputPorts": {0: {"accepts": ["Transit"]}, 1: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "MergeResult"}},
"inputPorts": {
0: {"accepts": list(_FLOW_INPUT_SCHEMAS)},
1: {"accepts": list(_FLOW_INPUT_SCHEMAS)},
},
"outputPorts": {0: {"schema": "MergeResult", "dataPickOptions": MERGE_RESULT_DATA_PICK_OPTIONS}},
"executor": "flow",
"meta": {"icon": "mdi-call-merge", "color": "#FF9800", "usesAi": False},
},
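flow.loop's concurrency parameter (1 = nacheinander, capped at 20) is a classic semaphore-bounded fan-out. A sketch with asyncio that mirrors the parameter's semantics, assuming each iteration runs an async step body; this is not the flow executor itself:

```python
import asyncio

async def run_loop(items: list, body, concurrency: int = 1) -> list:
    """Run body(item, index) for every item, at most `concurrency` at a time."""
    semaphore = asyncio.Semaphore(max(1, concurrency))

    async def guarded(index: int, item):
        async with semaphore:
            return await body(item, index)

    # gather preserves input order regardless of completion order.
    return await asyncio.gather(*(guarded(i, item) for i, item in enumerate(items)))

async def demo():
    async def body(item, index):
        await asyncio.sleep(0.01)
        return {"currentItem": item, "currentIndex": index}  # LoopItem-shaped

    print(await run_loop(["a", "b", "c"], body, concurrency=2))

asyncio.run(demo())
```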
@ -3,6 +3,47 @@

from modules.shared.i18nRegistry import t

from modules.features.graphicalEditor.nodeDefinitions.ai import DOCUMENT_LIST_DATA_PICK_OPTIONS

BOOL_RESULT_DATA_PICK_OPTIONS = [
{
"path": ["result"],
"pickerLabel": t("Ergebnis"),
"detail": t("Boolesches Ergebnis (z. B. Genehmigung ja/nein)."),
"recommended": True,
"type": "bool",
},
{
"path": ["reason"],
"pickerLabel": t("Begründung"),
"detail": t("Optionale textuelle Begründung."),
"recommended": False,
"type": "str",
},
]

TEXT_RESULT_DATA_PICK_OPTIONS = [
{
"path": ["text"],
"pickerLabel": t("Text"),
"detail": t("Vom Benutzer eingegebener oder gewählter Text."),
"recommended": True,
"type": "str",
},
]

# Canonical form field types — single source of truth.
# portType maps to the PORT_TYPE_CATALOG primitive used by DataPicker / validateGraph.
FORM_FIELD_TYPES = [
{"id": "text", "label": "Text (einzeilig)", "portType": "str"},
{"id": "textarea", "label": "Text (mehrzeilig)", "portType": "str"},
{"id": "number", "label": "Zahl", "portType": "int"},
{"id": "boolean", "label": "Ja/Nein", "portType": "bool"},
{"id": "date", "label": "Datum", "portType": "str"},
{"id": "email", "label": "E-Mail", "portType": "str"},
{"id": "select", "label": "Auswahl", "portType": "str"},
]

INPUT_NODES = [
{
"id": "input.form",
@ -22,7 +63,7 @@ INPUT_NODES = [
"inputs": 1,
"outputs": 1,
"inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "FormPayload", "dynamic": True, "deriveFrom": "fields"}},
"outputPorts": {0: {"schema": {"kind": "fromGraph", "parameter": "fields"}}},
"executor": "input",
"meta": {"icon": "mdi-form-textbox", "color": "#9C27B0", "usesAi": False},
},
@ -32,18 +73,18 @@ INPUT_NODES = [
"label": t("Genehmigung"),
"description": t("Benutzer genehmigt oder lehnt ab"),
"parameters": [
{"name": "title", "type": "string", "required": True, "frontendType": "text",
{"name": "title", "type": "str", "required": True, "frontendType": "text",
"description": t("Genehmigungstitel")},
{"name": "description", "type": "string", "required": False, "frontendType": "textarea",
{"name": "description", "type": "str", "required": False, "frontendType": "textarea",
"description": t("Was genehmigt werden soll")},
{"name": "approvalType", "type": "string", "required": False, "frontendType": "select",
{"name": "approvalType", "type": "str", "required": False, "frontendType": "select",
"frontendOptions": {"options": ["generic", "document"]},
"description": t("Typ: document oder generic"), "default": "generic"},
],
"inputs": 1,
"outputs": 1,
"inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "BoolResult"}},
"outputPorts": {0: {"schema": "BoolResult", "dataPickOptions": BOOL_RESULT_DATA_PICK_OPTIONS}},
"executor": "input",
"meta": {"icon": "mdi-check-decagram", "color": "#4CAF50", "usesAi": False},
},
@ -53,20 +94,20 @@ INPUT_NODES = [
"label": t("Upload"),
"description": t("Benutzer lädt Datei(en) hoch"),
"parameters": [
{"name": "accept", "type": "string", "required": False, "frontendType": "text",
{"name": "accept", "type": "str", "required": False, "frontendType": "text",
"description": t("Accept-String"), "default": ""},
{"name": "allowedTypes", "type": "json", "required": False, "frontendType": "multiselect",
"frontendOptions": {"options": ["pdf", "docx", "xlsx", "pptx", "txt", "csv", "jpg", "png", "gif"]},
"description": t("Ausgewählte Dateitypen"), "default": []},
{"name": "maxSize", "type": "number", "required": False, "frontendType": "number",
{"name": "maxSize", "type": "int", "required": False, "frontendType": "number",
"description": t("Max. Dateigröße in MB"), "default": 10},
{"name": "multiple", "type": "boolean", "required": False, "frontendType": "checkbox",
{"name": "multiple", "type": "bool", "required": False, "frontendType": "checkbox",
"description": t("Mehrere Dateien erlauben"), "default": False},
],
"inputs": 1,
"outputs": 1,
"inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "DocumentList"}},
"outputPorts": {0: {"schema": "DocumentList", "dataPickOptions": DOCUMENT_LIST_DATA_PICK_OPTIONS}},
"executor": "input",
"meta": {"icon": "mdi-upload", "color": "#2196F3", "usesAi": False},
},
@ -76,15 +117,15 @@ INPUT_NODES = [
"label": t("Kommentar"),
"description": t("Benutzer fügt einen Kommentar hinzu"),
"parameters": [
{"name": "placeholder", "type": "string", "required": False, "frontendType": "text",
{"name": "placeholder", "type": "str", "required": False, "frontendType": "text",
"description": t("Platzhalter"), "default": ""},
{"name": "required", "type": "boolean", "required": False, "frontendType": "checkbox",
{"name": "required", "type": "bool", "required": False, "frontendType": "checkbox",
"description": t("Kommentar erforderlich"), "default": True},
],
"inputs": 1,
"outputs": 1,
"inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "TextResult"}},
"outputPorts": {0: {"schema": "TextResult", "dataPickOptions": TEXT_RESULT_DATA_PICK_OPTIONS}},
"executor": "input",
"meta": {"icon": "mdi-comment-text", "color": "#FF9800", "usesAi": False},
},
@ -94,16 +135,16 @@ INPUT_NODES = [
"label": t("Prüfung"),
"description": t("Benutzer prüft Inhalt"),
"parameters": [
{"name": "contentRef", "type": "string", "required": True, "frontendType": "text",
{"name": "contentRef", "type": "str", "required": True, "frontendType": "text",
"description": t("Referenz auf Inhalt")},
{"name": "reviewType", "type": "string", "required": False, "frontendType": "select",
{"name": "reviewType", "type": "str", "required": False, "frontendType": "select",
"frontendOptions": {"options": ["generic", "document"]},
"description": t("Art der Prüfung"), "default": "generic"},
],
"inputs": 1,
"outputs": 1,
"inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "BoolResult"}},
"outputPorts": {0: {"schema": "BoolResult", "dataPickOptions": BOOL_RESULT_DATA_PICK_OPTIONS}},
"executor": "input",
"meta": {"icon": "mdi-magnify-scan", "color": "#673AB7", "usesAi": False},
},
@ -115,13 +156,13 @@ INPUT_NODES = [
"parameters": [
{"name": "options", "type": "json", "required": True, "frontendType": "keyValueRows",
"description": t("Optionen"), "default": []},
{"name": "multiple", "type": "boolean", "required": False, "frontendType": "checkbox",
{"name": "multiple", "type": "bool", "required": False, "frontendType": "checkbox",
"description": t("Mehrfachauswahl erlauben"), "default": False},
],
"inputs": 1,
"outputs": 1,
"inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "TextResult"}},
"outputPorts": {0: {"schema": "TextResult", "dataPickOptions": TEXT_RESULT_DATA_PICK_OPTIONS}},
"executor": "input",
"meta": {"icon": "mdi-format-list-checks", "color": "#009688", "usesAi": False},
},
@ -131,17 +172,17 @@ INPUT_NODES = [
"label": t("Bestätigung"),
"description": t("Benutzer bestätigt Ja/Nein"),
"parameters": [
{"name": "question", "type": "string", "required": True, "frontendType": "text",
{"name": "question", "type": "str", "required": True, "frontendType": "text",
"description": t("Zu bestätigende Frage")},
{"name": "confirmLabel", "type": "string", "required": False, "frontendType": "text",
{"name": "confirmLabel", "type": "str", "required": False, "frontendType": "text",
"description": t("Label für Bestätigen-Button"), "default": "Confirm"},
{"name": "rejectLabel", "type": "string", "required": False, "frontendType": "text",
{"name": "rejectLabel", "type": "str", "required": False, "frontendType": "text",
"description": t("Label für Ablehnen-Button"), "default": "Reject"},
],
"inputs": 1,
"outputs": 1,
"inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "BoolResult"}},
"outputPorts": {0: {"schema": "BoolResult", "dataPickOptions": BOOL_RESULT_DATA_PICK_OPTIONS}},
"executor": "input",
"meta": {"icon": "mdi-checkbox-marked-circle", "color": "#8BC34A", "usesAi": False},
},
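input.form now declares its output schema as {"kind": "fromGraph", "parameter": "fields"}, i.e. the port schema is derived from the configured fields via the portType column of FORM_FIELD_TYPES. A sketch of that derivation; `derive_form_schema` is a hypothetical helper name, and the table is trimmed to the id/portType columns:

```python
FORM_FIELD_TYPES = [
    {"id": "text", "portType": "str"},
    {"id": "textarea", "portType": "str"},
    {"id": "number", "portType": "int"},
    {"id": "boolean", "portType": "bool"},
    {"id": "date", "portType": "str"},
    {"id": "email", "portType": "str"},
    {"id": "select", "portType": "str"},
]
_PORT_TYPE_BY_FIELD = {entry["id"]: entry["portType"] for entry in FORM_FIELD_TYPES}

def derive_form_schema(fields: list) -> dict:
    """Map configured form fields to the port types the DataPicker offers."""
    return {
        field["name"]: _PORT_TYPE_BY_FIELD.get(field.get("type", "text"), "str")
        for field in fields
    }

print(derive_form_schema([
    {"name": "age", "type": "number"},
    {"name": "consent", "type": "boolean"},
]))  # {'age': 'int', 'consent': 'bool'}
```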
@ -4,6 +4,21 @@

from modules.shared.i18nRegistry import t

from modules.features.graphicalEditor.nodeDefinitions.ai import ACTION_RESULT_DATA_PICK_OPTIONS

# Typed FeatureInstance binding (replaces legacy `string, hidden`).
# - type FeatureInstanceRef[redmine] is filtered by the DataPicker.
# - frontendType "featureInstance" is rendered by FeatureInstancePicker which
#   loads /options/feature.instance?featureCode=redmine for the current mandate.
_REDMINE_INSTANCE_PARAM = {
"name": "featureInstanceId",
"type": "FeatureInstanceRef[redmine]",
"required": True,
"frontendType": "featureInstance",
"frontendOptions": {"featureCode": "redmine"},
"description": t("Redmine-Mandant"),
}

REDMINE_NODES = [
{
"id": "redmine.readTicket",
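The hunks below splice dict(_REDMINE_INSTANCE_PARAM) into each node's parameter list. The dict() call matters: every node gets its own shallow copy, so a per-node override can never mutate the shared template. A short illustration:

```python
_REDMINE_INSTANCE_PARAM = {"name": "featureInstanceId", "required": True}

node_a_params = [dict(_REDMINE_INSTANCE_PARAM)]
node_b_params = [dict(_REDMINE_INSTANCE_PARAM)]

node_a_params[0]["required"] = False        # per-node tweak
assert _REDMINE_INSTANCE_PARAM["required"]  # shared template untouched
assert node_b_params[0]["required"]         # other nodes unaffected
```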
@ -11,15 +26,14 @@ REDMINE_NODES = [
"label": t("Ticket lesen"),
"description": t("Einzelnes Redmine-Ticket aus dem Mirror laden."),
"parameters": [
{"name": "featureInstanceId", "type": "string", "required": True, "frontendType": "hidden",
"description": t("Redmine Feature-Instanz-ID")},
{"name": "ticketId", "type": "number", "required": True, "frontendType": "number",
dict(_REDMINE_INSTANCE_PARAM),
{"name": "ticketId", "type": "int", "required": True, "frontendType": "number",
"description": t("Redmine-Ticket-ID")},
],
"inputs": 1,
"outputs": 1,
"inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "ActionResult"}},
"outputPorts": {0: {"schema": "ActionResult", "dataPickOptions": ACTION_RESULT_DATA_PICK_OPTIONS}},
"meta": {"icon": "mdi-ticket-outline", "color": "#4A6FA5", "usesAi": False},
"_method": "redmine",
"_action": "readTicket",
@ -30,25 +44,24 @@ REDMINE_NODES = [
"label": t("Tickets auflisten"),
"description": t("Tickets aus dem lokalen Mirror mit Filtern (Tracker, Status, Zeitraum, Zuweisung)."),
"parameters": [
{"name": "featureInstanceId", "type": "string", "required": True, "frontendType": "hidden",
"description": t("Redmine Feature-Instanz-ID")},
{"name": "trackerIds", "type": "string", "required": False, "frontendType": "text",
dict(_REDMINE_INSTANCE_PARAM),
{"name": "trackerIds", "type": "str", "required": False, "frontendType": "text",
"description": t("Tracker-IDs (Komma-separiert)"), "default": ""},
{"name": "status", "type": "string", "required": False, "frontendType": "text",
{"name": "status", "type": "str", "required": False, "frontendType": "text",
"description": t("Status-Filter: open | closed | *"), "default": "*"},
{"name": "dateFrom", "type": "string", "required": False, "frontendType": "date",
{"name": "dateFrom", "type": "str", "required": False, "frontendType": "date",
"description": t("Zeitraum ab (ISO-Datum)"), "default": ""},
{"name": "dateTo", "type": "string", "required": False, "frontendType": "date",
{"name": "dateTo", "type": "str", "required": False, "frontendType": "date",
"description": t("Zeitraum bis (ISO-Datum)"), "default": ""},
{"name": "assignedToId", "type": "number", "required": False, "frontendType": "number",
{"name": "assignedToId", "type": "int", "required": False, "frontendType": "number",
"description": t("Nur Tickets dieses Benutzers (ID)")},
{"name": "limit", "type": "number", "required": False, "frontendType": "number",
{"name": "limit", "type": "int", "required": False, "frontendType": "number",
"description": t("Max. Anzahl Tickets (1-500)"), "default": 100},
],
"inputs": 1,
"outputs": 1,
"inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "ActionResult"}},
"outputPorts": {0: {"schema": "ActionResult", "dataPickOptions": ACTION_RESULT_DATA_PICK_OPTIONS}},
"meta": {"icon": "mdi-format-list-bulleted", "color": "#4A6FA5", "usesAi": False},
"_method": "redmine",
"_action": "listTickets",
@ -59,29 +72,28 @@ REDMINE_NODES = [
"label": t("Ticket erstellen"),
"description": t("Neues Ticket in Redmine anlegen. Mirror wird sofort aktualisiert."),
"parameters": [
{"name": "featureInstanceId", "type": "string", "required": True, "frontendType": "hidden",
"description": t("Redmine Feature-Instanz-ID")},
{"name": "subject", "type": "string", "required": True, "frontendType": "text",
dict(_REDMINE_INSTANCE_PARAM),
{"name": "subject", "type": "str", "required": True, "frontendType": "text",
"description": t("Ticket-Titel")},
{"name": "trackerId", "type": "number", "required": True, "frontendType": "number",
{"name": "trackerId", "type": "int", "required": True, "frontendType": "number",
"description": t("Tracker-ID (Userstory, Feature, Task, ...)")},
{"name": "description", "type": "string", "required": False, "frontendType": "textarea",
{"name": "description", "type": "str", "required": False, "frontendType": "textarea",
"description": t("Ticket-Beschreibung"), "default": ""},
{"name": "statusId", "type": "number", "required": False, "frontendType": "number",
{"name": "statusId", "type": "int", "required": False, "frontendType": "number",
"description": t("Status-ID (optional)")},
{"name": "priorityId", "type": "number", "required": False, "frontendType": "number",
{"name": "priorityId", "type": "int", "required": False, "frontendType": "number",
"description": t("Prioritaet-ID (optional)")},
{"name": "assignedToId", "type": "number", "required": False, "frontendType": "number",
{"name": "assignedToId", "type": "int", "required": False, "frontendType": "number",
"description": t("Zugewiesene Benutzer-ID (optional)")},
{"name": "parentIssueId", "type": "number", "required": False, "frontendType": "number",
{"name": "parentIssueId", "type": "int", "required": False, "frontendType": "number",
"description": t("Uebergeordnetes Ticket (optional)")},
{"name": "customFields", "type": "string", "required": False, "frontendType": "textarea",
{"name": "customFields", "type": "str", "required": False, "frontendType": "textarea",
"description": t("Custom Fields als JSON {id: value}"), "default": ""},
],
"inputs": 1,
"outputs": 1,
"inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "ActionResult"}},
"outputPorts": {0: {"schema": "ActionResult", "dataPickOptions": ACTION_RESULT_DATA_PICK_OPTIONS}},
"meta": {"icon": "mdi-ticket-plus-outline", "color": "#4A6FA5", "usesAi": False},
"_method": "redmine",
"_action": "createTicket",
@ -92,33 +104,32 @@ REDMINE_NODES = [
"label": t("Ticket bearbeiten"),
"description": t("Felder eines Redmine-Tickets aktualisieren. Nur gesetzte Felder werden uebertragen."),
"parameters": [
{"name": "featureInstanceId", "type": "string", "required": True, "frontendType": "hidden",
"description": t("Redmine Feature-Instanz-ID")},
{"name": "ticketId", "type": "number", "required": True, "frontendType": "number",
dict(_REDMINE_INSTANCE_PARAM),
{"name": "ticketId", "type": "int", "required": True, "frontendType": "number",
"description": t("Ticket-ID")},
{"name": "subject", "type": "string", "required": False, "frontendType": "text",
{"name": "subject", "type": "str", "required": False, "frontendType": "text",
"description": t("Neuer Titel")},
{"name": "description", "type": "string", "required": False, "frontendType": "textarea",
{"name": "description", "type": "str", "required": False, "frontendType": "textarea",
"description": t("Neue Beschreibung")},
{"name": "trackerId", "type": "number", "required": False, "frontendType": "number",
{"name": "trackerId", "type": "int", "required": False, "frontendType": "number",
"description": t("Neuer Tracker")},
{"name": "statusId", "type": "number", "required": False, "frontendType": "number",
{"name": "statusId", "type": "int", "required": False, "frontendType": "number",
"description": t("Neuer Status")},
{"name": "priorityId", "type": "number", "required": False, "frontendType": "number",
{"name": "priorityId", "type": "int", "required": False, "frontendType": "number",
"description": t("Neue Prioritaet")},
{"name": "assignedToId", "type": "number", "required": False, "frontendType": "number",
{"name": "assignedToId", "type": "int", "required": False, "frontendType": "number",
"description": t("Neue Zuweisung")},
{"name": "parentIssueId", "type": "number", "required": False, "frontendType": "number",
{"name": "parentIssueId", "type": "int", "required": False, "frontendType": "number",
"description": t("Neues Parent-Ticket")},
{"name": "notes", "type": "string", "required": False, "frontendType": "textarea",
{"name": "notes", "type": "str", "required": False, "frontendType": "textarea",
"description": t("Kommentar (Journal-Eintrag)"), "default": ""},
{"name": "customFields", "type": "string", "required": False, "frontendType": "textarea",
{"name": "customFields", "type": "str", "required": False, "frontendType": "textarea",
"description": t("Custom Fields als JSON {id: value}"), "default": ""},
],
"inputs": 1,
"outputs": 1,
"inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "ActionResult"}},
"outputPorts": {0: {"schema": "ActionResult", "dataPickOptions": ACTION_RESULT_DATA_PICK_OPTIONS}},
"meta": {"icon": "mdi-ticket-confirmation-outline", "color": "#4A6FA5", "usesAi": False},
"_method": "redmine",
"_action": "updateTicket",
@ -129,21 +140,20 @@ REDMINE_NODES = [
"label": t("Statistik laden"),
"description": t("Aggregierte Kennzahlen (KPIs, Durchsatz, Status-Verteilung, Backlog) aus dem Mirror."),
"parameters": [
{"name": "featureInstanceId", "type": "string", "required": True, "frontendType": "hidden",
"description": t("Redmine Feature-Instanz-ID")},
{"name": "dateFrom", "type": "string", "required": False, "frontendType": "date",
dict(_REDMINE_INSTANCE_PARAM),
{"name": "dateFrom", "type": "str", "required": False, "frontendType": "date",
"description": t("Zeitraum ab")},
{"name": "dateTo", "type": "string", "required": False, "frontendType": "date",
{"name": "dateTo", "type": "str", "required": False, "frontendType": "date",
"description": t("Zeitraum bis")},
{"name": "bucket", "type": "string", "required": False, "frontendType": "text",
{"name": "bucket", "type": "str", "required": False, "frontendType": "text",
"description": t("Bucket: day | week | month"), "default": "week"},
{"name": "trackerIds", "type": "string", "required": False, "frontendType": "text",
{"name": "trackerIds", "type": "str", "required": False, "frontendType": "text",
"description": t("Tracker-IDs (Komma-separiert)"), "default": ""},
],
"inputs": 1,
"outputs": 1,
"inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "ActionResult"}},
"outputPorts": {0: {"schema": "ActionResult", "dataPickOptions": ACTION_RESULT_DATA_PICK_OPTIONS}},
"meta": {"icon": "mdi-chart-bar", "color": "#4A6FA5", "usesAi": False},
"_method": "redmine",
"_action": "getStats",
@ -154,15 +164,14 @@ REDMINE_NODES = [
"label": t("Mirror synchronisieren"),
"description": t("Tickets und Beziehungen aus Redmine in den lokalen Mirror uebernehmen."),
"parameters": [
{"name": "featureInstanceId", "type": "string", "required": True, "frontendType": "hidden",
"description": t("Redmine Feature-Instanz-ID")},
{"name": "force", "type": "boolean", "required": False, "frontendType": "checkbox",
dict(_REDMINE_INSTANCE_PARAM),
{"name": "force", "type": "bool", "required": False, "frontendType": "checkbox",
"description": t("Vollsync erzwingen (ignoriert lastSyncAt)"), "default": False},
],
"inputs": 1,
"outputs": 1,
"inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "ActionResult"}},
"outputPorts": {0: {"schema": "ActionResult", "dataPickOptions": ACTION_RESULT_DATA_PICK_OPTIONS}},
"meta": {"icon": "mdi-database-sync", "color": "#4A6FA5", "usesAi": False},
"_method": "redmine",
"_action": "runSync",
@ -3,6 +3,35 @@
from modules.shared.i18nRegistry import t

from modules.features.graphicalEditor.nodeDefinitions.ai import (
    ACTION_RESULT_DATA_PICK_OPTIONS,
    DOCUMENT_LIST_DATA_PICK_OPTIONS,
)

FILE_LIST_DATA_PICK_OPTIONS = [
    {
        "path": ["files"],
        "pickerLabel": t("Alle Dateien"),
        "detail": t("Die vollständige Dateiliste."),
        "recommended": True,
        "type": "List[FileItem]",
    },
    {
        "path": ["files", 0],
        "pickerLabel": t("Erste Datei"),
        "detail": t("Das erste Listenelement."),
        "recommended": False,
        "type": "FileItem",
    },
    {
        "path": ["count"],
        "pickerLabel": t("Anzahl"),
        "detail": t("Anzahl der Dateien."),
        "recommended": False,
        "type": "int",
    },
]
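Each entry above pairs a JSON-style path with picker metadata; on the consuming side, resolving a pick is just a walk along that path. A minimal sketch of such a resolver (hypothetical helper, not part of this change; the payload shape is illustrative):

    def resolvePickPath(payload, path):
        # Walk a dataPickOptions "path" into a producer's output payload:
        # str segments index dicts, int segments index lists.
        cur = payload
        for step in path:
            cur = cur[step]
        return cur

    # resolvePickPath({"files": [{"name": "a.pdf"}], "count": 1}, ["files", 0])
    # -> {"name": "a.pdf"}   (the "Erste Datei" pick)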

SHAREPOINT_NODES = [
{
"id": "sharepoint.findFile",

@ -10,20 +39,20 @@ SHAREPOINT_NODES = [
"label": t("Datei finden"),
"description": t("Datei nach Pfad oder Suche finden"),
"parameters": [
{"name": "connectionReference", "type": "string", "required": True, "frontendType": "userConnection",
{"name": "connectionReference", "type": "str", "required": True, "frontendType": "userConnection",
"frontendOptions": {"authority": "msft"},
"description": t("SharePoint-Verbindung")},
{"name": "searchQuery", "type": "string", "required": True, "frontendType": "text",
{"name": "searchQuery", "type": "str", "required": True, "frontendType": "text",
"description": t("Suchanfrage oder Pfad")},
{"name": "site", "type": "string", "required": False, "frontendType": "text",
{"name": "site", "type": "str", "required": False, "frontendType": "text",
"description": t("Optionaler Site-Hinweis"), "default": ""},
{"name": "maxResults", "type": "number", "required": False, "frontendType": "number",
{"name": "maxResults", "type": "int", "required": False, "frontendType": "number",
"description": t("Max Ergebnisse"), "default": 1000},
],
"inputs": 1,
"outputs": 1,
"inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "FileList"}},
"outputPorts": {0: {"schema": "FileList", "dataPickOptions": FILE_LIST_DATA_PICK_OPTIONS}},
"meta": {"icon": "mdi-file-search", "color": "#0078D4", "usesAi": False},
"_method": "sharepoint",
"_action": "findDocumentPath",
@ -34,17 +63,17 @@ SHAREPOINT_NODES = [
"label": t("Datei lesen"),
"description": t("Inhalt aus Datei extrahieren"),
"parameters": [
{"name": "connectionReference", "type": "string", "required": True, "frontendType": "userConnection",
{"name": "connectionReference", "type": "str", "required": True, "frontendType": "userConnection",
"frontendOptions": {"authority": "msft"},
"description": t("SharePoint-Verbindung")},
{"name": "pathQuery", "type": "string", "required": True, "frontendType": "sharepointFile",
{"name": "pathQuery", "type": "str", "required": True, "frontendType": "sharepointFile",
"frontendOptions": {"dependsOn": "connectionReference"},
"description": t("Dateipfad")},
],
"inputs": 1,
"outputs": 1,
"inputPorts": {0: {"accepts": ["FileList", "Transit"]}},
"outputPorts": {0: {"schema": "DocumentList"}},
"inputPorts": {0: {"accepts": ["FileList", "Transit", "LoopItem"]}},
"outputPorts": {0: {"schema": "DocumentList", "dataPickOptions": DOCUMENT_LIST_DATA_PICK_OPTIONS}},
"meta": {"icon": "mdi-file-document", "color": "#0078D4", "usesAi": False},
"_method": "sharepoint",
"_action": "readDocuments",
@ -55,17 +84,19 @@ SHAREPOINT_NODES = [
"label": t("Datei hochladen"),
"description": t("Datei zu SharePoint hochladen"),
"parameters": [
{"name": "connectionReference", "type": "string", "required": True, "frontendType": "userConnection",
{"name": "connectionReference", "type": "str", "required": True, "frontendType": "userConnection",
"frontendOptions": {"authority": "msft"},
"description": t("SharePoint-Verbindung")},
{"name": "pathQuery", "type": "string", "required": True, "frontendType": "sharepointFolder",
{"name": "pathQuery", "type": "str", "required": True, "frontendType": "sharepointFolder",
"frontendOptions": {"dependsOn": "connectionReference"},
"description": t("Zielordner-Pfad")},
{"name": "content", "type": "str", "required": True, "frontendType": "hidden",
"description": t("Datei-Inhalt aus Upstream-Node (via Wire oder DataRef)"), "default": ""},
],
"inputs": 1,
"outputs": 1,
"inputPorts": {0: {"accepts": ["DocumentList", "Transit"]}},
"outputPorts": {0: {"schema": "ActionResult"}},
"outputPorts": {0: {"schema": "ActionResult", "dataPickOptions": ACTION_RESULT_DATA_PICK_OPTIONS}},
"meta": {"icon": "mdi-upload", "color": "#0078D4", "usesAi": False},
"_method": "sharepoint",
"_action": "uploadFile",
@ -76,17 +107,17 @@ SHAREPOINT_NODES = [
"label": t("Dateien auflisten"),
"description": t("Dateien in Ordner auflisten"),
"parameters": [
{"name": "connectionReference", "type": "string", "required": True, "frontendType": "userConnection",
{"name": "connectionReference", "type": "str", "required": True, "frontendType": "userConnection",
"frontendOptions": {"authority": "msft"},
"description": t("SharePoint-Verbindung")},
{"name": "pathQuery", "type": "string", "required": False, "frontendType": "sharepointFolder",
{"name": "pathQuery", "type": "str", "required": False, "frontendType": "sharepointFolder",
"frontendOptions": {"dependsOn": "connectionReference"},
"description": t("Ordnerpfad"), "default": "/"},
],
"inputs": 1,
"outputs": 1,
"inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "FileList"}},
"outputPorts": {0: {"schema": "FileList", "dataPickOptions": FILE_LIST_DATA_PICK_OPTIONS}},
"meta": {"icon": "mdi-folder-open", "color": "#0078D4", "usesAi": False},
"_method": "sharepoint",
"_action": "listDocuments",
@ -97,17 +128,17 @@ SHAREPOINT_NODES = [
"label": t("Datei herunterladen"),
"description": t("Datei vom Pfad herunterladen"),
"parameters": [
{"name": "connectionReference", "type": "string", "required": True, "frontendType": "userConnection",
{"name": "connectionReference", "type": "str", "required": True, "frontendType": "userConnection",
"frontendOptions": {"authority": "msft"},
"description": t("SharePoint-Verbindung")},
{"name": "pathQuery", "type": "string", "required": True, "frontendType": "sharepointFile",
{"name": "pathQuery", "type": "str", "required": True, "frontendType": "sharepointFile",
"frontendOptions": {"dependsOn": "connectionReference"},
"description": t("Vollständiger Dateipfad")},
],
"inputs": 1,
"outputs": 1,
"inputPorts": {0: {"accepts": ["FileList", "Transit"]}},
"outputPorts": {0: {"schema": "DocumentList"}},
"inputPorts": {0: {"accepts": ["FileList", "Transit", "LoopItem"]}},
"outputPorts": {0: {"schema": "DocumentList", "dataPickOptions": DOCUMENT_LIST_DATA_PICK_OPTIONS}},
"meta": {"icon": "mdi-download", "color": "#0078D4", "usesAi": False},
"_method": "sharepoint",
"_action": "downloadFileByPath",
@ -118,20 +149,20 @@ SHAREPOINT_NODES = [
"label": t("Datei kopieren"),
"description": t("Datei an Ziel kopieren"),
"parameters": [
{"name": "connectionReference", "type": "string", "required": True, "frontendType": "userConnection",
{"name": "connectionReference", "type": "str", "required": True, "frontendType": "userConnection",
"frontendOptions": {"authority": "msft"},
"description": t("SharePoint-Verbindung")},
{"name": "sourcePath", "type": "string", "required": True, "frontendType": "sharepointFile",
{"name": "sourcePath", "type": "str", "required": True, "frontendType": "sharepointFile",
"frontendOptions": {"dependsOn": "connectionReference"},
"description": t("Quelldatei-Pfad")},
{"name": "destPath", "type": "string", "required": True, "frontendType": "sharepointFolder",
{"name": "destPath", "type": "str", "required": True, "frontendType": "sharepointFolder",
"frontendOptions": {"dependsOn": "connectionReference"},
"description": t("Zielordner")},
],
"inputs": 1,
"outputs": 1,
"inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "ActionResult"}},
"outputPorts": {0: {"schema": "ActionResult", "dataPickOptions": ACTION_RESULT_DATA_PICK_OPTIONS}},
"meta": {"icon": "mdi-content-copy", "color": "#0078D4", "usesAi": False},
"_method": "sharepoint",
"_action": "copyFile",
@ -3,6 +3,8 @@
from modules.shared.i18nRegistry import t

from modules.features.graphicalEditor.nodeDefinitions.ai import ACTION_RESULT_DATA_PICK_OPTIONS

TRIGGER_NODES = [
{
"id": "trigger.manual",

@ -13,7 +15,7 @@ TRIGGER_NODES = [
"inputs": 0,
"outputs": 1,
"inputPorts": {},
"outputPorts": {0: {"schema": "ActionResult"}},
"outputPorts": {0: {"schema": "ActionResult", "dataPickOptions": ACTION_RESULT_DATA_PICK_OPTIONS}},
"executor": "trigger",
"meta": {"icon": "mdi-play", "color": "#4CAF50", "usesAi": False},
},

@ -34,7 +36,7 @@ TRIGGER_NODES = [
"inputs": 0,
"outputs": 1,
"inputPorts": {},
"outputPorts": {0: {"schema": "FormPayload", "dynamic": True, "deriveFrom": "formFields"}},
"outputPorts": {0: {"schema": {"kind": "fromGraph", "parameter": "formFields"}}},
"executor": "trigger",
"meta": {"icon": "mdi-form-select", "color": "#9C27B0", "usesAi": False},
},

@ -46,7 +48,7 @@ TRIGGER_NODES = [
"parameters": [
{
"name": "cron",
"type": "string",
"type": "str",
"required": False,
"frontendType": "cron",
"description": t("Cron-Ausdruck"),

@ -55,7 +57,7 @@ TRIGGER_NODES = [
"inputs": 0,
"outputs": 1,
"inputPorts": {},
"outputPorts": {0: {"schema": "ActionResult"}},
"outputPorts": {0: {"schema": "ActionResult", "dataPickOptions": ACTION_RESULT_DATA_PICK_OPTIONS}},
"executor": "trigger",
"meta": {"icon": "mdi-clock", "color": "#2196F3", "usesAi": False},
},
@ -3,6 +3,22 @@
from modules.shared.i18nRegistry import t

from modules.features.graphicalEditor.nodeDefinitions.ai import ACTION_RESULT_DATA_PICK_OPTIONS

# Typed FeatureInstance binding (replaces legacy `string, hidden`).
# - type uses the discriminator notation `FeatureInstanceRef[<code>]` so the
#   DataPicker / RequiredAttributePicker can filter compatible upstream paths.
# - frontendType "featureInstance" is rendered by FeatureInstancePicker which
#   loads /options/feature.instance?featureCode=trustee for the current mandate.
_TRUSTEE_INSTANCE_PARAM = {
    "name": "featureInstanceId",
    "type": "FeatureInstanceRef[trustee]",
    "required": True,
    "frontendType": "featureInstance",
    "frontendOptions": {"featureCode": "trustee"},
    "description": t("Trustee-Mandant"),
}
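Note that the nodes below pull this template in via dict(_TRUSTEE_INSTANCE_PARAM) rather than referencing it directly: each node definition gets its own shallow copy, so later per-node mutation (localization, API serialization) cannot bleed into sibling nodes. A standalone illustration of the copy semantics:

    template = {"name": "featureInstanceId", "required": True}
    a = dict(template)          # independent shallow copy per node
    b = dict(template)
    a["description"] = "node-specific text"
    assert "description" not in b and "description" not in template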

TRUSTEE_NODES = [
{
"id": "trustee.refreshAccountingData",

@ -10,19 +26,18 @@ TRUSTEE_NODES = [
"label": t("Buchhaltungsdaten aktualisieren"),
"description": t("Buchhaltungsdaten aus externem System importieren/aktualisieren."),
"parameters": [
{"name": "featureInstanceId", "type": "string", "required": True, "frontendType": "hidden",
"description": t("Trustee Feature-Instanz-ID")},
{"name": "forceRefresh", "type": "boolean", "required": False, "frontendType": "checkbox",
dict(_TRUSTEE_INSTANCE_PARAM),
{"name": "forceRefresh", "type": "bool", "required": False, "frontendType": "checkbox",
"description": t("Import erzwingen"), "default": False},
{"name": "dateFrom", "type": "string", "required": False, "frontendType": "date",
{"name": "dateFrom", "type": "str", "required": False, "frontendType": "date",
"description": t("Startdatum"), "default": ""},
{"name": "dateTo", "type": "string", "required": False, "frontendType": "date",
{"name": "dateTo", "type": "str", "required": False, "frontendType": "date",
"description": t("Enddatum"), "default": ""},
],
"inputs": 1,
"outputs": 1,
"inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "ActionResult"}},
"outputPorts": {0: {"schema": "ActionResult", "dataPickOptions": ACTION_RESULT_DATA_PICK_OPTIONS}},
"meta": {"icon": "mdi-database-refresh", "color": "#4CAF50", "usesAi": False},
"_method": "trustee",
"_action": "refreshAccountingData",
@ -33,21 +48,23 @@ TRUSTEE_NODES = [
"label": t("Dokumente extrahieren"),
"description": t("Dokumenttyp und Daten aus PDF/JPG per AI extrahieren."),
"parameters": [
{"name": "connectionReference", "type": "string", "required": False, "frontendType": "userConnection",
{"name": "connectionReference", "type": "str", "required": False, "frontendType": "userConnection",
"frontendOptions": {"authority": "msft"},
"description": t("SharePoint-Verbindung"), "default": ""},
{"name": "sharepointFolder", "type": "string", "required": False, "frontendType": "sharepointFolder",
{"name": "sharepointFolder", "type": "str", "required": False, "frontendType": "sharepointFolder",
"frontendOptions": {"dependsOn": "connectionReference"},
"description": t("SharePoint-Ordnerpfad"), "default": ""},
{"name": "featureInstanceId", "type": "string", "required": True, "frontendType": "hidden",
"description": t("Trustee Feature-Instanz-ID")},
{"name": "prompt", "type": "string", "required": False, "frontendType": "textarea",
dict(_TRUSTEE_INSTANCE_PARAM),
{"name": "prompt", "type": "str", "required": False, "frontendType": "textarea",
"description": t("AI-Prompt für Extraktion"), "default": ""},
],
"inputs": 1,
"outputs": 1,
"inputPorts": {0: {"accepts": ["DocumentList", "Transit"]}},
"outputPorts": {0: {"schema": "DocumentList"}},
"inputPorts": {0: {"accepts": ["DocumentList", "Transit", "AiResult", "LoopItem", "ActionResult"]}},
# Runtime returns ActionResult.isSuccess(documents=[...]) — see
# actions/extractFromFiles.py. Declaring DocumentList here was adapter
# drift and broke the DataPicker for downstream nodes.
"outputPorts": {0: {"schema": "ActionResult", "dataPickOptions": ACTION_RESULT_DATA_PICK_OPTIONS}},
"meta": {"icon": "mdi-file-document-scan", "color": "#4CAF50", "usesAi": True},
"_method": "trustee",
"_action": "extractFromFiles",
@ -58,15 +75,18 @@ TRUSTEE_NODES = [
"label": t("Dokumente verarbeiten"),
"description": t("TrusteeDocument + TrusteePosition aus Extraktionsergebnis erstellen."),
"parameters": [
{"name": "documentList", "type": "string", "required": False, "frontendType": "hidden",
"description": t("Automatisch via Wire-Verbindung befüllt")},
{"name": "featureInstanceId", "type": "string", "required": True, "frontendType": "hidden",
"description": t("Trustee Feature-Instanz-ID")},
# Type matches what producers actually emit: ActionResult.documents
# is List[ActionDocument] (see datamodelChat.ActionResult). The
# DataPicker uses this string to filter compatible upstream paths.
{"name": "documentList", "type": "List[ActionDocument]", "required": True, "frontendType": "dataRef",
"description": t("Dokumente aus vorherigen Schritten"),
"graphInherit": {"port": 0, "kind": "documentListWire"}},
dict(_TRUSTEE_INSTANCE_PARAM),
],
"inputs": 1,
"outputs": 1,
"inputPorts": {0: {"accepts": ["DocumentList", "Transit"]}},
"outputPorts": {0: {"schema": "ActionResult"}},
"inputPorts": {0: {"accepts": ["ActionResult", "DocumentList", "Transit"]}},
"outputPorts": {0: {"schema": "ActionResult", "dataPickOptions": ACTION_RESULT_DATA_PICK_OPTIONS}},
"meta": {"icon": "mdi-file-document-check", "color": "#4CAF50", "usesAi": False},
"_method": "trustee",
"_action": "processDocuments",
@ -77,15 +97,15 @@ TRUSTEE_NODES = [
"label": t("In Buchhaltung synchronisieren"),
"description": t("Trustee-Positionen in Buchhaltungssystem übertragen."),
"parameters": [
{"name": "documentList", "type": "string", "required": False, "frontendType": "hidden",
"description": t("Automatisch via Wire-Verbindung befüllt")},
{"name": "featureInstanceId", "type": "string", "required": True, "frontendType": "hidden",
"description": t("Trustee Feature-Instanz-ID")},
{"name": "documentList", "type": "List[ActionDocument]", "required": True, "frontendType": "dataRef",
"description": t("Dokumente aus vorherigen Schritten"),
"graphInherit": {"port": 0, "kind": "documentListWire"}},
dict(_TRUSTEE_INSTANCE_PARAM),
],
"inputs": 1,
"outputs": 1,
"inputPorts": {0: {"accepts": ["Transit"]}},
"outputPorts": {0: {"schema": "ActionResult"}},
"inputPorts": {0: {"accepts": ["ActionResult", "DocumentList", "Transit"]}},
"outputPorts": {0: {"schema": "ActionResult", "dataPickOptions": ACTION_RESULT_DATA_PICK_OPTIONS}},
"meta": {"icon": "mdi-calculator", "color": "#4CAF50", "usesAi": False},
"_method": "trustee",
"_action": "syncToAccounting",
@ -96,34 +116,33 @@ TRUSTEE_NODES = [
"label": t("Treuhand-Daten abfragen"),
"description": t("Daten aus der Trustee-DB lesen (Lookup, Aggregation, Roh-Export). Pendant zu refreshAccountingData ohne externen Sync."),
"parameters": [
{"name": "featureInstanceId", "type": "string", "required": True, "frontendType": "hidden",
"description": t("Trustee Feature-Instanz-ID")},
{"name": "mode", "type": "string", "required": True, "frontendType": "select",
dict(_TRUSTEE_INSTANCE_PARAM),
{"name": "mode", "type": "str", "required": True, "frontendType": "select",
"frontendOptions": {"options": ["lookup", "raw", "aggregate"]},
"description": t("Abfragemodus"), "default": "lookup"},
{"name": "entity", "type": "string", "required": True, "frontendType": "select",
{"name": "entity", "type": "str", "required": True, "frontendType": "select",
"frontendOptions": {"options": ["tenantWithRent", "contact", "journalLines", "accounts", "balances"]},
"description": t("Entität, die gelesen werden soll"), "default": "tenantWithRent"},
{"name": "tenantNameRef", "type": "string", "required": False, "frontendType": "text",
{"name": "tenantNameRef", "type": "str", "required": False, "frontendType": "text",
"frontendOptions": {"dependsOn": "entity", "showWhen": ["tenantWithRent", "contact"]},
"description": t("Mietername (oder {{wire.feld}} aus Upstream)"), "default": ""},
{"name": "tenantAddressRef", "type": "string", "required": False, "frontendType": "text",
{"name": "tenantAddressRef", "type": "str", "required": False, "frontendType": "text",
"frontendOptions": {"dependsOn": "entity", "showWhen": ["tenantWithRent", "contact"]},
"description": t("Mieteradresse (Toleranz für Tippfehler)"), "default": ""},
{"name": "period", "type": "string", "required": False, "frontendType": "text",
{"name": "period", "type": "str", "required": False, "frontendType": "text",
"frontendOptions": {"dependsOn": "entity", "showWhen": ["tenantWithRent", "journalLines", "balances"]},
"description": t("Zeitraum (YYYY oder YYYY-MM-DD/YYYY-MM-DD)"), "default": ""},
{"name": "rentAccountPattern", "type": "string", "required": False, "frontendType": "text",
{"name": "rentAccountPattern", "type": "str", "required": False, "frontendType": "text",
"frontendOptions": {"dependsOn": "entity", "showWhen": ["tenantWithRent"]},
"description": t("Konto-Filter für Mietzins (z.B. '6000-6099' oder '6*')"), "default": ""},
{"name": "filterJson", "type": "string", "required": False, "frontendType": "textarea",
{"name": "filterJson", "type": "str", "required": False, "frontendType": "textarea",
"frontendOptions": {"dependsOn": "mode", "showWhen": ["raw", "aggregate"]},
"description": t("Optionaler JSON-Filter für mode=raw/aggregate"), "default": ""},
],
"inputs": 1,
"outputs": 1,
"inputPorts": {0: {"accepts": ["Transit", "AiResult", "ConsolidateResult"]}},
"outputPorts": {0: {"schema": "ActionResult"}},
"inputPorts": {0: {"accepts": ["Transit", "AiResult", "ConsolidateResult", "UdmDocument"]}},
"outputPorts": {0: {"schema": "ActionResult", "dataPickOptions": ACTION_RESULT_DATA_PICK_OPTIONS}},
"meta": {"icon": "mdi-database-search", "color": "#4CAF50", "usesAi": False},
"_method": "trustee",
"_action": "queryData",
@ -6,9 +6,11 @@ Nodes are defined first; IO/method actions are used at execution time.
"""

import logging
from typing import Dict, List, Any
from typing import Dict, List, Any, Optional

from modules.features.graphicalEditor.nodeDefinitions import STATIC_NODE_TYPES
from modules.features.graphicalEditor.nodeDefinitions.input import FORM_FIELD_TYPES
from modules.features.graphicalEditor.nodeAdapter import bindsActionFromLegacy
from modules.features.graphicalEditor.portTypes import PORT_TYPE_CATALOG, SYSTEM_VARIABLES
from modules.shared.i18nRegistry import normalizePrimaryLanguageTag, resolveText

@ -41,12 +43,21 @@ def _pickFromLangMap(d: Any, lang: str) -> Any:


def _localizeNode(node: Dict[str, Any], language: str) -> Dict[str, Any]:
    """Apply request language via resolveText (t() keys + multilingual dicts)."""
    """Apply request language via resolveText (t() keys + multilingual dicts).

    Also exposes Schicht-3 metadata (`bindsAction`) derived from the legacy
    `_method`/`_action` pair, so frontend consumers can resolve back to the
    Schicht-2 Action signature without parsing internal underscore-prefixed
    fields.
    """
    lang = normalizePrimaryLanguageTag(language, "en")
    bindsAction = bindsActionFromLegacy(node)
    out = dict(node)
    for key in list(out.keys()):
        if key.startswith("_"):
            del out[key]
    if bindsAction:
        out["bindsAction"] = bindsAction
    lbl = node.get("label")
    if lbl is not None:
        out["label"] = resolveText(lbl, lang) or node.get("id", "")
@ -71,6 +82,34 @@ def _localizeNode(node: Dict[str, Any], language: str) -> Dict[str, Any]:
            pc["description"] = resolveText(pd, lang)
        params.append(pc)
    out["parameters"] = params

    out_ports: Dict[Any, Dict[str, Any]] = {}
    for idx, po in (node.get("outputPorts") or {}).items():
        if not isinstance(po, dict):
            continue
        port_copy = dict(po)
        opts = port_copy.get("dataPickOptions")
        if isinstance(opts, list):
            loc_opts: List[Dict[str, Any]] = []
            for o in opts:
                if not isinstance(o, dict):
                    continue
                oc = dict(o)
                pl = oc.get("pickerLabel")
                if pl is not None:
                    oc["pickerLabel"] = resolveText(pl, lang)
                dt = oc.get("detail")
                if dt is not None:
                    oc["detail"] = resolveText(dt, lang)
                loc_opts.append(oc)
            port_copy["dataPickOptions"] = loc_opts
        out_ports[idx] = port_copy
    if isinstance(node.get("outputPorts"), dict):
        out["outputPorts"] = out_ports

    # Legacy node-level key no longer used — do not expose.
    out.pop("outputPickHints", None)

    return out
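To make the output shape concrete: for a node carrying _method="redmine" and _action="listTickets", the API-facing copy drops every underscore-prefixed key and attaches the derived binding. A rough sketch of the result (the exact bindsAction shape comes from bindsActionFromLegacy and is not shown in this diff, so the mapping below is an assumption):

    localized = {
        "id": "redmine.listTickets",
        "label": "Tickets auflisten",   # resolveText output for the request language
        "bindsAction": {"method": "redmine", "action": "listTickets"},  # assumed shape
        "outputPorts": {0: {"schema": "ActionResult", "dataPickOptions": [...]}},
        # note: no "_method"/"_action" keys survive serialization
    }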

@ -101,7 +140,7 @@ def getNodeTypesForApi(
    for name, schema in PORT_TYPE_CATALOG.items():
        catalogSerialized[name] = {
            "name": schema.name,
            "fields": [f.model_dump() for f in schema.fields],
            "fields": [f.model_dump(by_alias=True, exclude_none=True) for f in schema.fields],
        }

    return {

@ -109,6 +148,7 @@ def getNodeTypesForApi(
        "categories": categories,
        "portTypeCatalog": catalogSerialized,
        "systemVariables": SYSTEM_VARIABLES,
        "formFieldTypes": FORM_FIELD_TYPES,
    }


@ -124,3 +164,46 @@ def getNodeTypeToMethodAction() -> Dict[str, tuple]:
        if method and action:
            mapping[node["id"]] = (method, action)
    return mapping


def validateAdaptersAgainstMethods(methodInstances: Optional[Dict[str, Any]] = None) -> Optional[str]:
    """Run the Schicht-3 Adapter validator (5 drift rules) against the live methods.

    Intended to be called once at startup after methodDiscovery has populated
    the methods registry. Returns a human-readable report (None when healthy)
    so the caller decides whether to log, raise, or surface to operators.

    Pass `methodInstances` directly for testability; defaults to importing
    the live registry from `methodDiscovery.methods`.
    """
    from modules.features.graphicalEditor.adapterValidator import (
        _buildActionsRegistryFromMethods,
        _formatAdapterReport,
        _validateAllAdapters,
    )

    if methodInstances is None:
        try:
            from modules.workflows.processing.shared.methodDiscovery import methods
        except Exception as exc:
            logger.warning("Adapter validator skipped: cannot import methodDiscovery (%s)", exc)
            return None

        methodInstances = {}
        for fullName, info in (methods or {}).items():
            shortName = fullName.replace("Method", "").lower() if fullName[:1].isupper() else fullName
            instance = info.get("instance") if isinstance(info, dict) else None
            if instance is not None:
                methodInstances[shortName] = instance

    if not methodInstances:
        return None

    actionsRegistry = _buildActionsRegistryFromMethods(methodInstances)
    report = _validateAllAdapters(list(STATIC_NODE_TYPES), actionsRegistry)
    formatted = _formatAdapterReport(report)
    if not report.isHealthy:
        logger.warning("[adapterValidator] %s", formatted)
    elif report.warnings:
        logger.info("[adapterValidator] %s", formatted)
    return formatted
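Per the docstring's contract, a caller can wire this into application startup with a few lines; a hedged sketch (the wrapper name is illustrative, and the function above already logs under the adapterValidator tag):

    from modules.features.graphicalEditor.nodeRegistry import validateAdaptersAgainstMethods

    def runAdapterDriftCheck() -> None:
        # Call once after methodDiscovery has populated its registry.
        report = validateAdaptersAgainstMethods()
        if report is not None:
            # Surface to operators (dashboard, alerting, ...) instead of raising.
            print(report)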

File diff suppressed because it is too large

@ -14,7 +14,7 @@ from fastapi import APIRouter, Depends, Path, Query, Body, Request, HTTPException
from fastapi.responses import JSONResponse, StreamingResponse, Response
from modules.auth import limiter, getRequestContext, RequestContext
from modules.datamodels.datamodelPagination import PaginationParams, PaginationMetadata, normalize_pagination_dict
from modules.routes.routeHelpers import _applyFiltersAndSort
from modules.routes.routeHelpers import applyFiltersAndSort

from modules.features.graphicalEditor.mainGraphicalEditor import getGraphicalEditorServices
from modules.features.graphicalEditor.nodeRegistry import getNodeTypesForApi

@ -26,6 +26,7 @@ from modules.workflows.automation2.runEnvelope import (
    normalize_run_envelope,
)
from modules.features.graphicalEditor.entryPoints import find_invocation
from modules.features.graphicalEditor.upstreamPathsService import compute_upstream_paths
from modules.shared.i18nRegistry import apiRouteContext, resolveText
routeApiMsg = apiRouteContext("routeFeatureGraphicalEditor")

@ -110,6 +111,44 @@ def _validateInstanceAccess(instanceId: str, context: RequestContext) -> str:
    return str(instance.mandateId) if instance.mandateId else ""


def _validateTargetInstance(
    workflowData: Dict[str, Any],
    ownerInstanceId: str,
    context: RequestContext,
) -> None:
    """Enforce targetFeatureInstanceId rules for non-template workflows.

    - Templates (isTemplate=True) may omit targetFeatureInstanceId.
    - Non-templates MUST have a non-empty targetFeatureInstanceId.
    - If the targetFeatureInstanceId differs from the GE owner instance,
      the user must also have FeatureAccess on that target instance.
    """
    if workflowData.get("isTemplate"):
        return

    targetId = workflowData.get("targetFeatureInstanceId")
    if not targetId:
        return

    if targetId == ownerInstanceId:
        return

    from modules.interfaces.interfaceDbApp import getRootInterface
    rootInterface = getRootInterface()
    targetInstance = rootInterface.getFeatureInstance(targetId)
    if not targetInstance:
        raise HTTPException(
            status_code=400,
            detail=routeApiMsg("targetFeatureInstanceId refers to a non-existent feature instance"),
        )
    targetAccess = rootInterface.getFeatureAccess(str(context.user.id), targetId)
    if not targetAccess or not targetAccess.enabled:
        raise HTTPException(
            status_code=403,
            detail=routeApiMsg("Access denied to target feature instance"),
        )


@router.get("/{instanceId}/node-types")
@limiter.limit("60/minute")
def get_node_types(

@ -135,6 +174,48 @@ def get_node_types(
    return result


@router.post("/{instanceId}/upstream-paths")
@limiter.limit("60/minute")
def post_upstream_paths(
    request: Request,
    instanceId: str = Path(..., description="Feature instance ID"),
    body: Dict[str, Any] = Body(...),
    context: RequestContext = Depends(getRequestContext),
) -> dict:
    """Return pickable upstream DataRef paths for a node (draft graph in body)."""
    _validateInstanceAccess(instanceId, context)
    graph = body.get("graph")
    node_id = body.get("nodeId")
    if not isinstance(graph, dict) or not node_id:
        raise HTTPException(status_code=400, detail=routeApiMsg("graph and nodeId are required"))
    paths = compute_upstream_paths(graph, str(node_id))
    return {"paths": paths}


@router.get("/{instanceId}/upstream-paths/{node_id}")
@limiter.limit("60/minute")
def get_upstream_paths_saved(
    request: Request,
    instanceId: str = Path(..., description="Feature instance ID"),
    node_id: str = Path(..., description="Target node id"),
    workflowId: str = Query(..., description="Workflow id whose saved graph is used"),
    context: RequestContext = Depends(getRequestContext),
) -> dict:
    """Return upstream paths using the persisted workflow graph (same payload as POST variant)."""
    mandate_id = _validateInstanceAccess(instanceId, context)
    if not workflowId:
        raise HTTPException(status_code=400, detail=routeApiMsg("workflowId is required"))
    from modules.features.graphicalEditor.interfaceFeatureGraphicalEditor import getGraphicalEditorInterface

    iface = getGraphicalEditorInterface(context.user, mandate_id, featureInstanceId=instanceId)
    wf = iface.getWorkflow(workflowId)
    if not wf:
        raise HTTPException(status_code=404, detail=routeApiMsg("Workflow not found"))
    graph = wf.get("graph") or {}
    paths = compute_upstream_paths(graph if isinstance(graph, dict) else {}, str(node_id))
    return {"paths": paths}


@router.get("/{instanceId}/options/user.connection")
@limiter.limit("60/minute")
def get_user_connection_options(

@ -187,6 +268,65 @@ def get_user_connection_options(
    return {"options": options}


@router.get("/{instanceId}/options/feature.instance")
@limiter.limit("60/minute")
def get_feature_instance_options(
    request: Request,
    instanceId: str = Path(..., description="GraphicalEditor feature instance ID (workflow context)"),
    featureCode: str = Query(..., description="Feature code to filter by (e.g. 'trustee', 'redmine', 'clickup')"),
    enabledOnly: bool = Query(True, description="If true (default), only enabled feature instances are returned"),
    context: RequestContext = Depends(getRequestContext),
) -> dict:
    """Return mandate-scoped FeatureInstances for the given featureCode.

    Used by node parameters with frontendType='featureInstance' (e.g. Trustee
    or Redmine nodes that need to bind to a specific tenant FeatureInstance).
    Always restricted to the calling user's mandate (derived from the workflow
    feature instance) so the picker never leaks foreign-mandate instances.

    Response: { options: [ { value: "<id>", label: "<displayName> (<code>)" } ] }
    """
    mandateId = _validateInstanceAccess(instanceId, context)
    if not context.user:
        raise HTTPException(status_code=401, detail=routeApiMsg("Authentication required"))
    code = (featureCode or "").strip().lower()
    if not code:
        raise HTTPException(status_code=400, detail=routeApiMsg("featureCode query parameter is required"))
    if not mandateId:
        return {"options": []}

    from modules.interfaces.interfaceDbApp import getRootInterface
    rootInterface = getRootInterface()
    try:
        instances = rootInterface.getFeatureInstancesByMandate(
            mandateId, enabledOnly=bool(enabledOnly)
        ) or []
    except Exception as e:
        logger.error(
            "get_feature_instance_options: failed to load instances mandateId=%s: %s",
            mandateId, e, exc_info=True,
        )
        return {"options": []}

    options: List[Dict[str, str]] = []
    for fi in instances:
        fiCode = (getattr(fi, "featureCode", "") or "").strip().lower()
        if fiCode != code:
            continue
        fiId = str(getattr(fi, "id", "") or "")
        if not fiId:
            continue
        rawLabel = getattr(fi, "label", None) or getattr(fi, "name", None) or fiId
        options.append({"value": fiId, "label": f"{rawLabel} ({fiCode})"})

    logger.info(
        "graphicalEditor feature.instance options: instanceId=%s mandateId=%s "
        "featureCode=%s enabledOnly=%s -> %d options",
        instanceId, mandateId, code, enabledOnly, len(options),
    )
    return {"options": options}


@router.post("/{instanceId}/execute")
@limiter.limit("30/minute")
async def post_execute(

@ -216,9 +356,12 @@ async def post_execute(
    workflowId = body.get("workflowId")
    req_nodes = graph.get("nodes") or []
    workflow_for_envelope: Optional[Dict[str, Any]] = None
    targetFeatureInstanceId: Optional[str] = None
    if workflowId and not str(workflowId).startswith("transient-"):
        iface = getGraphicalEditorInterface(context.user, mandateId, instanceId)
        workflow_for_envelope = iface.getWorkflow(workflowId)
        if workflow_for_envelope:
            targetFeatureInstanceId = workflow_for_envelope.get("targetFeatureInstanceId")
    if workflowId and len(req_nodes) == 0:
        iface = getGraphicalEditorInterface(context.user, mandateId, instanceId)
        wf = iface.getWorkflow(workflowId)

@ -226,10 +369,18 @@ async def post_execute(
            graph = wf["graph"]
            logger.info("graphicalEditor execute: loaded graph from workflow %s", workflowId)
            workflow_for_envelope = wf
            targetFeatureInstanceId = wf.get("targetFeatureInstanceId")
    if not workflowId:
        import uuid
        workflowId = f"transient-{uuid.uuid4().hex[:12]}"
        logger.info("graphicalEditor execute: using transient workflowId=%s", workflowId)

    if targetFeatureInstanceId and targetFeatureInstanceId != instanceId:
        _validateTargetInstance(
            {"targetFeatureInstanceId": targetFeatureInstanceId},
            instanceId,
            context,
        )
    nodes_count = len(graph.get("nodes") or [])
    connections_count = len(graph.get("connections") or [])
    logger.info(

@ -261,6 +412,7 @@ async def post_execute(
        automation2_interface=ge_interface,
        run_envelope=run_env,
        label=_wfLabel,
        targetFeatureInstanceId=targetFeatureInstanceId,
    )
    logger.info(
        "graphicalEditor execute result: success=%s error=%s nodeOutputs_keys=%s failedNode=%s paused=%s",

@ -424,13 +576,35 @@ def get_templates(
    instanceId: str = Path(..., description="Feature instance ID"),
    scope: Optional[str] = Query(None, description="Filter by scope: user, instance, mandate, system"),
    pagination: Optional[str] = Query(None, description="JSON-encoded PaginationParams object"),
    mode: Optional[str] = Query(None, description="'filterValues' for distinct column values, 'ids' for all filtered IDs"),
    column: Optional[str] = Query(None, description="Column key (required when mode=filterValues)"),
    context: RequestContext = Depends(getRequestContext),
):
    """List workflow templates with optional pagination."""
    """List workflow templates with optional pagination.

    Supports the FormGeneratorTable backend pattern:
    - default: paginated/filtered/sorted ``{items, pagination}`` response
    - ``mode=filterValues&column=X``: distinct values for column X (cross-filtered)
    - ``mode=ids``: all IDs matching current filters
    """
    mandateId = _validateInstanceAccess(instanceId, context)
    iface = getGraphicalEditorInterface(context.user, mandateId, instanceId)
    templates = iface.getTemplates(scope=scope)

    from modules.routes.routeHelpers import enrichRowsWithFkLabels
    from modules.features.graphicalEditor.datamodelFeatureGraphicalEditor import AutoWorkflow
    enrichRowsWithFkLabels(templates, AutoWorkflow)

    if mode == "filterValues":
        if not column:
            raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
        from modules.routes.routeHelpers import handleFilterValuesInMemory
        return handleFilterValuesInMemory(templates, column, pagination)

    if mode == "ids":
        from modules.routes.routeHelpers import handleIdsInMemory
        return handleIdsInMemory(templates, pagination)

    paginationParams = None
    if pagination:
        try:

@ -442,7 +616,7 @@ def get_templates(
            raise HTTPException(status_code=400, detail=f"Invalid pagination parameter: {str(e)}")

    if paginationParams:
        filtered = _applyFiltersAndSort(templates, paginationParams)
        filtered = applyFiltersAndSort(templates, paginationParams)
        totalItems = len(filtered)
        totalPages = math.ceil(totalItems / paginationParams.pageSize) if totalItems > 0 else 0
        startIdx = (paginationParams.page - 1) * paginationParams.pageSize

@ -813,6 +987,7 @@ async def _runEditorAgent(
        "\n\nAvailable tools (all valid — use whichever the user's intent calls for):"
        "\n Graph-mutating: readWorkflowGraph, listAvailableNodeTypes, "
        "describeNodeType, addNode, removeNode, connectNodes, setNodeParameter, "
        "listUpstreamPaths, bindNodeParameter, "
        "autoLayoutWorkflow, validateGraph."
        "\n Workflow lifecycle: createWorkflow (new empty workflow), "
        "updateWorkflowMetadata (rename / change description / tags / activate), "

@ -844,6 +1019,8 @@ async def _runEditorAgent(
        "description, sane defaults, or — for required user-connection fields — "
        "an actual connectionId). Do NOT pass position; the layout step handles it."
        "\n6. connectNodes — wire the nodes consistent with port schemas from describeNodeType."
        "\n6b. When a parameter must take data from an upstream node, call listUpstreamPaths(nodeId=target) "
        "then bindNodeParameter(producerNodeId, path, parameterName) — do not rely on implicit wire fill."
        "\n7. autoLayoutWorkflow — call exactly once as the LAST graph-mutating step so the "
        "canvas shows a readable top-down layout instead of overlapping boxes."
        "\n8. validateGraph — sanity check, then answer the user."

@ -860,15 +1037,15 @@ async def _runEditorAgent(

    enrichedPrompt = prompt
    if dataSourceIds:
        from modules.features.workspace.routeFeatureWorkspace import _buildDataSourceContext
        from modules.features.workspace.routeFeatureWorkspace import buildDataSourceContext
        chatSvc = getService("chat", ctx)
        dsInfo = _buildDataSourceContext(chatSvc, dataSourceIds)
        dsInfo = buildDataSourceContext(chatSvc, dataSourceIds)
        if dsInfo:
            enrichedPrompt = f"{prompt}\n\n[Active Data Sources]\n{dsInfo}"

    if featureDataSourceIds:
        from modules.features.workspace.routeFeatureWorkspace import _buildFeatureDataSourceContext
        fdsInfo = _buildFeatureDataSourceContext(featureDataSourceIds)
        from modules.features.workspace.routeFeatureWorkspace import buildFeatureDataSourceContext
        fdsInfo = buildFeatureDataSourceContext(featureDataSourceIds)
        if fdsInfo:
            enrichedPrompt = f"{enrichedPrompt}\n\n[Attached Feature Data Sources]\n{fdsInfo}"

@ -1033,6 +1210,9 @@ async def list_connection_services(
        "drive": "Google Drive",
        "gmail": "Gmail",
        "files": "Files (FTP)",
        "kdrive": "kDrive",
        "calendar": "Calendar",
        "contact": "Contacts",
    }
    _serviceIcons = {
        "sharepoint": "sharepoint",

@ -1043,6 +1223,9 @@ async def list_connection_services(
        "drive": "cloud",
        "gmail": "mail",
        "files": "folder",
        "kdrive": "cloud",
        "calendar": "calendar",
        "contact": "contact",
    }
    items = [
        {"service": s, "label": _serviceLabels.get(s, s), "icon": _serviceIcons.get(s, "folder")}

@ -1133,9 +1316,17 @@ def get_workflows(
    instanceId: str = Path(..., description="Feature instance ID"),
    active: Optional[bool] = Query(None, description="Filter by active: true|false"),
    pagination: Optional[str] = Query(None, description="JSON-encoded PaginationParams object"),
    mode: Optional[str] = Query(None, description="'filterValues' for distinct column values, 'ids' for all filtered IDs"),
    column: Optional[str] = Query(None, description="Column key (required when mode=filterValues)"),
    context: RequestContext = Depends(getRequestContext),
):
    """List all workflows for this feature instance."""
    """List all workflows for this feature instance.

    Supports the FormGeneratorTable backend pattern:
    - default: paginated/filtered/sorted ``{items, pagination}`` response
    - ``mode=filterValues&column=X``: distinct values for column X (cross-filtered)
    - ``mode=ids``: all IDs matching current filters (for "select all")
    """
    mandateId = _validateInstanceAccess(instanceId, context)
    iface = getGraphicalEditorInterface(context.user, mandateId, instanceId)
    items = iface.getWorkflows(active=active)

@ -1163,10 +1354,19 @@ def get_workflows(
            "runStatus": active_run.get("status") if active_run else None,
            "stuckAtNodeId": stuck_at_node_id,
            "stuckAtNodeLabel": stuck_at_node_label or stuck_at_node_id or "",
            "createdAt": wf.get("sysCreatedAt"),
            "lastStartedAt": last_started_at,
        })

    if mode == "filterValues":
        if not column:
            raise HTTPException(status_code=400, detail="column parameter required for mode=filterValues")
        from modules.routes.routeHelpers import handleFilterValuesInMemory
        return handleFilterValuesInMemory(enriched, column, pagination)

    if mode == "ids":
        from modules.routes.routeHelpers import handleIdsInMemory
        return handleIdsInMemory(enriched, pagination)

    paginationParams = None
    if pagination:
        try:

@ -1178,7 +1378,7 @@ def get_workflows(
            raise HTTPException(status_code=400, detail=f"Invalid pagination parameter: {str(e)}")

    if paginationParams:
        filtered = _applyFiltersAndSort(enriched, paginationParams)
        filtered = applyFiltersAndSort(enriched, paginationParams)
        totalItems = len(filtered)
        totalPages = math.ceil(totalItems / paginationParams.pageSize) if totalItems > 0 else 0
        startIdx = (paginationParams.page - 1) * paginationParams.pageSize

@ -1221,6 +1421,7 @@ def create_workflow(
) -> dict:
    """Create a new workflow."""
    mandateId = _validateInstanceAccess(instanceId, context)
    _validateTargetInstance(body, instanceId, context)
    iface = getGraphicalEditorInterface(context.user, mandateId, instanceId)
    created = iface.createWorkflow(body)
    return created

@ -1238,6 +1439,11 @@ def update_workflow(
    """Update a workflow."""
    mandateId = _validateInstanceAccess(instanceId, context)
    iface = getGraphicalEditorInterface(context.user, mandateId, instanceId)
    existing = iface.getWorkflow(workflowId)
    if not existing:
        raise HTTPException(status_code=404, detail=routeApiMsg("Workflow not found"))
    merged = {**existing, **body}
    _validateTargetInstance(merged, instanceId, context)
    updated = iface.updateWorkflow(workflowId, body)
    if not updated:
        raise HTTPException(status_code=404, detail=routeApiMsg("Workflow not found"))

modules/features/graphicalEditor/upstreamPathsService.py (new file, 162 lines)
@ -0,0 +1,162 @@
# Copyright (c) 2025 Patrick Motsch
"""Compute pickable upstream paths for DataPicker / AI workflow tools."""
from __future__ import annotations

from typing import Any, Dict, List, Set

from modules.features.graphicalEditor.nodeDefinitions import STATIC_NODE_TYPES
from modules.features.graphicalEditor.portTypes import PORT_TYPE_CATALOG, PortSchema, parse_graph_defined_output_schema
from modules.workflows.automation2.graphUtils import buildConnectionMap

_NODE_BY_TYPE = {n["id"]: n for n in STATIC_NODE_TYPES}


def _paths_for_port_schema(schema: PortSchema, producer_node_id: str) -> List[Dict[str, Any]]:
    out: List[Dict[str, Any]] = []
    for field in schema.fields:
        path = [field.name]
        out.append(
            {
                "producerNodeId": producer_node_id,
                "path": path,
                "type": field.type,
                "label": ".".join(str(p) for p in path),
                "scopeOrigin": "data",
            }
        )
    out.append(
        {
            "producerNodeId": producer_node_id,
            "path": [],
            "type": schema.name,
            "label": "(whole output)",
            "scopeOrigin": "data",
        }
    )
    return out


def _paths_for_data_pick_options(
    options: List[Dict[str, Any]],
    producer_node_id: str,
) -> List[Dict[str, Any]]:
    """Explicit per-port pick list from node definition (authoritative; no catalog expansion)."""
    out: List[Dict[str, Any]] = []
    for o in options:
        if not isinstance(o, dict):
            continue
        path = o.get("path")
        if not isinstance(path, list):
            continue
        label = o.get("pickerLabel")
        out.append(
            {
                "producerNodeId": producer_node_id,
                "path": path,
                "type": o.get("type") or "Any",
                "label": label if isinstance(label, str) else ".".join(str(p) for p in path),
                "scopeOrigin": "data",
            }
        )
    return out


def _paths_for_schema(schema_name: str, producer_node_id: str) -> List[Dict[str, Any]]:
    if not schema_name or schema_name == "Transit":
        return []
    schema = PORT_TYPE_CATALOG.get(schema_name)
    if not schema:
        return []
    return _paths_for_port_schema(schema, producer_node_id)


def compute_upstream_paths(graph: Dict[str, Any], target_node_id: str) -> List[Dict[str, Any]]:
    """
    Return flattened first-level paths for every ancestor node's primary output schema.
    """
    nodes = graph.get("nodes") or []
    connections = graph.get("connections") or []
    node_by_id = {n["id"]: n for n in nodes if n.get("id")}
    if target_node_id not in node_by_id:
        return []

    conn_map = buildConnectionMap(connections)
    # predecessors: walk backwards along edges (target -> source)
    preds: Dict[str, Set[str]] = {}
    for tgt, pairs in conn_map.items():
        for src, _, _ in pairs:
            preds.setdefault(tgt, set()).add(src)

    seen: Set[str] = set()
    stack = [target_node_id]
    ancestors: Set[str] = set()
    while stack:
        cur = stack.pop()
        for p in preds.get(cur, ()):
            if p not in seen:
                seen.add(p)
                ancestors.add(p)
                stack.append(p)

    paths: List[Dict[str, Any]] = []
    for aid in sorted(ancestors):
        anode = node_by_id.get(aid)
        if not anode:
            continue
        nt = anode.get("type", "")
        ndef = _NODE_BY_TYPE.get(nt)
        if not ndef:
            continue
        out0 = (ndef.get("outputPorts") or {}).get(0, {})
        out0 = out0 if isinstance(out0, dict) else {}
        dpo = out0.get("dataPickOptions")
        if isinstance(dpo, list) and len(dpo) > 0:
            plab = (anode.get("title") or "").strip() or aid
            for entry in _paths_for_data_pick_options(dpo, aid):
                entry["producerLabel"] = plab
                paths.append(entry)
            continue

        derived = parse_graph_defined_output_schema(anode, out0)
        if derived:
            for entry in _paths_for_port_schema(derived, aid):
                entry["producerLabel"] = (anode.get("title") or "").strip() or aid
                paths.append(entry)
        else:
            raw_schema = out0.get("schema") if isinstance(out0, dict) else None
            schema_name = raw_schema if isinstance(raw_schema, str) and raw_schema else "ActionResult"
            for entry in _paths_for_schema(schema_name, aid):
                entry["producerLabel"] = (anode.get("title") or "").strip() or aid
                paths.append(entry)

    # Lexical loop hints (flow.loop): any loop node in ancestors adds synthetic paths
    for aid in ancestors:
        anode = node_by_id.get(aid) or {}
        if anode.get("type") == "flow.loop":
            paths.extend(
                [
                    {
                        "producerNodeId": aid,
                        "path": ["currentItem"],
                        "type": "Any",
                        "label": "loop.currentItem",
                        "scopeOrigin": "loop",
                    },
                    {
                        "producerNodeId": aid,
                        "path": ["currentIndex"],
                        "type": "int",
                        "label": "loop.currentIndex",
                        "scopeOrigin": "loop",
                    },
                    {
                        "producerNodeId": aid,
                        "path": ["count"],
                        "type": "int",
                        "label": "loop.count",
                        "scopeOrigin": "loop",
                    },
                ]
            )

    return paths
||||
|
|
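The ancestor collection in compute_upstream_paths is a plain reverse DFS over the connection map. The following self-contained re-derivation uses hand-written edge tuples as stand-ins for whatever buildConnectionMap actually returns (the tuple layout is inferred from the unpacking in the file above, not confirmed elsewhere):

from typing import Dict, Set

# tgt -> [(src, srcPort, tgtPort), ...]; simplified stand-in for buildConnectionMap output
conn_map = {
    "C": [("B", 0, 0)],
    "B": [("A", 0, 0), ("loop1", 0, 0)],
}

preds: Dict[str, Set[str]] = {}
for tgt, pairs in conn_map.items():
    for src, _, _ in pairs:
        preds.setdefault(tgt, set()).add(src)

seen: Set[str] = set()
stack = ["C"]  # target node; note it is never added to ancestors itself
ancestors: Set[str] = set()
while stack:
    cur = stack.pop()
    for p in preds.get(cur, ()):
        if p not in seen:
            seen.add(p)
            ancestors.add(p)
            stack.append(p)

print(sorted(ancestors))  # ['A', 'B', 'loop1']

If "loop1" were a flow.loop node, the service would then append the synthetic loop.currentItem, loop.currentIndex, and loop.count paths on top of the schema-derived ones.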
@@ -32,7 +32,7 @@ class DataNeutraliserConfig(PowerOnModel):
             "frontend_type": "text",
             "frontend_readonly": True,
             "frontend_required": True,
-            "fk_target": {"db": "poweron_app", "table": "Mandate"},
+            "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
         },
     )
     featureInstanceId: str = Field(
@@ -42,7 +42,7 @@ class DataNeutraliserConfig(PowerOnModel):
             "frontend_type": "text",
             "frontend_readonly": True,
             "frontend_required": True,
-            "fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
+            "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
         },
     )
     userId: str = Field(
@@ -52,7 +52,7 @@ class DataNeutraliserConfig(PowerOnModel):
             "frontend_type": "text",
             "frontend_readonly": True,
             "frontend_required": True,
-            "fk_target": {"db": "poweron_app", "table": "User"},
+            "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
         },
     )
     enabled: bool = Field(
@@ -107,7 +107,7 @@ class DataNeutralizerAttributes(BaseModel):
             "frontend_type": "text",
             "frontend_readonly": True,
             "frontend_required": True,
-            "fk_target": {"db": "poweron_app", "table": "Mandate"},
+            "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"},
         },
     )
     featureInstanceId: str = Field(
@@ -117,7 +117,7 @@ class DataNeutralizerAttributes(BaseModel):
             "frontend_type": "text",
             "frontend_readonly": True,
             "frontend_required": True,
-            "fk_target": {"db": "poweron_app", "table": "FeatureInstance"},
+            "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"},
         },
     )
     userId: str = Field(
@@ -127,7 +127,7 @@ class DataNeutralizerAttributes(BaseModel):
             "frontend_type": "text",
             "frontend_readonly": True,
             "frontend_required": True,
-            "fk_target": {"db": "poweron_app", "table": "User"},
+            "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"},
         },
     )
     originalText: str = Field(
@@ -142,7 +142,7 @@ class DataNeutralizerAttributes(BaseModel):
             "frontend_type": "text",
             "frontend_readonly": True,
             "frontend_required": False,
-            "fk_target": {"db": "poweron_management", "table": "FileItem"},
+            "fk_target": {"db": "poweron_management", "table": "FileItem", "labelField": "fileName"},
         },
     )
     patternType: str = Field(
@@ -160,16 +160,16 @@ class DataNeutralizationSnapshot(BaseModel):
     )
     mandateId: str = Field(
         description="Mandate scope",
-        json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate"}},
+        json_schema_extra={"label": "Mandanten-ID", "fk_target": {"db": "poweron_app", "table": "Mandate", "labelField": "label"}},
     )
     featureInstanceId: str = Field(
         default="",
         description="Feature instance scope",
-        json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance"}},
+        json_schema_extra={"label": "Feature-Instanz-ID", "fk_target": {"db": "poweron_app", "table": "FeatureInstance", "labelField": "label"}},
     )
     userId: str = Field(
         description="User who triggered neutralization",
-        json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "User"}},
+        json_schema_extra={"label": "Benutzer-ID", "fk_target": {"db": "poweron_app", "table": "UserInDB", "labelField": "username"}},
     )
     sourceLabel: str = Field(
         description="Human label, e.g. 'Prompt', 'Kontext', 'Nachricht 3'",
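The labelField additions in these hunks all follow one convention: the column keeps storing the foreign-key id, and labelField only names the field a frontend should display for it. A hedged sketch of how a generic form renderer might consume this metadata (fetch_row is a hypothetical helper, not part of this codebase):

from typing import Any, Dict, Optional

def fetch_row(db: str, table: str, row_id: str) -> Optional[Dict[str, Any]]:
    """Hypothetical lookup; stands in for whatever data access layer the frontend uses."""
    demo = {("poweron_app", "UserInDB", "u-42"): {"id": "u-42", "username": "pmotsch"}}
    return demo.get((db, table, row_id))

def display_label(fk_target: Dict[str, Any], raw_value: str) -> str:
    row = fetch_row(fk_target["db"], fk_target["table"], raw_value)
    if not row:
        return raw_value  # fall back to showing the stored id
    return str(row.get(fk_target.get("labelField", "id"), raw_value))

fk = {"db": "poweron_app", "table": "UserInDB", "labelField": "username"}
print(display_label(fk, "u-42"))  # pmotsch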
Some files were not shown because too many files have changed in this diff.